hexsha
stringlengths 40
40
| size
int64 1
1.03M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
239
| max_stars_repo_name
stringlengths 5
130
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
239
| max_issues_repo_name
stringlengths 5
130
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
239
| max_forks_repo_name
stringlengths 5
130
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 1
1.03M
| avg_line_length
float64 1
958k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4a038eb75904f29c41fd684c2ce83758ff6a889d
| 3,580
|
py
|
Python
|
tests/test_pytest_plugin.py
|
bcb/aiohttp
|
77638364958c74fff7719fdc028a79aa2281c3ad
|
[
"Apache-2.0"
] | null | null | null |
tests/test_pytest_plugin.py
|
bcb/aiohttp
|
77638364958c74fff7719fdc028a79aa2281c3ad
|
[
"Apache-2.0"
] | 2
|
2018-06-12T15:22:48.000Z
|
2018-06-12T15:31:13.000Z
|
tests/test_pytest_plugin.py
|
bcb/aiohttp
|
77638364958c74fff7719fdc028a79aa2281c3ad
|
[
"Apache-2.0"
] | 1
|
2020-01-17T00:33:59.000Z
|
2020-01-17T00:33:59.000Z
|
pytest_plugins = 'pytester'


def test_myplugin(testdir):
    """Exercise aiohttp's pytest plugin end-to-end.

    Generates a test module that uses the plugin's ``test_client`` and
    ``loop`` fixtures, runs pytest on it, and checks that exactly one of
    the twelve generated tests fails (``test_hello_fails`` asserts a
    deliberately wrong body) while the other eleven pass.

    NOTE(review): indentation of this block was flattened in extraction and
    has been reconstructed; the embedded module text is dedented by
    ``makepyfile``, so its internal layout only has to be valid Python.
    """
    testdir.makepyfile("""\
import asyncio
import pytest
from unittest import mock
from aiohttp import web

pytest_plugins = 'aiohttp.pytest_plugin'


@asyncio.coroutine
def hello(request):
    return web.Response(body=b'Hello, world')


def create_app(loop):
    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', hello)
    return app


@asyncio.coroutine
def test_hello(test_client):
    client = yield from test_client(create_app)
    resp = yield from client.get('/')
    assert resp.status == 200
    text = yield from resp.text()
    assert 'Hello, world' in text


@asyncio.coroutine
def test_hello_from_app(test_client, loop):
    app = web.Application(loop=loop)
    app.router.add_get('/', hello)
    client = yield from test_client(app)
    resp = yield from client.get('/')
    assert resp.status == 200
    text = yield from resp.text()
    assert 'Hello, world' in text


@asyncio.coroutine
def test_hello_with_loop(test_client, loop):
    client = yield from test_client(create_app)
    resp = yield from client.get('/')
    assert resp.status == 200
    text = yield from resp.text()
    assert 'Hello, world' in text


@asyncio.coroutine
def test_hello_fails(test_client):
    client = yield from test_client(create_app)
    resp = yield from client.get('/')
    assert resp.status == 200
    text = yield from resp.text()
    assert 'Hello, wield' in text


@asyncio.coroutine
def test_hello_with_fake_loop(test_client):
    with pytest.raises(AssertionError):
        fake_loop = mock.Mock()
        yield from test_client(web.Application(loop=fake_loop))


@asyncio.coroutine
def test_set_args(test_client, loop):
    with pytest.raises(AssertionError):
        app = web.Application(loop=loop)
        yield from test_client(app, 1, 2, 3)


@asyncio.coroutine
def test_set_keyword_args(test_client, loop):
    with pytest.raises(AssertionError):
        app = web.Application(loop=loop)
        yield from test_client(app, param=1)


@asyncio.coroutine
def test_noop():
    pass


@asyncio.coroutine
def previous(request):
    if request.method == 'POST':
        request.app['value'] = (yield from request.post())['value']
        return web.Response(body=b'thanks for the data')
    else:
        v = request.app.get('value', 'unknown')
        return web.Response(body='value: {}'.format(v).encode())


def create_stateful_app(loop):
    app = web.Application(loop=loop)
    app.router.add_route('*', '/', previous)
    return app


@pytest.fixture
def cli(loop, test_client):
    return loop.run_until_complete(test_client(create_stateful_app))


@asyncio.coroutine
def test_set_value(cli):
    resp = yield from cli.post('/', data={'value': 'foo'})
    assert resp.status == 200
    text = yield from resp.text()
    assert text == 'thanks for the data'
    assert cli.app['value'] == 'foo'


@asyncio.coroutine
def test_get_value(cli):
    resp = yield from cli.get('/')
    assert resp.status == 200
    text = yield from resp.text()
    assert text == 'value: unknown'
    cli.app['value'] = 'bar'
    resp = yield from cli.get('/')
    assert resp.status == 200
    text = yield from resp.text()
    assert text == 'value: bar'


def test_noncoro():
    assert True


@asyncio.coroutine
def test_client_failed_to_create(test_client):
    def make_app(loop):
        raise RuntimeError()

    with pytest.raises(RuntimeError):
        yield from test_client(make_app)
""")
    result = testdir.runpytest('-p', 'no:sugar')
    # 12 generated tests, of which only test_hello_fails is expected to fail.
    result.assert_outcomes(passed=11, failed=1)
| 24.026846
| 68
| 0.681564
|
4a038f521d584f28c9011d14a01b5919e0989d27
| 5,537
|
py
|
Python
|
data/external/repositories_2to3/132160/kaggle-ndsb-master/configurations/bagging_14_convroll4_big_weightdecay.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | null | null | null |
data/external/repositories_2to3/132160/kaggle-ndsb-master/configurations/bagging_14_convroll4_big_weightdecay.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | null | null | null |
data/external/repositories_2to3/132160/kaggle-ndsb-master/configurations/bagging_14_convroll4_big_weightdecay.py
|
Keesiu/meta-kaggle
|
87de739aba2399fd31072ee81b391f9b7a63f540
|
[
"MIT"
] | 1
|
2019-12-04T08:23:33.000Z
|
2019-12-04T08:23:33.000Z
|
import numpy as np
import theano
import theano.tensor as T
import lasagne as nn
import data
import load
import nn_plankton
import dihedral
import dihedral_fast
import tmp_dnn
import tta
# Pickled train/validation split used by this bagging ensemble member.
validation_split_path = "splits/bagging_split_14.pkl"

# (height, width) of the input crops, in pixels.
patch_size = (95, 95)

# Random augmentation applied to training samples.
augmentation_params = {
    'zoom_range': (1 / 1.6, 1.6),
    'rotation_range': (0, 360),
    'shear_range': (-20, 20),
    'translation_range': (-10, 10),
    'do_flip': True,
    'allow_stretch': 1.3,
}

batch_size = 128 // 4      # 32; written as 128 // 4, presumably scaled down from a larger-GPU config — TODO confirm
chunk_size = 32768 // 4    # 8192 samples per chunk
num_chunks_train = 840

momentum = 0.9
# Learning rate by chunk index: step decay at chunks 700 and 800.
learning_rate_schedule = {
    0: 0.003,
    700: 0.0003,
    800: 0.00003,
}

# Run validation / save a checkpoint every N chunks.
validate_every = 20
save_every = 20
def estimate_scale(img):
    """Return the rescale factor for ``img``: its longest side relative to an
    85-pixel baseline."""
    longest_side = np.maximum(img.shape[0], img.shape[1])
    return longest_side / 85.0
# Earlier, hand-enumerated test-time-augmentation grid (kept for reference):
# augmentation_transforms_test = []
# for flip in [True, False]:
#     for zoom in [1/1.3, 1/1.2, 1/1.1, 1.0, 1.1, 1.2, 1.3]:
#         for rot in np.linspace(0.0, 360.0, 5, endpoint=False):
#             tf = data.build_augmentation_transform(zoom=(zoom, zoom), rotation=rot, flip=flip)
#             augmentation_transforms_test.append(tf)

# Test-time augmentation: 70 quasirandom transforms drawn from slightly
# narrower ranges than the training augmentation above.
augmentation_transforms_test = tta.build_quasirandom_transforms(70, **{
    'zoom_range': (1 / 1.4, 1.4),
    'rotation_range': (0, 360),
    'shear_range': (-10, 10),
    'translation_range': (-8, 8),
    'do_flip': True,
    'allow_stretch': 1.2,
})

# Loader named "Zmuv" (zero-mean unit-variance) + rescaling driven by
# estimate_scale; exact behavior lives in the load module — TODO confirm.
data_loader = load.ZmuvRescaledDataLoader(estimate_scale=estimate_scale, num_chunks_train=num_chunks_train,
                                          patch_size=patch_size, chunk_size=chunk_size, augmentation_params=augmentation_params,
                                          augmentation_transforms_test=augmentation_transforms_test, validation_split_path=validation_split_path)

# cuDNN-backed conv/pool implementations; the cuda-convnet variants are kept
# commented out for reference.
# Conv2DLayer = nn.layers.cuda_convnet.Conv2DCCLayer
# MaxPool2DLayer = nn.layers.cuda_convnet.MaxPool2DCCLayer
Conv2DLayer = tmp_dnn.Conv2DDNNLayer
MaxPool2DLayer = tmp_dnn.MaxPool2DDNNLayer
def build_model():
    """Build the CNN ("convroll4 big"); returns ([input_layer], output_layer).

    NOTE(review): indentation was flattened in extraction and has been
    reconstructed; the layer graph below is strictly linear, so nesting is
    unambiguous.
    """
    l0 = nn.layers.InputLayer((batch_size, 1, patch_size[0], patch_size[1]))
    # Cyclic slice/roll/pool layers come from the dihedral modules; presumably
    # they implement rotation-equivariant feature maps — TODO confirm.
    l0c = dihedral.CyclicSliceLayer(l0)

    # Stage 1: 32 -> 16 filters, 3x3 "same" convs, overlapping 3x3/stride-2 pool.
    l1a = Conv2DLayer(l0c, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l1b = Conv2DLayer(l1a, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l1 = MaxPool2DLayer(l1b, ds=(3, 3), strides=(2, 2))
    l1r = dihedral_fast.CyclicConvRollLayer(l1)

    # Stage 2: 64 -> 32 filters.
    l2a = Conv2DLayer(l1r, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l2b = Conv2DLayer(l2a, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l2 = MaxPool2DLayer(l2b, ds=(3, 3), strides=(2, 2))
    l2r = dihedral_fast.CyclicConvRollLayer(l2)

    # Stage 3: 128 -> 128 -> 64 filters.
    l3a = Conv2DLayer(l2r, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3b = Conv2DLayer(l3a, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3c = Conv2DLayer(l3b, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3 = MaxPool2DLayer(l3c, ds=(3, 3), strides=(2, 2))
    l3r = dihedral_fast.CyclicConvRollLayer(l3)

    # Stage 4: 256 -> 256 -> 128 filters.
    l4a = Conv2DLayer(l3r, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l4b = Conv2DLayer(l4a, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l4c = Conv2DLayer(l4b, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l4 = MaxPool2DLayer(l4c, ds=(3, 3), strides=(2, 2))
    l4r = dihedral_fast.CyclicConvRollLayer(l4)
    l4f = nn.layers.flatten(l4r)

    # Classifier head: two 512-unit dropout+dense layers, cyclic pooling (rms),
    # then softmax over the dataset's classes.
    l5 = nn.layers.DenseLayer(nn.layers.dropout(l4f, p=0.5), num_units=512, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l5r = dihedral_fast.CyclicRollLayer(l5)
    l6 = nn.layers.DenseLayer(nn.layers.dropout(l5r, p=0.5), num_units=512, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l6m = dihedral.CyclicPoolLayer(l6, pool_function=nn_plankton.rms)
    l7 = nn.layers.DenseLayer(nn.layers.dropout(l6m, p=0.5), num_units=data.num_classes, nonlinearity=T.nnet.softmax, W=nn_plankton.Orthogonal(1.0))

    return [l0], l7
def build_objective(l_ins, l_out):
    """Return an Objective combining log loss with L2 weight decay.

    ``l_ins`` is unused here; the regularization term is built from all
    non-bias parameters of ``l_out``'s network.
    """
    lambda_reg = 0.0005  # weight-decay coefficient
    params = nn.layers.get_all_non_bias_params(l_out)
    # Sum of squared weights (computed once, shared by every loss() call).
    reg_term = sum(T.sum(p**2) for p in params)

    def loss(y, t):
        return nn_plankton.log_loss(y, t) + lambda_reg * reg_term

    return nn.objectives.Objective(l_out, loss_function=loss)
| 44.296
| 206
| 0.711577
|
4a03905c279f598174db8de3151753cdadb5c3d6
| 10,055
|
py
|
Python
|
termtosvg/main.py
|
pshevtsov/termtosvg
|
5b5c91ff17294ccc71a4ba3cff9ecc7a1cc3fafe
|
[
"BSD-3-Clause"
] | null | null | null |
termtosvg/main.py
|
pshevtsov/termtosvg
|
5b5c91ff17294ccc71a4ba3cff9ecc7a1cc3fafe
|
[
"BSD-3-Clause"
] | null | null | null |
termtosvg/main.py
|
pshevtsov/termtosvg
|
5b5c91ff17294ccc71a4ba3cff9ecc7a1cc3fafe
|
[
"BSD-3-Clause"
] | null | null | null |
"""Command line interface of termtosvg"""
import argparse
import logging
import sys
import tempfile
import termtosvg.config
import termtosvg.anim
logger = logging.getLogger('termtosvg')
USAGE = """termtosvg [output_file] [-g GEOMETRY] [-m MIN_DURATION] [-M MAX_DURATION]
[-t TEMPLATE] [-h]
Record a terminal session and render an SVG animation on the fly
"""
EPILOG = "See also 'termtosvg record --help' and 'termtosvg render --help'"
RECORD_USAGE = """termtosvg record [output_file] [-g GEOMETRY] [-m MIN_DURATION]
[-M MAX_DURATION] [-h]"""
RENDER_USAGE = """termtosvg render input_file [output_file] [-m MIN_DURATION]
[-M MAX_DURATION] [-t TEMPLATE] [-h]"""
def integral_duration(duration):
    """Parse a duration string such as ``"300"`` or ``"300ms"`` into a
    positive integer number of milliseconds; raise ValueError otherwise."""
    text = duration
    if text.lower().endswith('ms'):
        text = text[:-2]
    if not (text.isdigit() and int(text) >= 1):
        raise ValueError('duration must be an integer greater than 0')
    return int(text)
def parse(args, templates, default_template, default_geometry, default_min_dur, default_max_dur):
    """Parse command line arguments

    :param args: Arguments to parse
    :param templates: Mapping between template names and templates
    :param default_template: Name of the default template
    :param default_geometry: Default geometry of the screen
    :param default_min_dur: Default minimal duration between frames in milliseconds
    :param default_max_dur: Default maximal duration between frames in milliseconds
    :return: Tuple made of the subcommand called (None, 'render' or 'record') and all parsed
    arguments
    """
    # Option groups are built as stand-alone parsers (add_help=False) so they
    # can be shared as ``parents`` between the main parser and subcommands.
    template_parser = argparse.ArgumentParser(add_help=False)
    template_parser.add_argument(
        '-t', '--template',
        help=('set the SVG template used for rendering the SVG animation. '
              'TEMPLATE may either be one of the default templates ({}) '
              'or a path to a valid template.').format(', '.join(templates)),
        type=lambda name: termtosvg.anim.validate_template(name, templates),
        default=default_template,
        metavar='TEMPLATE'
    )
    geometry_parser = argparse.ArgumentParser(add_help=False)
    geometry_parser.add_argument(
        '-g', '--screen-geometry',
        help='geometry of the terminal screen used for rendering the animation. The geometry must '
        'be given as the number of columns and the number of rows on the screen separated by the '
        'character "x". For example "82x19" for an 82 columns by 19 rows screen.',
        metavar='GEOMETRY',
        default=default_geometry,
        type=termtosvg.config.validate_geometry
    )
    min_duration_parser = argparse.ArgumentParser(add_help=False)
    min_duration_parser.add_argument(
        '-m', '--min-frame-duration',
        type=integral_duration,
        metavar='MIN_DURATION',
        default=default_min_dur,
        help='minimum duration of a frame in milliseconds (default: {}ms)'.format(default_min_dur)
    )
    # A falsy default (e.g. None) means "no maximum" in the help text.
    if default_max_dur:
        default_max_dur_label = '{}ms'.format(default_max_dur)
    else:
        default_max_dur_label = 'No maximum value'
    max_duration_parser = argparse.ArgumentParser(add_help=False)
    max_duration_parser.add_argument(
        '-M', '--max-frame-duration',
        type=integral_duration,
        metavar='MAX_DURATION',
        default=default_max_dur,
        help='maximum duration of a frame in milliseconds (default: {})'.format(default_max_dur_label)
    )
    # Default (no-subcommand) parser: record + render on the fly.
    parser = argparse.ArgumentParser(
        prog='termtosvg',
        parents=[geometry_parser, min_duration_parser, max_duration_parser, template_parser],
        usage=USAGE,
        epilog=EPILOG
    )
    parser.add_argument(
        'output_file',
        nargs='?',
        help='optional filename of the SVG animation; if missing, a random filename will be '
        'automatically generated',
        metavar='output_file'
    )
    if args:
        if args[0] == 'record':
            parser = argparse.ArgumentParser(
                description='record the session to a file in asciicast v2 format',
                parents=[geometry_parser, min_duration_parser, max_duration_parser],
                usage=RECORD_USAGE
            )
            parser.add_argument(
                'output_file',
                nargs='?',
                help='optional filename for the recording; if missing, a random filename will '
                'be automatically generated',
                metavar='output_file'
            )
            return 'record', parser.parse_args(args[1:])
        elif args[0] == 'render':
            parser = argparse.ArgumentParser(
                description='render an asciicast recording as an SVG animation',
                parents=[template_parser, min_duration_parser, max_duration_parser],
                usage=RENDER_USAGE
            )
            parser.add_argument(
                'input_file',
                help='recording of a terminal session in asciicast v1 or v2 format'
            )
            parser.add_argument(
                'output_file',
                nargs='?',
                help='optional filename for the SVG animation; if missing, a random filename will '
                'be automatically generated',
                metavar='output_file'
            )
            return 'render', parser.parse_args(args[1:])
    # No recognized subcommand: parse everything with the default parser.
    return None, parser.parse_args(args)
def record_subcommand(geometry, input_fileno, output_fileno, cast_filename):
    """Save a terminal session as an asciicast recording

    :param geometry: (columns, lines) tuple, or None to use the current
        terminal size.
    :param cast_filename: Path the asciicast is written to.
    """
    # Imported lazily: only this subcommand needs the terminal module.
    import termtosvg.term
    logger.info('Recording started, enter "exit" command or Control-D to end')
    if geometry is None:
        columns, lines = termtosvg.term.get_terminal_size(output_fileno)
    else:
        columns, lines = geometry
    with termtosvg.term.TerminalMode(input_fileno):
        # record() presumably yields events as the session runs, so the cast
        # file is written while the terminal is still in raw mode — the
        # original nesting was flattened in extraction; TODO confirm.
        records = termtosvg.term.record(columns, lines, input_fileno, output_fileno)
        with open(cast_filename, 'w') as cast_file:
            for record in records:
                print(record.to_json_line(), file=cast_file)
    logger.info('Recording ended, cast file is {}'.format(cast_filename))
def render_subcommand(template, cast_filename, svg_filename, min_frame_duration, max_frame_duration):
    """Render the animation from an asciicast recording

    :param cast_filename: Input recording (asciicast v1 or v2).
    :param svg_filename: Output SVG path.
    """
    import termtosvg.asciicast
    import termtosvg.term
    logger.info('Rendering started')
    asciicast_records = termtosvg.asciicast.read_records(cast_filename)
    # Replay the recorded events into frames, clamping frame durations.
    replayed_records = termtosvg.term.replay(records=asciicast_records,
                                             from_pyte_char=termtosvg.anim.CharacterCell.from_pyte,
                                             min_frame_duration=min_frame_duration,
                                             max_frame_duration=max_frame_duration)
    termtosvg.anim.render_animation(records=replayed_records,
                                    filename=svg_filename,
                                    template=template)
    logger.info('Rendering ended, SVG animation is {}'.format(svg_filename))
def record_render_subcommand(template, geometry, input_fileno, output_fileno, svg_filename, min_frame_duration, max_frame_duration):
    """Record and render the animation on the fly"""
    import termtosvg.term
    logger.info('Recording started, enter "exit" command or Control-D to end')
    if geometry is None:
        columns, lines = termtosvg.term.get_terminal_size(output_fileno)
    else:
        columns, lines = geometry
    with termtosvg.term.TerminalMode(input_fileno):
        # Recording, replay and rendering are chained inside the raw-mode
        # context so frames are produced as the session runs — nesting
        # reconstructed from flattened source; TODO confirm.
        asciicast_records = termtosvg.term.record(columns, lines, input_fileno, output_fileno)
        replayed_records = termtosvg.term.replay(records=asciicast_records,
                                                 from_pyte_char=termtosvg.anim.CharacterCell.from_pyte,
                                                 min_frame_duration=min_frame_duration,
                                                 max_frame_duration=max_frame_duration)
        termtosvg.anim.render_animation(records=replayed_records,
                                        filename=svg_filename,
                                        template=template)
    logger.info('Recording ended, SVG animation is {}'.format(svg_filename))
def main(args=None, input_fileno=None, output_fileno=None):
    """termtosvg entry point: parse arguments and dispatch to a subcommand.

    ``args``/``input_fileno``/``output_fileno`` are injectable for testing;
    they default to the real process arguments and standard streams.
    """
    if args is None:
        args = sys.argv
    if input_fileno is None:
        input_fileno = sys.stdin.fileno()
    if output_fileno is None:
        output_fileno = sys.stdout.fileno()

    # Log to stderr so progress messages don't pollute the recorded stdout.
    console_handler = logging.StreamHandler(sys.stderr)
    console_handler.setLevel(logging.INFO)
    console_formatter = logging.Formatter('%(message)s')
    console_handler.setFormatter(console_formatter)
    logger.handlers = [console_handler]
    logger.setLevel(logging.INFO)

    templates = termtosvg.config.default_templates()
    # Prefer the 'gjm8' template when available, else the first alphabetically.
    default_template = 'gjm8' if 'gjm8' in templates else sorted(templates)[0]
    command, args = parse(args[1:], templates, default_template, None, 1, None)

    if command == 'record':
        cast_filename = args.output_file
        if cast_filename is None:
            # No output file given: pick a random temporary name.
            _, cast_filename = tempfile.mkstemp(prefix='termtosvg_', suffix='.cast')
        record_subcommand(args.screen_geometry, input_fileno, output_fileno, cast_filename)
    elif command == 'render':
        svg_filename = args.output_file
        if svg_filename is None:
            _, svg_filename = tempfile.mkstemp(prefix='termtosvg_', suffix='.svg')
        render_subcommand(args.template, args.input_file, svg_filename, args.min_frame_duration, args.max_frame_duration)
    else:
        # Default mode: record and render on the fly.
        svg_filename = args.output_file
        if svg_filename is None:
            _, svg_filename = tempfile.mkstemp(prefix='termtosvg_', suffix='.svg')
        record_render_subcommand(args.template, args.screen_geometry, input_fileno, output_fileno,
                                 svg_filename, args.min_frame_duration, args.max_frame_duration)

    for handler in logger.handlers:
        handler.close()
| 43.154506
| 132
| 0.654799
|
4a0390d92a3a5b2e36abc224e1bda5a4f8d7618d
| 11,128
|
py
|
Python
|
src/pip/_internal/__init__.py
|
Cristie/pip
|
e74d7a8bc9faa971e89b6aeed33e5187b983ebbd
|
[
"MIT"
] | 1
|
2018-07-24T14:56:11.000Z
|
2018-07-24T14:56:11.000Z
|
src/pip/_internal/__init__.py
|
Cristie/pip
|
e74d7a8bc9faa971e89b6aeed33e5187b983ebbd
|
[
"MIT"
] | null | null | null |
src/pip/_internal/__init__.py
|
Cristie/pip
|
e74d7a8bc9faa971e89b6aeed33e5187b983ebbd
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from __future__ import absolute_import
import locale
import logging
import os
import optparse
import warnings
import sys
# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
# but if invoked (i.e. imported), it will issue a warning to stderr if socks
# isn't available. requests unconditionally imports urllib3's socks contrib
# module, triggering this warning. The warning breaks DEP-8 tests (because of
# the stderr output) and is just plain annoying in normal usage. I don't want
# to add socks as yet another dependency for pip, nor do I want to allow-stder
# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning) # noqa
# We want to inject the use of SecureTransport as early as possible so that any
# references or sessions or what have you are ensured to have it, however we
# only want to do this in the case that we're running on macOS and the linked
# OpenSSL is too old to handle TLSv1.2
try:
    import ssl
except ImportError:
    pass
else:
    # Checks for OpenSSL 1.0.1 on MacOS: inject SecureTransport as the TLS
    # backend when the linked OpenSSL is too old for TLSv1.2.
    if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
        try:
            from pip._vendor.urllib3.contrib import securetransport
        except (ImportError, OSError):
            # SecureTransport unavailable: fall back to the linked OpenSSL.
            pass
        else:
            securetransport.inject_into_urllib3()
from pip import __version__
from pip._internal import cmdoptions
from pip._internal.exceptions import CommandError, PipError
from pip._internal.utils.misc import get_installed_distributions, get_prog
from pip._internal.utils import deprecation
from pip._internal.vcs import git, mercurial, subversion, bazaar # noqa
from pip._internal.baseparser import (
ConfigOptionParser, UpdatingDefaultsHelpFormatter,
)
from pip._internal.commands import get_summaries, get_similar_commands
from pip._internal.commands import commands_dict
from pip._vendor.urllib3.exceptions import InsecureRequestWarning
logger = logging.getLogger(__name__)
# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
def autocomplete():
    """Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash, zsh or fish).

    Reads COMP_WORDS / COMP_CWORD from the environment, prints candidate
    completions to stdout and exits via sys.exit(1).
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = (
            subcommand_name in ['show', 'uninstall'] and
            not current.startswith('-')
        )
        if should_list_installed:
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()

        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords, cword, subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            options = auto_complete_paths(current, completion_type)
            options = ((opt, 0) for opt in options)
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        opts = (o for it in opts for o in it)
        if current.startswith('-'):
            for opt in opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword, opts)
            if completion_type:
                subcommands = auto_complete_paths(current, completion_type)

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)
def get_path_completion_type(cwords, cword, opts):
    """Get the type of path completion (``file``, ``dir``, ``path`` or None)

    :param cwords: same as the environmental variable ``COMP_WORDS``
    :param cword: same as the environmental variable ``COMP_CWORD``
    :param opts: The available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    # Path completion only applies when the previous word is an option flag.
    if cword < 2 or not cwords[cword - 2].startswith('-'):
        return None
    flag = cwords[cword - 2].split('=')[0]
    for opt in opts:
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        # str(opt) renders as e.g. "-r/--requirement".
        if flag in str(opt).split('/'):
            if any(part in ('path', 'file', 'dir')
                   for part in opt.metavar.split('/')):
                return opt.metavar
    return None
def auto_complete_paths(current, completion_type):
    """If ``completion_type`` is ``file`` or ``path``, list all regular files
    and directories starting with ``current``; otherwise only list directories
    starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type (``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, prefix = os.path.split(current)
    resolved_dir = os.path.abspath(directory)
    # Don't complete paths if they can't be accessed
    if not os.access(resolved_dir, os.R_OK):
        return
    prefix = os.path.normcase(prefix)
    for entry in os.listdir(resolved_dir):
        # Only entries whose (case-normalized) name extends the prefix.
        if not os.path.normcase(entry).startswith(prefix):
            continue
        full_path = os.path.join(resolved_dir, entry)
        completed = os.path.normcase(os.path.join(directory, entry))
        if os.path.isdir(full_path):
            # Directories always complete, with a trailing separator appended.
            yield os.path.join(completed, '')
        elif completion_type != 'dir' and os.path.isfile(full_path):
            yield completed
def create_main_parser():
    """Build the top-level pip option parser: general options plus a
    description listing every subcommand."""
    parser_kw = {
        'usage': '\n%prog <command> [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    # Stop parsing general options at the first positional (the subcommand).
    parser.disable_interspersed_args()

    pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pip_pkg_dir, sys.version[:3],
    )

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    parser.main = True  # so the help formatter knows

    # create command listing for description
    command_summaries = get_summaries()
    description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
    parser.description = '\n'.join(description)

    return parser
def parseopts(args):
    """Split ``args`` into a subcommand name and the remaining arguments.

    Handles ``--version`` and bare ``pip`` / ``pip help`` by printing and
    exiting. :raises CommandError: when the subcommand is unknown.
    """
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout==5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        # Suggest a close match ("maybe you meant ...") when possible.
        guess = get_similar_commands(cmd_name)

        msg = ['unknown command "%s"' % cmd_name]
        if guess:
            msg.append('maybe you meant "%s"' % guess)

        raise CommandError(' - '.join(msg))

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
def main(args=None):
    """pip console entry point: parse arguments and run the subcommand.

    Returns the subcommand's exit status; exits with 1 on a PipError raised
    during argument parsing.
    """
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    # Handles shell completion requests (may exit the process).
    autocomplete()

    try:
        cmd_name, cmd_args = parseopts(args)
    except PipError as exc:
        sys.stderr.write("ERROR: %s" % exc)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)

    command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args))
    return command.main(cmd_args)
| 36.847682
| 79
| 0.65115
|
4a039442ae83d8b3cb98f9a7954593cfbbf4ae8a
| 956
|
py
|
Python
|
twitoff/predict.py
|
TheJoys2019/TwitOff
|
9b441280c333c854cd97489f3c19027c44788fb5
|
[
"MIT"
] | null | null | null |
twitoff/predict.py
|
TheJoys2019/TwitOff
|
9b441280c333c854cd97489f3c19027c44788fb5
|
[
"MIT"
] | null | null | null |
twitoff/predict.py
|
TheJoys2019/TwitOff
|
9b441280c333c854cd97489f3c19027c44788fb5
|
[
"MIT"
] | null | null | null |
"""Predictions"""
import numpy as np
from sklearn.linear_model import LogisticRegression
from .models import User
from .twitter import BASILICA
def predict_user(user1_name, user2_name, tweet_text, cache=None):
    """Determine and return which user is more likely to say a given Tweet.

    Fits a logistic regression on the two users' tweet embeddings (user1's
    tweets labeled 0, user2's labeled 1) and returns
    ``predict_proba`` for ``tweet_text`` — shape (1, 2), column 0 being
    user1's probability and column 1 user2's.

    Raises via ``Query.one()`` when either user is missing or ambiguous.
    NOTE(review): ``cache`` is accepted but never used — confirm intent.
    """
    user1 = User.query.filter(User.name == user1_name).one()
    user2 = User.query.filter(User.name == user2_name).one()
    # assumes every tweet.embedding is a same-length numeric vector — TODO confirm
    user1_embeddings = np.array([tweet.embedding for tweet in user1.tweets])
    user2_embeddings = np.array([tweet.embedding for tweet in user2.tweets])
    embeddings = np.vstack([user1_embeddings, user2_embeddings])
    # Labels: 0 for user1's tweets, 1 for user2's.
    labels = np.concatenate([np.zeros(len(user1.tweets)),
                             np.ones(len(user2.tweets))])
    log_reg = LogisticRegression().fit(embeddings, labels)
    # Embed the candidate tweet with the same model family used for training data.
    tweet_embedding = BASILICA.embed_sentence(tweet_text, model='twitter')
    return log_reg.predict_proba(np.array(tweet_embedding).reshape(1, -1))
| 50.315789
| 78
| 0.726987
|
4a0395045736e16d50acab98628e20c7d21af130
| 2,418
|
py
|
Python
|
pa-to-es/result_parser.py
|
jimmyjones2/community
|
10182ded35a8bdc3537f40e6321a5f66a23da59a
|
[
"Apache-2.0"
] | null | null | null |
pa-to-es/result_parser.py
|
jimmyjones2/community
|
10182ded35a8bdc3537f40e6321a5f66a23da59a
|
[
"Apache-2.0"
] | null | null | null |
pa-to-es/result_parser.py
|
jimmyjones2/community
|
10182ded35a8bdc3537f40e6321a5f66a23da59a
|
[
"Apache-2.0"
] | null | null | null |
'''
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
result_parser provides a class that takes the results of calling the performance
analyzer and putting together a document suitable for sending to Elasticsearch
'''
import json
class ResultParser():
    ''' Construct with the text response from calling performance analyzer. Use
        the records() method to iterate over the response, retrieving a single
        Elasticsearch doc with each call. '''

    def __init__(self, metric, response_text):
        '''response_text is the body of the response to the GET request.

        metric must expose ``name`` and ``agg`` attributes, which are copied
        into every generated doc.'''
        self.response_json = json.loads(response_text)
        self.metric = metric

    def _unpack_record(self, fields, record):
        ''' Match the field names with their values in the record. If there's no
            applicable value for the field (it's "null"), don't add the field to
            the doc. Returns a dict, which is the basis for the doc.'''
        ret = {'metric': self.metric.name}
        for field_name, value in zip(fields, record):
            # PA reports inapplicable dimensions as None / the string "null";
            # skip those fields entirely.
            if value is None or value == 'null':
                continue
            ret[field_name] = value
        return ret

    def records(self):
        ''' Iterates the response, yielding one dict at a time with a single
            metric and dimension

            A word on the API. PA returns a record for each combination
            of the requested dimensions. If a dimension doesn't bear on that
            particular metric, PA returns the string "null". To create the
            ES doc, you want to expose the combinations of dimensions that
            have values for that metric, skipping dimensions that have
            "null". The null dimensions are stripped out in _unpack_record. '''
        for node_name, values in self.response_json.items():
            timestamp = values['timestamp']
            data = values['data']
            if not data:
                # BUG FIX: was ``break`` — one node with empty data would
                # silently drop all remaining nodes; skip only this node.
                continue
            field_names = [x['name'] for x in data['fields']]
            records = data['records']
            for record in records:
                doc = self._unpack_record(field_names, record)
                if not doc:
                    continue
                # Attach per-node and per-metric context to the doc.
                doc['node_name'] = node_name
                doc['@timestamp'] = timestamp
                doc['agg'] = self.metric.agg
                yield doc
| 43.178571
| 81
| 0.61828
|
4a03963f27ccabb3ec0c594e05fcfd1f482107ef
| 305
|
py
|
Python
|
demo/click/greeter1.py
|
the-dalee/ww-improve-your-python-fu
|
b6fc09ff7635bca5578c9cca5fef860063906907
|
[
"CC-BY-4.0"
] | null | null | null |
demo/click/greeter1.py
|
the-dalee/ww-improve-your-python-fu
|
b6fc09ff7635bca5578c9cca5fef860063906907
|
[
"CC-BY-4.0"
] | null | null | null |
demo/click/greeter1.py
|
the-dalee/ww-improve-your-python-fu
|
b6fc09ff7635bca5578c9cca5fef860063906907
|
[
"CC-BY-4.0"
] | null | null | null |
#!/bin/env python3
import click
@click.command()
@click.argument('greet_from')
@click.argument('greet_to')
def greet(greet_from, greet_to):
    """
    This script will greet you politely
    """
    # f-string produces exactly the same output as the original .format() call.
    print(f"Hello {greet_to} from {greet_from}")
if __name__ == "__main__":
    greet()
| 19.0625
| 58
| 0.645902
|
4a03973dac7fb3aee5c25a8fc1605885d3fe2f07
| 7,385
|
py
|
Python
|
layout_base.py
|
erdoganonal/daily_scrum_tool
|
5e0291d44965ddf41352893658750f30d243d965
|
[
"MIT"
] | 1
|
2020-11-11T09:13:58.000Z
|
2020-11-11T09:13:58.000Z
|
layout_base.py
|
erdoganonal/daily_scrum_tool
|
5e0291d44965ddf41352893658750f30d243d965
|
[
"MIT"
] | null | null | null |
layout_base.py
|
erdoganonal/daily_scrum_tool
|
5e0291d44965ddf41352893658750f30d243d965
|
[
"MIT"
] | null | null | null |
"""Base for layout, includes common functions"""
from enum import Enum, auto
import abc
import tkinter as tk
from tkinter import ttk
import constants as const
def _configure(widget):
    """Apply the standard appearance to one widget, chosen by widget type."""
    props = {
        "background": const.LAYOUT_BG
    }
    if isinstance(widget, (ttk.Menubutton,)):
        # ttk widgets are themed through a Style object, not configure().
        props.update(font=const.MENU_BTN_FONT)
        ttk.Style(widget).configure(widget.winfo_class(), **props)
        return
    if isinstance(widget, (tk.Button)):
        widget.configure(
            font=const.BUTTON_FONT,
            background=const.BUTTON_BG
        )
        return
    if isinstance(widget, (tk.Label)):
        widget.configure(font=const.LABEL_DFT_FONT)
        widget.configure(fg=const.LABEL_DFT_FG)
    # Fall-through: labels and all remaining widget types get the base props.
    widget.configure(**props)
def configure(widget):
    """Recursively apply the standard configuration to *widget* and all
    of its descendant widgets."""
    _configure(widget)
    for descendant in widget.winfo_children():
        configure(descendant)
class LayoutBase:
    """Common behaviour shared by every layout."""
    def __init__(self, root, context, parent=None, **grid_options):
        self._root = root
        self._context = context
        # Create an own frame unless the caller supplied a parent widget.
        self.parent = tk.Frame(root) if parent is None else parent
        self._grid_options = grid_options
    @property
    def config(self):
        """Return the shared configuration object stored in the context."""
        return self._context.get_item("config")
    @property
    def is_active(self):
        """If returns a True value, all handlers will be triggered,
        False value, only handle_reset will be triggered.
        """
        # Assume every registered interface is active by default.
        return True
    def place(self):
        """Grid the frame using the stored grid options."""
        self.parent.grid(**self._grid_options)
    def place_forget(self):
        """Remove the frame from the grid."""
        self.parent.grid_forget()
class SingleLayoutBase(LayoutBase, metaclass=abc.ABCMeta):
    """Abstract base for layouts that react to every context state change."""
    @abc.abstractmethod
    def handle_init(self):
        """Hook invoked when the state becomes INITIAL."""
    @abc.abstractmethod
    def handle_startup(self):
        """Hook invoked when the state becomes STARTED."""
    @abc.abstractmethod
    def handle_next(self):
        """Hook invoked when the state becomes NEXTONE."""
    @abc.abstractmethod
    def handle_reset(self):
        """Hook invoked when the state becomes RESET."""
    @abc.abstractmethod
    def handle_timeout(self):
        """Hook invoked when the state becomes TIMEOUT."""
class MultiLayoutBase(LayoutBase):
    """Container layout that instantiates and registers several sub-layouts."""
    def __init__(self, root, context, *layouts, **grid_options):
        super().__init__(root, context, **grid_options)
        self._layouts = layouts
        # Each entry in *layouts* is a layout class; instantiate it with this
        # container's frame as parent and register it on the shared context.
        for layout_cls in self._layouts:
            context.register(layout_cls(self.parent, context))
# TimerBase is an abstract class. No instance should be created from it.
# pylint: disable=abstract-method
class TimerBase(SingleLayoutBase):
    "Base for timer based layouts."
    def __init__(self, root, context, **grid_options):
        super().__init__(root, context, **grid_options)
        # Identifier of the pending tkinter `after` callback, if any.
        self._timer_after_id = None
    def cancel_timer(self):
        "Cancels the countdown"
        if self._timer_after_id is not None:
            self._root.after_cancel(self._timer_after_id)
            self._timer_after_id = None
    def countdown(self, label, text_format, remaining_time, on_timeout=None):
        "Starts the countdown"
        # remaining_time is in seconds; the method re-schedules itself once
        # per second via `after` until the time is used up.
        minutes = int(remaining_time / 60)
        seconds = int(remaining_time % 60)
        # NOTE(review): for non-negative remaining_time this is <= 0 exactly
        # when remaining_time is 0, but the expression looks transposed
        # (seconds + 60 * minutes would be the total) — confirm intent.
        if (minutes + 60 * seconds) <= 0:
            if callable(on_timeout):
                on_timeout()
            return
        if not self._context.state == States.PAUSE:
            # Only update the label and consume a second while not paused.
            label.configure(text=text_format.format(minutes, seconds))
            remaining_time -= 1
        self._timer_after_id = self._root.after(1000, lambda: self.countdown(
            label, text_format, remaining_time, on_timeout
        ))
    def countup(self, label, text_format, elapsed_time=0):
        "Starts the countup"
        minutes = int(elapsed_time / 60)
        seconds = int(elapsed_time % 60)
        if not self._context.state == States.PAUSE:
            # While paused, the elapsed time is frozen (not incremented).
            label.configure(text=text_format.format(minutes, seconds))
            elapsed_time += 1
        self._timer_after_id = self._root.after(1000, lambda: self.countup(
            label, text_format, elapsed_time
        ))
class States(Enum):
    """Lifecycle states shared by all layouts via the layout context."""
    # Explicit values equal what auto() produced (1..6 in definition order).
    INITIAL = 1
    STARTED = 2
    NEXTONE = 3
    TIMEOUT = 4
    PAUSE = 5
    RESET = 6
class LayoutContext:
    "A context that includes entire layouts"
    def __init__(self):
        # Registered layout instances, iterated on every state change.
        self._attributes = []
        self._state = States.INITIAL
        # Shared key/value store accessible from any layout.
        self._globals = {}
    @property
    def state(self):
        "return the state of the context"
        return self._state
    @state.setter
    def state(self, new_state):
        "set the state of the context, and call related handler functions"
        if not isinstance(new_state, States):
            raise ValueError("new state should be value of States")
        self._state = new_state
        for layout in self:
            # Always allow initial step
            if layout.is_active:
                layout.place()
            else:
                # If a component is not active,
                # no need to trigger handlers
                layout.place_forget()
                continue
            if isinstance(layout, MultiLayoutBase):
                # If a layout contains multiple instances,
                # no need to trigger the handlers.
                # Trigger sub of layouts is enough.
                continue
            if new_state == States.INITIAL:
                layout.handle_init()
            elif new_state == States.STARTED:
                layout.handle_startup()
            elif new_state == States.NEXTONE:
                layout.handle_next()
            elif new_state == States.TIMEOUT:
                layout.handle_timeout()
            elif new_state == States.RESET:
                layout.handle_reset()
            if self._state != new_state:
                # If state does not equal the state that was set, means
                # that the state changed during iteration. If a new state set,
                # the loop needs to break to not override other state change.
                break
    def set_state_without_invoke(self, state):
        "Set the state without invoking handlers"
        self._state = state
    def __iter__(self):
        # Yield registered layouts in registration order.
        for item in self._attributes:
            yield item
    def get_item(self, name):
        "Return the item from context which was set before"
        return self._globals[name]
    def set_item(self, name, value):
        "Add an item to the context to use it from other layouts"
        self._globals[name] = value
    def del_item(self, name):
        "remove an item from context"
        del self._globals[name]
    def register(self, *args):
        "add given layouts in the context"
        for arg in args:
            if not isinstance(arg, (SingleLayoutBase, MultiLayoutBase)):
                raise TypeError("{0} should be instance of SingleLayoutBase".format(
                    arg.__class__.__name__
                ))
            self._attributes.append(arg)
| 29.422311
| 84
| 0.614083
|
4a03976ea07e745a8617dc9c30a215e3f2f0b4d1
| 283
|
py
|
Python
|
blog/urls.py
|
selfsryo/django_nuxt_blog
|
6c5aaaf580132129fda070323afe4e5af21dc3c5
|
[
"MIT"
] | 2
|
2021-12-15T08:27:46.000Z
|
2022-02-02T11:51:41.000Z
|
blog/urls.py
|
selfsryo/django_nuxt_blog
|
6c5aaaf580132129fda070323afe4e5af21dc3c5
|
[
"MIT"
] | null | null | null |
blog/urls.py
|
selfsryo/django_nuxt_blog
|
6c5aaaf580132129fda070323afe4e5af21dc3c5
|
[
"MIT"
] | null | null | null |
from django.urls import path, include
from rest_framework import routers
from blog import views
# DRF router that auto-generates list/detail routes for the registered viewsets.
router = routers.DefaultRouter()
router.register('articles', views.ArticleViewSet)
router.register('tags', views.TagViewSet)
urlpatterns = [
    # All generated API routes are exposed under the /api/ prefix.
    path('api/', include(router.urls)),
]
| 20.214286
| 49
| 0.763251
|
4a0397f901c40160e26127b49e10389d15bb7cdd
| 448
|
py
|
Python
|
tests/test_routers.py
|
chopdgd/django-genomix
|
2f56186ff1e9c7e1ff861ab6db912892cfa48e64
|
[
"MIT"
] | 3
|
2017-12-04T16:04:18.000Z
|
2019-08-29T15:53:00.000Z
|
tests/test_routers.py
|
chopdgd/django-genomix
|
2f56186ff1e9c7e1ff861ab6db912892cfa48e64
|
[
"MIT"
] | 208
|
2017-12-02T19:55:10.000Z
|
2022-03-28T14:46:15.000Z
|
tests/test_routers.py
|
chopdgd/django-genomix
|
2f56186ff1e9c7e1ff861ab6db912892cfa48e64
|
[
"MIT"
] | 1
|
2021-05-26T12:04:59.000Z
|
2021-05-26T12:04:59.000Z
|
from django.test import TestCase
from mock import MagicMock
from rest_framework.routers import SimpleRouter
from genomix import routers
class TestDefaultRouter(TestCase):
    """Unit tests for genomix.routers.DefaultRouter.extend()."""
    def setUp(self):
        # Router under test plus a mocked SimpleRouter carrying one entry.
        self.default_router = routers.DefaultRouter()
        self.mock_router = MagicMock(spec=SimpleRouter, registry=['url'])
    def test_extend(self):
        """extend() merges the other router's registry into this one."""
        self.default_router.extend(self.mock_router)
        assert self.default_router.registry == ['url']
| 24.888889
| 68
| 0.732143
|
4a0398d42bd40b9439345b046ba0ee714817e237
| 21,678
|
py
|
Python
|
test/functional/test_framework/util.py
|
Polpette/compare-number435398
|
9099a2d43a69f8cb1f6894b8489b83a519a940a3
|
[
"MIT"
] | null | null | null |
test/functional/test_framework/util.py
|
Polpette/compare-number435398
|
9099a2d43a69f8cb1f6894b8489b83a519a940a3
|
[
"MIT"
] | null | null | null |
test/functional/test_framework/util.py
|
Polpette/compare-number435398
|
9099a2d43a69f8cb1f6894b8489b83a519a940a3
|
[
"MIT"
] | 1
|
2019-12-21T21:36:08.000Z
|
2019-12-21T21:36:08.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Helpful routines for regression testing."""
from base64 import b64encode
from binascii import hexlify, unhexlify
from decimal import Decimal, ROUND_DOWN
import hashlib
import json
import logging
import os
import random
import re
from subprocess import CalledProcessError
import time
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
logger = logging.getLogger("TestFramework.utils")
# Assert functions
##################
def assert_fee_amount(fee, tx_size, fee_per_kB):
    """Assert that *fee* matches the expected fee for *tx_size* bytes at
    *fee_per_kB*, allowing the estimate to be up to 2 bytes high."""
    expected = tx_size * fee_per_kB / 1000
    if fee < expected:
        raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(expected)))
    # allow the wallet's estimation to be at most 2 bytes off
    upper_bound = (tx_size + 2) * fee_per_kB / 1000
    if fee > upper_bound:
        raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(expected)))
def assert_equal(thing1, thing2, *args):
    """Raise AssertionError unless every argument compares equal to the first."""
    everything = (thing1, thing2) + args
    if any(item != thing1 for item in everything[1:]):
        raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in everything))
def assert_greater_than(thing1, thing2):
    """Raise AssertionError unless thing1 is strictly greater than thing2."""
    if thing1 <= thing2:
        message = "%s <= %s" % (str(thing1), str(thing2))
        raise AssertionError(message)
def assert_greater_than_or_equal(thing1, thing2):
    """Raise AssertionError unless thing1 >= thing2."""
    if thing1 < thing2:
        message = "%s < %s" % (str(thing1), str(thing2))
        raise AssertionError(message)
def assert_raises(exc, fun, *args, **kwds):
    """Assert that fun(*args, **kwds) raises *exc* (message not checked)."""
    assert_raises_message(exc, None, fun, *args, **kwds)
def assert_raises_message(exc, message, fun, *args, **kwds):
    """Assert fun(*args, **kwds) raises *exc*; if *message* is given it must
    appear in the exception's error['message']."""
    try:
        fun(*args, **kwds)
    except JSONRPCException:
        # RPC failures have a dedicated helper that also checks the error code.
        raise AssertionError("Use assert_raises_rpc_error() to test RPC failures")
    except exc as e:
        if message is not None and message not in e.error['message']:
            raise AssertionError("Expected substring not found:" + e.error['message'])
    except Exception as e:
        # Wrong exception type — surface it as a test failure.
        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
    else:
        raise AssertionError("No exception raised")
def assert_raises_process_error(returncode, output, fun, *args, **kwds):
    """Execute a process and asserts the process return code and output.
    Calls function `fun` with arguments `args` and `kwds`. Catches a CalledProcessError
    and verifies that the return code and output are as expected. Throws AssertionError if
    no CalledProcessError was raised or if the return code and output are not as expected.
    Args:
        returncode (int): the process return code.
        output (string): [a substring of] the process output.
        fun (function): the function to call. This should execute a process.
        args*: positional arguments for the function.
        kwds**: named arguments for the function.
    """
    try:
        fun(*args, **kwds)
    except CalledProcessError as e:
        if returncode != e.returncode:
            raise AssertionError("Unexpected returncode %i" % e.returncode)
        if output not in e.output:
            raise AssertionError("Expected substring not found:" + e.output)
    else:
        # fun() returned without raising — the expected failure did not occur.
        raise AssertionError("No exception raised")
def assert_raises_rpc_error(code, message, fun, *args, **kwds):
    """Run an RPC and verify that a specific JSONRPC exception code and message is raised.
    Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
    and verifies that the error code and message are as expected. Throws AssertionError if
    no JSONRPCException was raised or if the error code/message are not as expected.
    Args:
        code (int), optional: the error code returned by the RPC call (defined
            in src/rpc/protocol.h). Set to None if checking the error code is not required.
        message (string), optional: [a substring of] the error string returned by the
            RPC call. Set to None if checking the error string is not required.
        fun (function): the function to call. This should be the name of an RPC.
        args*: positional arguments for the function.
        kwds**: named arguments for the function.
    """
    # try_rpc returns True only when a JSONRPCException matching code/message fired.
    assert try_rpc(code, message, fun, *args, **kwds), "No exception raised"
def try_rpc(code, message, fun, *args, **kwds):
    """Tries to run an rpc command.
    Test against error code and message if the rpc fails.
    Returns whether a JSONRPCException was raised."""
    try:
        fun(*args, **kwds)
    except JSONRPCException as e:
        # JSONRPCException was thrown as expected. Check the code and message values are correct.
        if (code is not None) and (code != e.error["code"]):
            raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
        if (message is not None) and (message not in e.error['message']):
            raise AssertionError("Expected substring not found:" + e.error['message'])
        return True
    except Exception as e:
        # Any other exception type means the call failed in an unexpected way.
        raise AssertionError("Unexpected exception raised: " + type(e).__name__)
    else:
        # The call succeeded: no RPC error was raised.
        return False
def assert_is_hex_string(string):
    """Raise AssertionError if *string* cannot be parsed as hexadecimal."""
    try:
        int(string, 16)
    except Exception as err:
        details = "Couldn't interpret %r as hexadecimal; raised: %s" % (string, err)
        raise AssertionError(details)
def assert_is_hash_string(string, length=64):
    """Raise AssertionError unless *string* is lowercase hex of *length* chars
    (pass a falsy *length* to skip the length check)."""
    if not isinstance(string, str):
        raise AssertionError("Expected a string, got type %r" % type(string))
    if length and len(string) != length:
        raise AssertionError(
            "String of length %d expected; got %d" % (length, len(string)))
    if not re.match('[abcdef0-9]+$', string):
        raise AssertionError(
            "String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find=False):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    If the should_not_find flag is true, to_match should not be found
    in object_array
    """
    if should_not_find:
        # In not-found mode no expected values make sense.
        assert_equal(expected, {})
    num_matched = 0
    for item in object_array:
        # Does this item satisfy every to_match key/value?
        all_match = True
        for key, value in to_match.items():
            if item[key] != value:
                all_match = False
        if not all_match:
            continue
        elif should_not_find:
            # A match in not-found mode is counted and reported below.
            num_matched = num_matched + 1
        for key, value in expected.items():
            if item[key] != value:
                raise AssertionError("%s : expected %s=%s" % (str(item), str(key), str(value)))
            num_matched = num_matched + 1
    if num_matched == 0 and not should_not_find:
        raise AssertionError("No objects matched %s" % (str(to_match)))
    if num_matched > 0 and should_not_find:
        raise AssertionError("Objects were found %s" % (str(to_match)))
# Utility functions
###################
def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    value = Decimal("20000000.00000003")
    round_tripped = json.loads(json.dumps(float(value)))
    if int(round_tripped * 1.0e8) != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
    """Return the number of bytes encoded by *hex_string*."""
    return len(bytes.fromhex(hex_string))
def bytes_to_hex_str(byte_str):
    """Return the lowercase hex encoding of *byte_str* as a str."""
    # bytes.hex() is the modern stdlib equivalent of hexlify(...).decode('ascii').
    return byte_str.hex()
def hash256(byte_str):
    """Return the double-SHA256 digest of *byte_str*, byte-reversed
    (Bitcoin's displayed hash order)."""
    inner = hashlib.sha256(byte_str).digest()
    return hashlib.sha256(inner).digest()[::-1]
def hex_str_to_bytes(hex_str):
    """Decode a hex string into bytes.

    Uses bytes.fromhex(), the modern stdlib equivalent of unhexlify();
    invalid input still raises a ValueError (binascii.Error is a subclass),
    so existing callers' exception handling is unaffected.
    """
    return bytes.fromhex(hex_str)
def str_to_b64str(string):
    """UTF-8 encode *string* and return its base64 representation as text."""
    encoded = b64encode(string.encode('utf-8'))
    return encoded.decode('ascii')
def satoshi_round(amount):
    """Round *amount* down to eight decimal places (one satoshi)."""
    quantum = Decimal('0.00000001')
    return Decimal(amount).quantize(quantum, rounding=ROUND_DOWN)
def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None):
    """Poll *predicate* every 50 ms (optionally under *lock*) until it is true.

    Defaults to a 60-second timeout when neither *attempts* nor *timeout*
    is supplied. Fails via the assertions below when the limit is reached.
    """
    if attempts == float('inf') and timeout == float('inf'):
        timeout = 60
    attempt = 0
    # Convert the relative timeout into an absolute deadline.
    timeout += time.time()
    while attempt < attempts and time.time() < timeout:
        if lock:
            with lock:
                if predicate():
                    return
        else:
            if predicate():
                return
        attempt += 1
        time.sleep(0.05)
    # Print the cause of the timeout
    assert_greater_than(attempts, attempt)
    assert_greater_than(timeout, time.time())
    raise RuntimeError('Unreachable')
# RPC/P2P connection constants and functions
############################################
# The maximum number of nodes a single test can spawn
MAX_NODES = 8
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
class PortSeed:
    """Process-wide seed used by p2p_port()/rpc_port() to derive port numbers."""
    # Must be initialized with a unique integer for each process
    n = None
def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None):
    """
    Args:
        url (str): URL of the RPC server to call
        node_number (int): the node number (or id) that this calls to
    Kwargs:
        timeout (int): HTTP timeout in seconds
    Returns:
        AuthServiceProxy. convenience object for making RPC calls.
    """
    proxy_kwargs = {}
    if timeout is not None:
        proxy_kwargs['timeout'] = timeout
    proxy = AuthServiceProxy(url, **proxy_kwargs)
    proxy.url = url  # store URL on proxy for info
    # When a coverage dir is given, wrap the proxy so each call is logged there.
    coverage_logfile = coverage.get_filename(
        coveragedir, node_number) if coveragedir else None
    return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def p2p_port(n):
    """Return the p2p listen port for node *n*, offset by the process seed."""
    assert(n <= MAX_NODES)
    return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_port(n):
    """Return the RPC port for node *n* (one PORT_RANGE above the p2p ports)."""
    return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_url(datadir, i, rpchost=None):
    """Build the authenticated RPC URL for node *i* from its datadir credentials."""
    rpc_u, rpc_p = get_auth_cookie(datadir)
    host = '127.0.0.1'
    port = rpc_port(i)
    if rpchost:
        # rpchost may be "host" or "host:port"; the latter overrides the port.
        parts = rpchost.split(':')
        if len(parts) == 2:
            host, port = parts
        else:
            host = rpchost
    return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
# Node functions
################
def initialize_datadir(dirname, n):
    """Create node *n*'s datadir with a regtest config file and return its path."""
    datadir = os.path.join(dirname, "node" + str(n))
    if not os.path.isdir(datadir):
        os.makedirs(datadir)
    with open(os.path.join(datadir, "noodlyappendagecoin.conf"), 'w', encoding='utf8') as f:
        f.write("regtest=1\n")
        f.write("port=" + str(p2p_port(n)) + "\n")
        f.write("rpcport=" + str(rpc_port(n)) + "\n")
        f.write("listenonion=0\n")
    return datadir
def get_datadir_path(dirname, n):
    """Return the data directory path for node *n* under *dirname*."""
    node_dir = "node" + str(n)
    return os.path.join(dirname, node_dir)
def get_auth_cookie(datadir):
    """Return (user, password) for RPC auth, read from the node's conf file
    or from the regtest .cookie file; raise ValueError if neither exists."""
    user = None
    password = None
    if os.path.isfile(os.path.join(datadir, "noodlyappendagecoin.conf")):
        with open(os.path.join(datadir, "noodlyappendagecoin.conf"), 'r', encoding='utf8') as f:
            for line in f:
                if line.startswith("rpcuser="):
                    assert user is None # Ensure that there is only one rpcuser line
                    user = line.split("=")[1].strip("\n")
                if line.startswith("rpcpassword="):
                    assert password is None # Ensure that there is only one rpcpassword line
                    password = line.split("=")[1].strip("\n")
    if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")):
        # The cookie file takes precedence; it stores "user:password".
        with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f:
            userpass = f.read()
            split_userpass = userpass.split(':')
            user = split_userpass[0]
            password = split_userpass[1]
    if user is None or password is None:
        raise ValueError("No RPC credentials")
    return user, password
def log_filename(dirname, n_node, logname):
    """Return the path of *logname* inside node *n_node*'s regtest directory."""
    node_dir = "node" + str(n_node)
    return os.path.join(dirname, node_dir, "regtest", logname)
def get_bip9_status(node, key):
    """Return the 'bip9_softforks' entry *key* from the node's blockchain info."""
    return node.getblockchaininfo()['bip9_softforks'][key]
def set_node_times(nodes, t):
    """Set mocktime *t* on every node in *nodes*."""
    for each_node in nodes:
        each_node.setmocktime(t)
def disconnect_nodes(from_connection, node_num):
    """Disconnect all peers of *from_connection* whose subver marks node *node_num*,
    then wait (up to ~5s) for the peer list to reflect it."""
    for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]:
        from_connection.disconnectnode(nodeid=peer_id)
    for _ in range(50):
        if [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == []:
            break
        time.sleep(0.1)
    else:
        raise AssertionError("timed out waiting for disconnect")
def connect_nodes(from_connection, node_num):
    """One-way connect *from_connection* to node *node_num* and wait for the handshake."""
    ip_port = "127.0.0.1:" + str(p2p_port(node_num))
    from_connection.addnode(ip_port, "onetry")
    # poll until version handshake complete to avoid race conditions
    # with transaction relaying
    while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
        time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
    """Connect nodes[a] and nodes[b] in both directions."""
    connect_nodes(nodes[a], b)
    connect_nodes(nodes[b], a)
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same tip.
    sync_blocks needs to be called with an rpc_connections set that has least
    one node already synced to the latest, stable tip, otherwise there's a
    chance it might return before all nodes are stably synced.
    """
    # Use getblockcount() instead of waitforblockheight() to determine the
    # initial max height because the two RPCs look at different internal global
    # variables (chainActive vs latestBlock) and the former gets updated
    # earlier.
    maxheight = max(x.getblockcount() for x in rpc_connections)
    start_time = cur_time = time.time()
    while cur_time <= start_time + timeout:
        tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
        if all(t["height"] == maxheight for t in tips):
            # All nodes are at the target height; they must also agree on the hash.
            if all(t["hash"] == tips[0]["hash"] for t in tips):
                return
            raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
                "".join("\n {!r}".format(tip) for tip in tips)))
        cur_time = time.time()
    raise AssertionError("Block sync to height {} timed out:{}".format(
        maxheight, "".join("\n {!r}".format(tip) for tip in tips)))
def sync_chain(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same best block
    """
    while timeout > 0:
        best_hash = [x.getbestblockhash() for x in rpc_connections]
        # All entries equal the first one => all nodes agree on the tip.
        if best_hash == [best_hash[0]] * len(best_hash):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Chain sync failed: Best block hashes don't match")
def sync_mempools(rpc_connections, *, wait=1, timeout=60):
    """
    Wait until everybody has the same transactions in their memory
    pools
    """
    while timeout > 0:
        # Compare every node's mempool against the first node's.
        pool = set(rpc_connections[0].getrawmempool())
        num_match = 1
        for i in range(1, len(rpc_connections)):
            if set(rpc_connections[i].getrawmempool()) == pool:
                num_match = num_match + 1
        if num_match == len(rpc_connections):
            return
        time.sleep(wait)
        timeout -= wait
    raise AssertionError("Mempool sync failed")
# Transaction/Block functions
#############################
def find_output(node, txid, amount):
    """
    Return index to output of txid with value amount
    Raises exception if there is none.
    """
    txdata = node.getrawtransaction(txid, 1)
    for index, vout in enumerate(txdata["vout"]):
        if vout["value"] == amount:
            return index
    raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
    """
    Return a random set of unspent txouts that are enough to pay amount_needed
    """
    assert(confirmations_required >= 0)
    utxo = from_node.listunspent(confirmations_required)
    # Shuffle so repeated calls spend different coins.
    random.shuffle(utxo)
    inputs = []
    total_in = Decimal("0.00000000")
    while total_in < amount_needed and len(utxo) > 0:
        t = utxo.pop()
        total_in += t["amount"]
        inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
    if total_in < amount_needed:
        raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
    return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
    """
    Create change output(s), return them
    """
    outputs = {}
    amount = amount_out + fee
    change = amount_in - amount
    if change > amount * 2:
        # Create an extra change output to break up big inputs
        change_address = from_node.getnewaddress()
        # Split change in two, being careful of rounding:
        outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
        change = amount_in - amount - outputs[change_address]
    if change > 0:
        # Remaining change goes to a fresh address.
        outputs[from_node.getnewaddress()] = change
    return outputs
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
    """
    Create a random transaction.
    Returns (txid, hex-encoded-transaction-data, fee)
    """
    from_node = random.choice(nodes)
    to_node = random.choice(nodes)
    # Fee is randomized between min_fee and min_fee + fee_increment*fee_variants.
    fee = min_fee + fee_increment * random.randint(0, fee_variants)
    (total_in, inputs) = gather_inputs(from_node, amount + fee)
    outputs = make_change(from_node, total_in, amount, fee)
    outputs[to_node.getnewaddress()] = float(amount)
    rawtx = from_node.createrawtransaction(inputs, outputs)
    signresult = from_node.signrawtransaction(rawtx)
    txid = from_node.sendrawtransaction(signresult["hex"], True)
    return (txid, signresult["hex"], fee)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
    """Ensure *node* owns at least *count* confirmed utxos and return them."""
    to_generate = int(0.5 * count) + 101
    while to_generate > 0:
        node.generate(min(25, to_generate))
        to_generate -= 25
    utxos = node.listunspent()
    iterations = count - len(utxos)
    addr1 = node.getnewaddress()
    addr2 = node.getnewaddress()
    if iterations <= 0:
        return utxos
    for i in range(iterations):
        # Split one utxo into two outputs to grow the utxo set.
        t = utxos.pop()
        inputs = []
        inputs.append({"txid": t["txid"], "vout": t["vout"]})
        outputs = {}
        send_value = t['amount'] - fee
        outputs[addr1] = satoshi_round(send_value / 2)
        outputs[addr2] = satoshi_round(send_value / 2)
        raw_tx = node.createrawtransaction(inputs, outputs)
        signed_tx = node.signrawtransaction(raw_tx)["hex"]
        node.sendrawtransaction(signed_tx)
    # Confirm everything that is still in the mempool.
    while (node.getmempoolinfo()['size'] > 0):
        node.generate(1)
    utxos = node.listunspent()
    assert(len(utxos) >= count)
    return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
    """Return a hex blob of 128 large OP_RETURN txouts used to bloat
    transactions (see callers that splice it into a raw transaction)."""
    # One script_pubkey: OP_RETURN OP_PUSH2 512 bytes of 0x01.
    script_pubkey = "6a4d0200" + "01" * 512
    # "81" is the leading txout-count byte from the original implementation
    # (128 OP_RETURN outputs plus the change output spliced in later).
    parts = ["81"]
    for _ in range(128):
        parts.append("0000000000000000")  # zero-value txout
        parts.append("fd0402")            # length of script_pubkey
        parts.append(script_pubkey)
    return "".join(parts)
def create_tx(node, coinbase, to_address, amount):
    """Create and sign a 1-in/1-out tx spending output 0 of *coinbase*;
    return the signed hex."""
    inputs = [{"txid": coinbase, "vout": 0}]
    outputs = {to_address: amount}
    rawtx = node.createrawtransaction(inputs, outputs)
    signresult = node.signrawtransaction(rawtx)
    assert_equal(signresult["complete"], True)
    return signresult["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
    """Send *num* large transactions (with *txouts* spliced in) and return their txids."""
    addr = node.getnewaddress()
    txids = []
    for _ in range(num):
        t = utxos.pop()
        inputs = [{"txid": t["txid"], "vout": t["vout"]}]
        outputs = {}
        change = t['amount'] - fee
        outputs[addr] = satoshi_round(change)
        rawtx = node.createrawtransaction(inputs, outputs)
        # Splice the big OP_RETURN txouts into the serialized transaction.
        # NOTE(review): offsets 92/94 assume a fixed raw-tx layout — confirm.
        newtx = rawtx[0:92]
        newtx = newtx + txouts
        newtx = newtx + rawtx[94:]
        signresult = node.signrawtransaction(newtx, None, None, "NONE")
        txid = node.sendrawtransaction(signresult["hex"], True)
        txids.append(txid)
    return txids
def mine_large_block(node, utxos=None):
    """Fill a block with 14 large transactions and mine it."""
    # generate a 66k transaction,
    # and 14 of them is close to the 1MB block limit
    num = 14
    txouts = gen_return_txouts()
    utxos = utxos if utxos is not None else []
    if len(utxos) < num:
        # Refill the working utxo list from the node's wallet.
        utxos.clear()
        utxos.extend(node.listunspent())
    fee = 100 * node.getnetworkinfo()["relayfee"]
    create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
    node.generate(1)
| 38.098418
| 119
| 0.651398
|
4a0398fd9be4a6e735c8c9f7ff112a1493b0b401
| 620
|
py
|
Python
|
corehq/sql_accessors/migrations/0054_drop_reindexa_accessor_functions.py
|
dimagilg/commcare-hq
|
ea1786238eae556bb7f1cbd8d2460171af1b619c
|
[
"BSD-3-Clause"
] | 471
|
2015-01-10T02:55:01.000Z
|
2022-03-29T18:07:18.000Z
|
corehq/sql_accessors/migrations/0054_drop_reindexa_accessor_functions.py
|
dimagilg/commcare-hq
|
ea1786238eae556bb7f1cbd8d2460171af1b619c
|
[
"BSD-3-Clause"
] | 14,354
|
2015-01-01T07:38:23.000Z
|
2022-03-31T20:55:14.000Z
|
corehq/sql_accessors/migrations/0054_drop_reindexa_accessor_functions.py
|
dimagilg/commcare-hq
|
ea1786238eae556bb7f1cbd8d2460171af1b619c
|
[
"BSD-3-Clause"
] | 175
|
2015-01-06T07:16:47.000Z
|
2022-03-29T13:27:01.000Z
|
# Generated by Django 1.10.7 on 2017-07-06 21:18
from django.db import migrations
class Migration(migrations.Migration):
    """Drop three reindex-accessor SQL functions that are no longer used."""
    dependencies = [
        ('sql_accessors', '0053_drop_unused_sql'),
    ]
    operations = [
        # Each statement is idempotent thanks to IF EXISTS.
        migrations.RunSQL("DROP FUNCTION IF EXISTS get_all_cases_modified_since(timestamp with time zone, INTEGER, INTEGER)"),
        migrations.RunSQL("DROP FUNCTION IF EXISTS get_all_forms_received_since(timestamp with time zone, INTEGER, INTEGER)"),
        migrations.RunSQL("DROP FUNCTION IF EXISTS get_all_ledger_values_modified_since(timestamp with time zone, INTEGER, INTEGER)"),
    ]
| 32.631579
| 134
| 0.735484
|
4a039a8f69efbd32b79f5d22fec6b573877b8eba
| 6,897
|
py
|
Python
|
SLAM_Lectures/Unit_PP/pp_01_f_astar_potential_function_question.py
|
yubaoliu/AISLAM
|
b12bba78b17ca61253ee0584927e3efaaa3d13d8
|
[
"MIT"
] | 7
|
2017-06-03T15:40:20.000Z
|
2021-05-17T16:05:58.000Z
|
SLAM_Lectures/Unit_PP/pp_01_f_astar_potential_function_question.py
|
yubaoliu/AISLAM
|
b12bba78b17ca61253ee0584927e3efaaa3d13d8
|
[
"MIT"
] | null | null | null |
SLAM_Lectures/Unit_PP/pp_01_f_astar_potential_function_question.py
|
yubaoliu/AISLAM
|
b12bba78b17ca61253ee0584927e3efaaa3d13d8
|
[
"MIT"
] | 7
|
2017-06-03T15:40:26.000Z
|
2020-08-12T12:11:41.000Z
|
# A* path planning.
# Addition of potential function keeps path away from obstacles.
# Note this modification changes only a single line in the A* algorithm.
# pp_01_f_astar_potential_function
# (c) Claus Brenner, 18 JAN 2014
from heapq import heappush, heappop
import numpy as np
import scipy.ndimage
import traceback
import gui
import common
# The world extents in units.
world_extents = (200, 150)
# The obstacle map.
# Obstacle = 255, free space = 0.
world_obstacles = np.zeros(world_extents, dtype=np.uint8)
# The array of visited cells during search.
visited_nodes = None
# Switch which determines if visited nodes shall be drawn in the GUI.
show_visited_nodes = True
# Switch which determines if potential function should be used.
use_potential_function = True
# The optimal path between start and goal. This is a list of (x,y) pairs.
optimal_path = []
# Functions for GUI functionality.
def add_obstacle(pos):
common.set_obstacle(world_obstacles, pos, True)
common.draw_background(gui, world_obstacles, visited_nodes, optimal_path,
show_visited_nodes)
def remove_obstacle(pos):
common.set_obstacle(world_obstacles, pos, False)
common.draw_background(gui, world_obstacles, visited_nodes, optimal_path,
show_visited_nodes)
def clear_obstacles():
global world_obstacles
world_obstacles = np.zeros(world_extents, dtype=np.uint8)
update_callback()
def toggle_visited_nodes():
global show_visited_nodes
show_visited_nodes = not show_visited_nodes
common.draw_background(gui, world_obstacles, visited_nodes, optimal_path,
show_visited_nodes)
def toggle_potential_function():
global use_potential_function
use_potential_function = not use_potential_function
update_callback()
def apply_distance_transform():
global world_obstacles
if use_potential_function and np.max(world_obstacles) == 255:
# Compute distance transform.
dist_transform = 255-np.minimum(
16*scipy.ndimage.morphology.distance_transform_edt(
255-world_obstacles), 255)
m = max(np.max(dist_transform), 1) # Prevent m==0.
world_obstacles = np.uint8((dist_transform * 255) / m)
else:
# Keep 255 values only (set all other to 0).
world_obstacles = (world_obstacles == 255) * np.uint8(255)
def update_callback(pos = None):
# First apply distance transform to world_obstacles.
apply_distance_transform()
# Call path planning algorithm.
start, goal = gui.get_start_goal()
if not (start==None or goal==None):
global optimal_path
global visited_nodes
try:
optimal_path, visited_nodes = astar(start, goal, world_obstacles)
except Exception, e:
print traceback.print_exc()
# Draw new background.
common.draw_background(gui, world_obstacles, visited_nodes, optimal_path,
show_visited_nodes)
# --------------------------------------------------------------------------
# A* algorithm.
# --------------------------------------------------------------------------
# Allowed movements and costs on the grid.
# Each tuple is: (movement_x, movement_y, cost).
s2 = np.sqrt(2)
movements = [ # Direct neighbors (4N).
(1,0, 1.), (0,1, 1.), (-1,0, 1.), (0,-1, 1.),
# Diagonal neighbors.
# Comment this out to play with 4N only (faster).
(1,1, s2), (-1,1, s2), (-1,-1, s2), (1,-1, s2),
]
def distance(p, q):
"""Helper function to compute distance between two points."""
return np.sqrt((p[0]-q[0])**2 + (p[1]-q[1])**2)
def astar(start, goal, obstacles):
"""A* algorithm."""
# In the beginning, the start is the only element in our front.
# NOW, the first element is the total cost through the point, which is
# the cost from start to point plus the estimated cost to the goal.
# The second element is the cost of the path from the start to the point.
# The third element is the position (cell) of the point.
# The fourth component is the position we came from when entering the tuple
# to the front.
# In the beginning, no cell has been visited.
extents = obstacles.shape
visited = np.zeros(extents, dtype=np.float32)
# Also, we use a dictionary to remember where we came from.
came_from = {}
# While there are elements to investigate in our front.
while front:
# Get smallest item and remove from front.
# Check if this has been visited already.
# Now it has been visited. Mark with cost.
# Also remember that we came from previous when we marked pos.
# Check if the goal has been reached.
if pos == goal:
break # Finished!
# Check all neighbors.
for dx, dy, deltacost in movements:
# Determine new position and check bounds.
# Add to front if: not visited before and no obstacle.
new_pos = (new_x, new_y)
# CHANGE 01_f: add the 'obstacle cost' to new_cost AND
# new_total_cost. As obstacle cost, use:
# obstacles[new_pos] / 64.
# The divider 64 determines the tradeoff between 'avoiding
# obstacles' and 'driving longer distances'. You may experiment
# with other values, but make sure you set it back to 64 for
# the grader.
# Please check again that you do not enter a tuple into
# the heap if it has been visited already or its obstacles[]
# value is 255 (check for '==255', not for '> 0').
# Reconstruct path, starting from goal.
return (path, visited)
# Main program.
if __name__ == '__main__':
# Link functions to buttons.
callbacks = {"update": update_callback,
"button_1_press": add_obstacle,
"button_1_drag": add_obstacle,
"button_1_release": update_callback,
"button_2_press": remove_obstacle,
"button_2_drag": remove_obstacle,
"button_2_release": update_callback,
"button_3_press": remove_obstacle,
"button_3_drag": remove_obstacle,
"button_3_release": update_callback,
}
# Extra buttons.
buttons = [("Clear", clear_obstacles),
("Use Potential Function", toggle_potential_function),
("Show Visited", toggle_visited_nodes)]
# Init GUI.
gui = gui.GUI(world_extents, 4, callbacks,
buttons, "on", "A* Algorithm using potential function.")
# Start GUI main loop.
gui.run()
| 38.747191
| 80
| 0.61795
|
4a039b407da29b5d3a2683bd53ca39820efb97da
| 34,366
|
py
|
Python
|
tests/test_generation.py
|
goibibo/swaggergenerator
|
60aeb44c6e9df874e527a3f34ae77e1c82df4fa9
|
[
"MIT"
] | 1
|
2021-06-07T10:37:52.000Z
|
2021-06-07T10:37:52.000Z
|
tests/test_generation.py
|
goibibo/swaggergenerator
|
60aeb44c6e9df874e527a3f34ae77e1c82df4fa9
|
[
"MIT"
] | null | null | null |
tests/test_generation.py
|
goibibo/swaggergenerator
|
60aeb44c6e9df874e527a3f34ae77e1c82df4fa9
|
[
"MIT"
] | 1
|
2021-06-05T08:26:44.000Z
|
2021-06-05T08:26:44.000Z
|
import pytest
import requests
from swaggergenerator3 import Generator, get_yaml
def test_no_params(httpbin):
generator = Generator()
response = requests.get(httpbin.url + '/get')
generator.provide_example(response.request, response)
response = requests.post(httpbin.url + '/post')
generator.provide_example(response.request, response)
expected = {
'/post': {
'post': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False,
'type': 'object',
'properties': {
'files': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'origin': {
'type': 'string'},
'form': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'url': {
'type': 'string'},
'args': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'headers': {
'additionalProperties': False,
'type': 'object',
'properties': {
'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'}}
},
'json': {'type': 'null'},
'data': {'type': 'string'}}
}
}
},
'parameters': [], 'description': 'TODO'}},
'/get': {
'get': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False, 'type': 'object',
'properties': {
'origin': {'type': 'string'},
'headers': {
'additionalProperties': False,
'type': 'object',
'properties': {
# 'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'}}},
'args': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'url': {'type': 'string'}}}}},
'parameters': [], 'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_get_params(httpbin):
generator = Generator()
response = requests.get(httpbin.url + '/get', params={'query_key': 'query_val'})
generator.provide_example(response.request, response)
expected = {
'/get': {
'get': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False,
'type': 'object',
'properties': {
'origin': {'type': 'string'},
'headers': {
'additionalProperties': False,
'type': 'object',
'properties': {
# 'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'}}},
'args': {
'additionalProperties': False,
'type': 'object',
'properties': {
'query_key': {
'type': 'string'}}},
'url': {'type': 'string'}}}}
},
'parameters': [
{'required': True, 'type': 'string', 'name': 'query_key',
'in': 'query'}], 'description': 'TODO'}}
}
assert generator.generate_paths() == expected
def test_post_body(httpbin):
generator = Generator()
response = requests.post(httpbin.url + '/post', json={'body_key': {'body_subkey': 'body_val'}})
generator.provide_example(response.request, response)
expected = {
'/post': {
'post': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False,
'type': 'object',
'properties': {
'files': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'origin': {'type': 'string'},
'form': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'url': {'type': 'string'},
'args': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'headers': {
'additionalProperties': False,
'type': 'object',
'properties': {
'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'},
'Content-Type': {'type': 'string'}}},
'json': {
'additionalProperties': False,
'type': 'object',
'properties': {
'body_key': {
'additionalProperties': False,
'type': 'object',
'properties': {
'body_subkey': {
'type': 'string'}}}}
},
'data': {'type': 'string'}}}}},
'parameters': [{'schema': {'additionalProperties': False,
'type': 'object', 'properties': {
'body_key': {'additionalProperties': False,
'type': 'object', 'properties': {
'body_subkey': {'type': 'string'}}}}},
'name': 'body_data', 'in': 'body'}],
'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_naive_path_params(httpbin):
generator = Generator()
response = requests.get(httpbin.url + '/cache/1')
generator.provide_example(response.request, response)
response = requests.get(httpbin.url + '/cache/2')
generator.provide_example(response.request, response)
expected = {
'/cache/{param1}': {
'get': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False,
'type': 'object',
'properties': {'origin': {
'type': 'string'},
'headers': {
'additionalProperties': False,
'type': 'object',
'properties': {
# 'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'}}},
'args': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'url': {'type': 'string'}}}
}
},
'parameters': [{'required': True, 'type': 'string',
'name': 'param1', 'in': 'path'}],
'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_component_length_mismatch(httpbin):
generator = Generator()
response = requests.get(httpbin.url + '/get')
generator.provide_example(response.request, response)
response = requests.get(httpbin.url + '/cache/2')
generator.provide_example(response.request, response)
expected = {
'/get': {
'get': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False,
'type': 'object',
'properties': {
'origin': {'type': 'string'},
'headers': {
'additionalProperties': False,
'type': 'object',
'properties': {
# 'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'}}},
'args': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'url': {
'type': 'string'}}}}
},
'parameters': [],
'description': 'TODO'}
},
'/cache/{param1}': {
'get': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False,
'type': 'object',
'properties': {
'origin': {'type': 'string'},
'headers': {
'additionalProperties': False,
'type': 'object',
'properties': {
# 'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'}}
},
'args': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'url': {'type': 'string'}}}
}
},
'parameters': [{
'required': True, 'type': 'string', 'name': 'param1', 'in': 'path'
}],
'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_non_naive_path_params(httpbin):
generator = Generator()
response = requests.get(httpbin.url + '/basic-auth/1/pass', auth=('1', 'pass'))
generator.provide_example(response.request, response)
response = requests.get(httpbin.url + '/basic-auth/user/pass', auth=('user', 'pass'))
generator.provide_example(response.request, response)
expected = {
'/basic-auth/{param1}/pass': {
'get': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False,
'type': 'object',
'properties': {
'authenticated': {'type': 'boolean'},
'user': {'type': 'string'}}}}},
'parameters': [
{'required': True, 'type': 'string',
'name': 'param1', 'in': 'path'}],
'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_custom_path_params(httpbin):
class CustomGenerator(Generator):
def is_param(self, e, path):
return e in {'user1', 'user2'} or super(CustomGenerator, self).is_param(e, path)
generator = CustomGenerator()
response = requests.get(httpbin.url + '/basic-auth/user1/pass', auth=('user1', 'pass'))
generator.provide_example(response.request, response)
response = requests.get(httpbin.url + '/basic-auth/user2/pass', auth=('user2', 'pass'))
generator.provide_example(response.request, response)
expected = {
'/basic-auth/{param1}/pass': {
'get': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False,
'type': 'object',
'properties': {
'authenticated': {'type': 'boolean'},
'user': {'type': 'string'}}}}},
'parameters': [
{'required': True, 'type': 'string',
'name': 'param1', 'in': 'path'}],
'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_base_path(httpbin):
generator = Generator(base_path='/cache')
response = requests.get(httpbin.url + '/cache/1')
generator.provide_example(response.request, response)
response = requests.get(httpbin.url + '/cache/2')
generator.provide_example(response.request, response)
expected = {
'/{param1}': {
'get': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False, 'type': 'object',
'properties': {
'origin': {'type': 'string'},
'headers': {
'additionalProperties': False, 'type': 'object',
'properties': {
# 'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'}}
},
'args': {
'additionalProperties': False, 'type': 'object',
'properties': {}}, 'url': {'type': 'string'}
}}}
},
'parameters': [
{
'required': True, 'type': 'string', 'name': 'param1',
'in': 'path'}],
'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_param_blacklist(httpbin):
generator = Generator(query_key_blacklist={'token'})
response = requests.get(httpbin.url + '/get', params={'token': '123'})
generator.provide_example(response.request, response)
expected = {
'/get': {
'get': {
'responses': {
'200': {
'description': 'TODO',
'schema': {'additionalProperties': False,
'type': 'object',
'properties': {'origin': {
'type': 'string'},
'headers': {
'additionalProperties': False,
'type': 'object',
'properties': {
# 'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'}}},
'args': {
'additionalProperties': False,
'type': 'object',
'properties': {'token': {'type': 'string'}}
},
'url': {'type': 'string'}}}}
},
'parameters': [], 'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_definition_matching(httpbin):
existing_schema = {
'definitions': {
'Person': {
'type': 'object',
'properties': {
'name': {
'type': 'string',
},
'id': {
'type': 'integer',
}
}
}
}
}
generator = Generator(existing_schema=existing_schema)
response = requests.post(httpbin.url + '/post',
json=[{'name': 'foo', 'id': 1}, {'name': 'bar', 'id': 2}])
generator.provide_example(response.request, response)
expected = {
'/post': {
'post': {
'responses': {
'200': {
'description': 'TODO',
'schema': {'$ref': '#/definitions/Person'}}
},
'parameters': [
{
'schema': {
'items': {
'$ref': '#/definitions/Person'
},
'type': 'array'},
'name': 'body_data', 'in': 'body'}], 'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_subdefinition_matching(httpbin):
existing_schema = {
'definitions': {
'Person': {
'type': 'object',
'additionalProperties': False,
'properties': {
'name': {
'$ref': '#/definitions/Name',
}
},
},
'Name': {
'type': 'object',
'additionalProperties': False,
'properties': {
'first': {
type: 'string',
},
'last': {
type: 'string',
},
}
}
}
}
generator = Generator(existing_schema=existing_schema)
response = requests.post(httpbin.url + '/post', json={'name': {'first': 'foo', 'last': 'bar'}})
generator.provide_example(response.request, response)
expected = {
'/post': {
'post': {
'responses': {
'200': {
'description': 'TODO',
'schema': {'additionalProperties': False,
'type': 'object',
'properties': {
'files': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'origin': {'type': 'string'},
'form': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'url': {'type': 'string'},
'args': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'headers': {
'additionalProperties': False,
'type': 'object',
'properties': {
'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'},
'Content-Type': {'type': 'string'}}},
'json': {
'$ref': '#/definitions/Person'},
'data': {
'type': 'string'}}}}},
'parameters': [{'schema': {'$ref': '#/definitions/Person'},
'name': 'body_data', 'in': 'body'}],
'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_empty_array_with_valid_examples(httpbin):
generator = Generator()
response = requests.post(httpbin.url + '/post', json=[])
generator.provide_example(response.request, response)
response = requests.post(httpbin.url + '/post', json=[1, 2, 3])
generator.provide_example(response.request, response)
expected = {
'/post': {
'post': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False,
'type': 'object',
'properties': {
'files': {
'additionalProperties': False,
'type': 'object',
'properties': {}
},
'origin': {'type': 'string'},
'form': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'url': {'type': 'string'},
'args': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'headers': {
'additionalProperties': False,
'type': 'object',
'properties': {
'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'},
'Content-Type': {'type': 'string'}}},
'json': {
'items': {'type': 'number'},
'type': 'array'
},
'data': {'type': 'string'}}}}
},
'parameters': [
{'schema': {'items': {'type': 'number'}, 'type': 'array'},
'name': 'body_data', 'in': 'body'}],
'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_empty_array_alone_ignored(httpbin):
generator = Generator()
response = requests.post(httpbin.url + '/post', json=[])
generator.provide_example(response.request, response)
expected = {'/post': {'post': {'responses': {}, 'parameters': [], 'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_known_paths_ignored(httpbin):
existing_schema = {
'paths': {
'/get': {
'get': {}
}
}
}
generator = Generator(existing_schema=existing_schema)
response = requests.get(httpbin.url + '/get')
generator.provide_example(response.request, response)
expected = {}
assert generator.generate_paths() == expected
def test_example_str(httpbin):
generator = Generator()
response = requests.get(httpbin.url + '/get')
generator.provide_example(response.request, response)
assert str(generator.path_to_examples['/get'][0]) == "'get /get -> 200'"
def test_get_yaml(httpbin):
generator = Generator()
response = requests.post(httpbin.url + '/post', json=[])
generator.provide_example(response.request, response)
expected = {'/post': {'post': {'responses': {}, 'parameters': [], 'description': 'TODO'}}}
schemas = generator.generate_paths()
assert schemas == expected
expected_yaml = """ /post:
post:
description: TODO
parameters: []
responses: {}
"""
assert get_yaml(schemas) == expected_yaml
def test_provided_default(httpbin):
generator = Generator(
default={'description': 'unexpected error', 'schema': {'$ref': '#/definitions/Error'}})
response = requests.post(httpbin.url + '/get', json=[])
generator.provide_example(response.request, response)
expected = {'/get': {'post': {'responses': {
'default': {'description': 'unexpected error',
'schema': {'$ref': '#/definitions/Error'}}},
'description': 'TODO'}}}
assert generator.generate_paths() == expected
def test_optional_field_nonempty_example(httpbin):
generator = Generator()
response = requests.post(httpbin.url + '/post', json={'parent': {'other': True}})
generator.provide_example(response.request, response)
response = requests.post(httpbin.url + '/post',
json={'parent': {'optional': True, 'other': True}})
generator.provide_example(response.request, response)
expected = {
'/post': {
'post': {
'responses': {
'200': {
'description': 'TODO',
'schema': {
'additionalProperties': False,
'type': 'object',
'properties': {'files': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'origin': {
'type': 'string'},
'form': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'url': {
'type': 'string'},
'args': {
'additionalProperties': False,
'type': 'object',
'properties': {}},
'headers': {
'additionalProperties': False,
'type': 'object',
'properties': {
'Content-Length': {'type': 'string'},
'Accept-Encoding': {'type': 'string'},
'Connection': {'type': 'string'},
'Accept': {'type': 'string'},
'User-Agent': {'type': 'string'},
'Host': {'type': 'string'},
'Content-Type': {'type': 'string'}}},
'json': {
'additionalProperties': False,
'type': 'object',
'properties': {
'parent': {
'additionalProperties': False,
'type': 'object',
'properties': {
'other': {'type': 'boolean'},
'optional': {'type': 'boolean'}}
}
}
},
'data': {
'type': 'string'
}}}}},
'parameters': [
{
'schema': {
'additionalProperties': False,
'type': 'object',
'properties': {
'parent': {
'additionalProperties': False,
'type': 'object',
'properties': {
'other': {'type': 'boolean'},
'optional': {'type': 'boolean'}}}}
},
'name': 'body_data', 'in': 'body'}],
'description': 'TODO'}}}
assert generator.generate_paths() == expected
if __name__ == '__main__':
pytest.main()
| 45.337731
| 99
| 0.33312
|
4a039fac5dd802b7fc4f80a49ddd7b12e55dee15
| 4,329
|
py
|
Python
|
setup.py
|
JamesSeaward/qiskit-terra
|
03083dcc23c13fce17c66369c555943299547aeb
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
JamesSeaward/qiskit-terra
|
03083dcc23c13fce17c66369c555943299547aeb
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
JamesSeaward/qiskit-terra
|
03083dcc23c13fce17c66369c555943299547aeb
|
[
"Apache-2.0"
] | 1
|
2020-02-27T14:05:24.000Z
|
2020-02-27T14:05:24.000Z
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"The Qiskit Terra setup file."
import os
import sys
from setuptools import setup, find_packages, Extension
try:
from Cython.Build import cythonize
except ImportError:
import subprocess
subprocess.call([sys.executable, '-m', 'pip', 'install', 'Cython>=0.27.1'])
from Cython.Build import cythonize
REQUIREMENTS = [
"jsonschema>=2.6",
"marshmallow>=3,<4",
"marshmallow_polyfield>=5.7,<6",
"networkx>=2.2;python_version>'3.5'",
# Networkx 2.4 is the final version with python 3.5 support.
"networkx>=2.2,<2.4;python_version=='3.5'",
"numpy>=1.13",
"ply>=3.10",
"psutil>=5",
"scipy>=1.0",
"sympy>=1.3",
"dill>=0.3",
]
# Add Cython extensions here
CYTHON_EXTS = ['utils', 'swap_trial']
CYTHON_MODULE = 'qiskit.transpiler.passes.routing.cython.stochastic_swap'
CYTHON_SOURCE_DIR = 'qiskit/transpiler/passes/routing/cython/stochastic_swap'
INCLUDE_DIRS = []
# Extra link args
LINK_FLAGS = []
# If on Win and not in MSYS2 (i.e. Visual studio compile)
if (sys.platform == 'win32' and os.environ.get('MSYSTEM') is None):
COMPILER_FLAGS = ['/O2']
# Everything else
else:
COMPILER_FLAGS = ['-O2', '-funroll-loops', '-std=c++11']
if sys.platform == 'darwin':
# These are needed for compiling on OSX 10.14+
COMPILER_FLAGS.append('-mmacosx-version-min=10.9')
LINK_FLAGS.append('-mmacosx-version-min=10.9')
EXT_MODULES = []
# Add Cython Extensions
for ext in CYTHON_EXTS:
mod = Extension(CYTHON_MODULE + '.' + ext,
sources=[CYTHON_SOURCE_DIR + '/' + ext + '.pyx'],
include_dirs=INCLUDE_DIRS,
extra_compile_args=COMPILER_FLAGS,
extra_link_args=LINK_FLAGS,
language='c++')
EXT_MODULES.append(mod)
# Read long description from README.
README_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'README.md')
with open(README_PATH) as readme_file:
README = readme_file.read()
setup(
name="qiskit-terra",
version="0.13.0",
description="Software for developing quantum computing programs",
long_description=README,
long_description_content_type='text/markdown',
url="https://github.com/Qiskit/qiskit-terra",
author="Qiskit Development Team",
author_email="qiskit@qiskit.org",
license="Apache 2.0",
classifiers=[
"Environment :: Console",
"License :: OSI Approved :: Apache Software License",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Scientific/Engineering",
],
keywords="qiskit sdk quantum",
packages=find_packages(exclude=['test*']),
install_requires=REQUIREMENTS,
setup_requires=['Cython>=0.27.1'],
include_package_data=True,
python_requires=">=3.5",
extras_require={
'visualization': ['matplotlib>=2.1', 'ipywidgets>=7.3.0',
'pydot', "pillow>=4.2.1", "pylatexenc>=1.4",
"seaborn>=0.9.0"],
'full-featured-simulators': ['qiskit-aer>=0.1'],
'crosstalk-pass': ['z3-solver>=4.7'],
},
project_urls={
"Bug Tracker": "https://github.com/Qiskit/qiskit-terra/issues",
"Documentation": "https://qiskit.org/documentation/",
"Source Code": "https://github.com/Qiskit/qiskit-terra",
},
ext_modules=cythonize(EXT_MODULES),
zip_safe=False
)
| 34.632
| 79
| 0.637792
|
4a03a1a5be0741377fd495149846b0ba78edd314
| 3,069
|
py
|
Python
|
code/venv/lib/python3.6/site-packages/pgadmin4/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_add.py
|
jhkuang11/UniTrade
|
5f68b853926e167936b58c8543b8f95ebd6f5211
|
[
"MIT"
] | null | null | null |
code/venv/lib/python3.6/site-packages/pgadmin4/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_add.py
|
jhkuang11/UniTrade
|
5f68b853926e167936b58c8543b8f95ebd6f5211
|
[
"MIT"
] | 10
|
2020-06-05T19:42:26.000Z
|
2022-03-11T23:38:35.000Z
|
code/venv/lib/python3.6/site-packages/pgadmin4/pgadmin/browser/server_groups/servers/databases/schemas/tables/indexes/tests/test_indexes_add.py
|
jhkuang11/UniTrade
|
5f68b853926e167936b58c8543b8f95ebd6f5211
|
[
"MIT"
] | null | null | null |
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2017, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import json
import uuid
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
class IndexesAddTestCase(BaseTestGenerator):
"""This class will add new index to existing table column"""
scenarios = [
('Add index Node URL', dict(url='/browser/index/obj/'))
]
def setUp(self):
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add a table.")
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add a table.")
self.table_name = "table_for_column_%s" % (str(uuid.uuid4())[1:6])
self.table_id = tables_utils.create_table(self.server, self.db_name,
self.schema_name,
self.table_name)
def runTest(self):
"""This function will add index to existing table column."""
self.index_name = "test_index_add_%s" % (str(uuid.uuid4())[1:6])
data = {"name": self.index_name,
"spcname": "pg_default",
"amname": "btree",
"columns": [
{"colname": "id", "sort_order": False, "nulls": False}]}
response = self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) +
'/' + str(self.schema_id) + '/' + str(self.table_id) + '/',
data=json.dumps(data),
content_type='html/json')
self.assertEquals(response.status_code, 200)
def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)
| 43.842857
| 76
| 0.576083
|
4a03a3c78295802c208b86ec41d7dba1f343076c
| 959
|
py
|
Python
|
tests/api/products/test_products.py
|
lionsdigitalsolutions/python-amazon-sp-api
|
7374523ebc65e2e01e37d03fc4009a44fabf2c3b
|
[
"MIT"
] | null | null | null |
tests/api/products/test_products.py
|
lionsdigitalsolutions/python-amazon-sp-api
|
7374523ebc65e2e01e37d03fc4009a44fabf2c3b
|
[
"MIT"
] | null | null | null |
tests/api/products/test_products.py
|
lionsdigitalsolutions/python-amazon-sp-api
|
7374523ebc65e2e01e37d03fc4009a44fabf2c3b
|
[
"MIT"
] | null | null | null |
import urllib
from sp_api.api.products.products import Products
from sp_api.base import Marketplaces, SellingApiBadRequestException
def test_pricing_for_sku():
    """Pricing lookup by SKU should report a successful per-item status."""
    payload = Products().get_product_pricing_for_skus([]).payload
    assert payload[0].get('status') == 'Success'
def test_pricing_for_asin():
    """Pricing lookup by ASIN should report a successful per-item status."""
    payload = Products().get_product_pricing_for_asins([]).payload
    assert payload[0].get('status') == 'Success'
def test_pricing_for_asin_expect_400():
    """A malformed pricing request must surface a 400 bad-request error.

    Bug fix: the original version passed silently when no exception was
    raised at all; the ``else`` branch makes that case an explicit failure.
    """
    try:
        Products().get_product_pricing_for_skus(['TEST_CASE_400'],
                                                MarketplaceId='TEST_CASE_400')
    except SellingApiBadRequestException as br:
        assert br.code == 400
    else:
        raise AssertionError('expected SellingApiBadRequestException (HTTP 400)')
# def test_competitive_pricing_for_sku():
# res = Products().get_competitive_pricing_for_skus([])
# assert res.payload[0].get('status') == 'Success'
#
#
# def test_competitive_pricing_for_asin():
# res = Products().get_competitive_pricing_for_asins([])
# assert res.payload[0].get('status') == 'Success'
| 29.060606
| 97
| 0.728884
|
4a03a58b13f38c71eafa4ba96b0912c5f42615b3
| 11,197
|
py
|
Python
|
library/ipadm_addr.py
|
xen0l/ansible-illumos
|
979e1337ac128f7fccb9c49cde5452feaac95f1c
|
[
"BSD-3-Clause"
] | null | null | null |
library/ipadm_addr.py
|
xen0l/ansible-illumos
|
979e1337ac128f7fccb9c49cde5452feaac95f1c
|
[
"BSD-3-Clause"
] | null | null | null |
library/ipadm_addr.py
|
xen0l/ansible-illumos
|
979e1337ac128f7fccb9c49cde5452feaac95f1c
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Adam Števko <adam.stevko@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: ipadm_addr
short_description: Manage IP addresses on an interface.
description:
- Create/delete static/dynamic IP addresses on network interfaces on Solaris/illumos systems.
- Up/down static/dynamic IP addresses on network interfaces on Solaris/illumos systems.
- Manage IPv6 link-local addresses on network interfaces on Solaris/illumos systems.
version_added: "2.1"
author: Adam Števko (@xen0l)
options:
address:
description:
- Specifiies an IP address to configure in CIDR notation.
required: false
aliases: [ 'addr' ]
addrtype:
description:
- Specifiies a type of IP address to configure.
required: false
default: static
choices: [ 'static', 'dhcp', 'addrconf' ]
addrobj:
description:
- Specifies an unique IP address on the system.
required: true
temporary:
description:
- Specifies that the configured IP address is temporary. Temporary
IP addresses do not persist across reboots.
required: false
default: false
wait:
description:
- Specifies the time in seconds we wait for obtaining address via DHCP.
required: false
default: 60
state:
description:
- Create/delete/enable/disable an IP address on the network interface.
required: false
default: present
choices: [ 'absent', 'present', 'up', 'down', 'enabled', 'disabled', 'refreshed' ]
'''
EXAMPLES = '''
# Configure IP address 10.0.0.1 on e1000g0
ipadm_addr: addr=10.0.0.1/32 addrobj=e1000g0/v4 state=present
# Delete addrobj
ipadm_addr: addrobj=e1000g0/v4 state=absent
# Configure link-local IPv6 address
ipadm_addr: addtype=addrconf addrobj=vnic0/v6
# Configure address via DHCP and wait 180 seconds for address obtaining
ipadm_addr: addrobj=vnic0/dhcp addrtype=dhcp wait=180
'''
import socket
SUPPORTED_TYPES = ['static', 'addrconf', 'dhcp']
class Addr(object):
    """Wrapper around the illumos ``ipadm`` address sub-commands.

    Each method builds the corresponding ``ipadm`` command line from the
    Ansible module parameters and runs it via ``module.run_command``.
    """

    def __init__(self, module):
        self.module = module
        self.address = module.params['address']
        self.addrtype = module.params['addrtype']
        self.addrobj = module.params['addrobj']
        self.temporary = module.params['temporary']
        self.state = module.params['state']
        self.wait = module.params['wait']

    def is_cidr_notation(self):
        """Return True when the address carries exactly one '/' (CIDR form)."""
        return self.address.count('/') == 1

    def is_valid_address(self):
        """Validate the host part of the address as either IPv4 or IPv6."""
        ip_address = self.address.split('/')[0]
        try:
            # Dotted quad -> IPv4; anything else is treated as IPv6.
            if len(ip_address.split('.')) == 4:
                socket.inet_pton(socket.AF_INET, ip_address)
            else:
                socket.inet_pton(socket.AF_INET6, ip_address)
        except socket.error:
            return False
        return True

    def is_dhcp(self):
        """Return True if ipadm reports the addrobj's type as 'dhcp'.

        Calls fail_json (which exits the module) when the type cannot be
        queried at all.
        """
        cmd = [self.module.get_bin_path('ipadm'),
               'show-addr', '-p', '-o', 'type', self.addrobj]

        (rc, out, err) = self.module.run_command(cmd)
        if rc == 0:
            return out.rstrip() == 'dhcp'
        else:
            self.module.fail_json(
                msg='Wrong addrtype %s for addrobj "%s": %s' % (out, self.addrobj, err),
                rc=rc,
                stderr=err)

    def addrobj_exists(self):
        """Return True if the address object is already configured."""
        cmd = [self.module.get_bin_path('ipadm'), 'show-addr', self.addrobj]
        (rc, _, _) = self.module.run_command(cmd)
        return rc == 0

    def delete_addr(self):
        """Run ``ipadm delete-addr`` for the address object."""
        cmd = [self.module.get_bin_path('ipadm'), 'delete-addr', self.addrobj]
        return self.module.run_command(cmd)

    def create_addr(self):
        """Run ``ipadm create-addr`` for the address object."""
        cmd = [self.module.get_bin_path('ipadm'), 'create-addr',
               '-T', self.addrtype]

        if self.temporary:
            cmd.append('-t')

        if self.addrtype == 'static':
            cmd.append('-a')
            cmd.append(self.address)

        if self.addrtype == 'dhcp' and self.wait:
            cmd.append('-w')
            # Bug fix: wait defaults to the int 60; run_command expects
            # every argv element to be a string.
            cmd.append(str(self.wait))

        cmd.append(self.addrobj)
        return self.module.run_command(cmd)

    def up_addr(self):
        """Run ``ipadm up-addr`` (bring a static address up)."""
        cmd = [self.module.get_bin_path('ipadm'), 'up-addr']
        if self.temporary:
            cmd.append('-t')
        cmd.append(self.addrobj)
        return self.module.run_command(cmd)

    def down_addr(self):
        """Run ``ipadm down-addr`` (bring a static address down)."""
        cmd = [self.module.get_bin_path('ipadm'), 'down-addr']
        if self.temporary:
            cmd.append('-t')
        cmd.append(self.addrobj)
        return self.module.run_command(cmd)

    def enable_addr(self):
        """Run ``ipadm enable-addr``; ipadm only supports this temporarily."""
        cmd = [self.module.get_bin_path('ipadm'), 'enable-addr', '-t',
               self.addrobj]
        return self.module.run_command(cmd)

    def disable_addr(self):
        """Run ``ipadm disable-addr``; ipadm only supports this temporarily."""
        cmd = [self.module.get_bin_path('ipadm'), 'disable-addr', '-t',
               self.addrobj]
        return self.module.run_command(cmd)

    def refresh_addr(self):
        """Run ``ipadm refresh-addr`` (e.g. renew a DHCP lease)."""
        cmd = [self.module.get_bin_path('ipadm'), 'refresh-addr', self.addrobj]
        return self.module.run_command(cmd)
def main():
    """Ansible entry point: map the requested state onto an ipadm action.

    For every state: the action is only attempted when it would actually
    change something (existence check first), check mode short-circuits
    with ``changed=True``, and any non-zero ipadm exit aborts via
    ``fail_json``.
    """
    module = AnsibleModule(
        argument_spec=dict(
            address=dict(aliases=['addr']),
            addrtype=dict(default='static', choices=SUPPORTED_TYPES),
            addrobj=dict(required=True),
            temporary=dict(default=False, type='bool'),
            state=dict(
                default='present', choices=['absent', 'present', 'up', 'down', 'enabled', 'disabled', 'refreshed']),
            wait=dict(default=60),
        ),
        # NOTE(review): 'wait' has a default, so this mutual exclusion looks
        # like it would always trip when 'address' is given — confirm how
        # AnsibleModule treats defaulted params here.
        mutually_exclusive=[
            ('address', 'wait'),
        ],
        supports_check_mode=True
    )

    addr = Addr(module)

    # rc stays None when no ipadm command ran; used below to set 'changed'.
    rc = None
    out = ''
    err = ''
    result = {}
    result['addrobj'] = addr.addrobj
    result['state'] = addr.state
    result['temporary'] = addr.temporary
    result['addrtype'] = addr.addrtype

    # Static addresses must be valid CIDR before we touch the system.
    if addr.addrtype == 'static' and addr.address:
        if addr.is_cidr_notation() and addr.is_valid_address():
            result['address'] = addr.address
        else:
            module.fail_json(msg='Invalid IP address: %s' % addr.address)

    if addr.addrtype == 'dhcp' and addr.wait:
        result['wait'] = addr.wait

    if addr.state == 'absent':
        if addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.delete_addr()
            if rc != 0:
                module.fail_json(msg='Error while deleting addrobj: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=rc)

    elif addr.state == 'present':
        if not addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.create_addr()
            if rc != 0:
                module.fail_json(msg='Error while configuring IP address: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 addr=addr.address,
                                 stderr=err,
                                 rc=rc)

    elif addr.state == 'up':
        if addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.up_addr()
            if rc != 0:
                module.fail_json(msg='Error while bringing IP address up: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=rc)

    elif addr.state == 'down':
        if addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.down_addr()
            if rc != 0:
                module.fail_json(msg='Error while bringing IP address down: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=rc)

    elif addr.state == 'refreshed':
        # Refresh only makes sense for DHCP-managed address objects.
        if addr.addrobj_exists():
            if addr.is_dhcp():
                if module.check_mode:
                    module.exit_json(changed=True)

                (rc, out, err) = addr.refresh_addr()
                if rc != 0:
                    module.fail_json(msg='Error while refreshing IP address: "%s"' % err,
                                     addrobj=addr.addrobj,
                                     stderr=err,
                                     rc=rc)
            else:
                module.fail_json(msg='state "refreshed" cannot be used with "%s" addrtype' % addr.addrtype,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=1)

    elif addr.state == 'enabled':
        if addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.enable_addr()
            if rc != 0:
                module.fail_json(msg='Error while enabling IP address: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=rc)

    elif addr.state == 'disabled':
        if addr.addrobj_exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = addr.disable_addr()
            if rc != 0:
                module.fail_json(msg='Error while disabling IP address: "%s"' % err,
                                 addrobj=addr.addrobj,
                                 stderr=err,
                                 rc=rc)

    # rc is still None only when every branch above was a no-op.
    if rc is None:
        result['changed'] = False
    else:
        result['changed'] = True

    if out:
        result['stdout'] = out

    if err:
        result['stderr'] = err

    module.exit_json(**result)
from ansible.module_utils.basic import *
main()
| 29.938503
| 116
| 0.543806
|
4a03a58c5ac5aa47a959115a6cd9abfbf6c085b2
| 435
|
py
|
Python
|
books/migrations/0002_auto_20210103_1850.py
|
Marlinekhavele/RentalStore
|
f86f16a18193f05476f81007df70032d406e1156
|
[
"Unlicense"
] | null | null | null |
books/migrations/0002_auto_20210103_1850.py
|
Marlinekhavele/RentalStore
|
f86f16a18193f05476f81007df70032d406e1156
|
[
"Unlicense"
] | null | null | null |
books/migrations/0002_auto_20210103_1850.py
|
Marlinekhavele/RentalStore
|
f86f16a18193f05476f81007df70032d406e1156
|
[
"Unlicense"
] | 1
|
2021-01-03T19:31:15.000Z
|
2021-01-03T19:31:15.000Z
|
# Generated by Django 3.0.7 on 2021-01-03 18:50
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    # Auto-generated migration: switches Book.id from the default integer
    # primary key to a non-editable UUID primary key (default uuid.uuid4).

    dependencies = [
        ('books', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='book',
            name='id',
            field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False),
        ),
    ]
| 21.75
| 106
| 0.616092
|
4a03a59483dc9748d6478a6f82b8bedb07f4a8e0
| 4,664
|
py
|
Python
|
pytorch_optimizer/sam.py
|
gheyret/pytorch_optimizer
|
34e6fd04c76332c2611ad65180b976a2ec7da8eb
|
[
"Apache-2.0"
] | 20
|
2021-09-21T09:36:00.000Z
|
2022-03-16T11:45:55.000Z
|
pytorch_optimizer/sam.py
|
gheyret/pytorch_optimizer
|
34e6fd04c76332c2611ad65180b976a2ec7da8eb
|
[
"Apache-2.0"
] | 14
|
2021-11-01T12:37:59.000Z
|
2022-03-05T18:33:12.000Z
|
pytorch_optimizer/sam.py
|
gheyret/pytorch_optimizer
|
34e6fd04c76332c2611ad65180b976a2ec7da8eb
|
[
"Apache-2.0"
] | 4
|
2021-10-31T08:36:38.000Z
|
2022-02-26T12:49:42.000Z
|
from typing import Dict
import torch
from torch.optim.optimizer import Optimizer
from pytorch_optimizer.base_optimizer import BaseOptimizer
from pytorch_optimizer.types import CLOSURE, DEFAULTS, PARAMETERS
class SAM(Optimizer, BaseOptimizer):
    """Sharpness-Aware Minimization wrapper around a base optimizer.

    Reference : https://github.com/davda54/sam

    Example :
        from pytorch_optimizer import SAM
        ...
        model = YourModel()
        base_optimizer = Ranger21
        optimizer = SAM(model.parameters(), base_optimizer)
        ...
        for input, output in data:
            # first forward-backward pass
            # use this loss for any training statistics
            loss = loss_function(output, model(input))
            loss.backward()
            optimizer.first_step(zero_grad=True)

            # second forward-backward pass
            # make sure to do a full forward pass
            loss_function(output, model(input)).backward()
            optimizer.second_step(zero_grad=True)

    Alternative Example with a single closure-based step function:
        from pytorch_optimizer import SAM
        ...
        model = YourModel()
        base_optimizer = Ranger21
        optimizer = SAM(model.parameters(), base_optimizer)

        def closure():
            loss = loss_function(output, model(input))
            loss.backward()
            return loss
        ...
        for input, output in data:
            loss = loss_function(output, model(input))
            loss.backward()
            optimizer.step(closure)
            optimizer.zero_grad()
    """

    def __init__(
        self,
        params: PARAMETERS,
        base_optimizer,
        rho: float = 0.05,
        adaptive: bool = False,
        **kwargs,
    ):
        """SAM
        :param params: PARAMETERS. iterable of parameters to optimize or dicts defining parameter groups
        :param base_optimizer: Optimizer. base optimizer
        :param rho: float. size of the neighborhood for computing the max loss
        :param adaptive: bool. element-wise Adaptive SAM
        :param kwargs: Dict. parameters for optimizer.
        """
        self.rho = rho

        self.validate_parameters()

        defaults: DEFAULTS = dict(rho=rho, adaptive=adaptive, **kwargs)
        super().__init__(params, defaults)

        # SAM shares its param groups with the wrapped base optimizer.
        self.base_optimizer = base_optimizer(self.param_groups, **kwargs)
        self.param_groups = self.base_optimizer.param_groups

    def validate_parameters(self):
        self.validate_rho(self.rho)

    @torch.no_grad()
    def reset(self):
        pass

    @torch.no_grad()
    def first_step(self, zero_grad: bool = False):
        """Perturb parameters to "w + e(w)" (local loss maximum direction)."""
        grad_norm = self.grad_norm()
        for group in self.param_groups:
            scale = group['rho'] / (grad_norm + 1e-12)

            for p in group['params']:
                if p.grad is None:
                    continue

                # remember the original weights so second_step can restore them
                self.state[p]['old_p'] = p.clone()
                e_w = (torch.pow(p, 2) if group['adaptive'] else 1.0) * p.grad * scale.to(p)

                # climb to the local maximum "w + e(w)"
                p.add_(e_w)

        if zero_grad:
            self.zero_grad()

    @torch.no_grad()
    def second_step(self, zero_grad: bool = False):
        """Restore "w" and apply the base optimizer's update with the
        gradients computed at "w + e(w)"."""
        for group in self.param_groups:
            for p in group['params']:
                if p.grad is None:
                    continue

                # get back to "w" from "w + e(w)".
                # Bug fix: the original `p = self.state[p]['old_p']` only
                # rebound the local name and never restored the parameter
                # tensor, so the base step ran from the perturbed weights.
                p.data = self.state[p]['old_p']

        # do the actual "sharpness-aware" update
        self.base_optimizer.step()

        if zero_grad:
            self.zero_grad()

    @torch.no_grad()
    def step(self, closure: CLOSURE = None):
        """Single-call variant: the closure must do a full forward-backward."""
        if closure is None:
            raise RuntimeError('[-] Sharpness Aware Minimization (SAM) requires closure')

        self.first_step(zero_grad=True)

        # the closure should do a full forward-backward pass
        with torch.enable_grad():
            closure()

        self.second_step()

    def grad_norm(self) -> torch.Tensor:
        """L2 norm of all gradients (optionally element-wise adaptive)."""
        # put everything on the same device, in case of model parallelism
        shared_device = self.param_groups[0]['params'][0].device
        return torch.norm(
            torch.stack(
                [
                    ((torch.abs(p) if group['adaptive'] else 1.0) * p.grad).norm(p=2).to(shared_device)
                    for group in self.param_groups
                    for p in group['params']
                    if p.grad is not None
                ]
            ),
            p=2,
        )

    def load_state_dict(self, state_dict: Dict):
        super().load_state_dict(state_dict)
        self.base_optimizer.param_groups = self.param_groups
| 30.887417
| 104
| 0.57783
|
4a03a648f2be39e1f4e59f9b9b005f164f484013
| 1,609
|
gyp
|
Python
|
win8/delegate_execute/delegate_execute.gyp
|
junmin-zhu/chromium-rivertrail
|
eb1a57aca71fe68d96e48af8998dcfbe45171ee1
|
[
"BSD-3-Clause"
] | 5
|
2018-03-10T13:08:42.000Z
|
2021-07-26T15:02:11.000Z
|
win8/delegate_execute/delegate_execute.gyp
|
sanyaade-mobiledev/chromium.src
|
d496dfeebb0f282468827654c2b3769b3378c087
|
[
"BSD-3-Clause"
] | 1
|
2015-07-21T08:02:01.000Z
|
2015-07-21T08:02:01.000Z
|
win8/delegate_execute/delegate_execute.gyp
|
jianglong0156/chromium.src
|
d496dfeebb0f282468827654c2b3769b3378c087
|
[
"BSD-3-Clause"
] | 6
|
2016-11-14T10:13:35.000Z
|
2021-01-23T15:29:53.000Z
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'../../build/win_precompile.gypi',
],
'targets': [
{
'target_name': 'delegate_execute',
'type': 'executable',
'dependencies': [
'../../base/base.gyp:base',
'../../chrome/chrome.gyp:installer_util',
'../../google_update/google_update.gyp:google_update',
'../../ui/ui.gyp:ui',
'../../win8/win8.gyp:check_sdk_patch',
],
'sources': [
'chrome_util.cc',
'chrome_util.h',
'command_execute_impl.cc',
'command_execute_impl.h',
'command_execute_impl.rgs',
'delegate_execute.cc',
'delegate_execute.rc',
'delegate_execute.rgs',
'delegate_execute_operation.cc',
'delegate_execute_operation.h',
'delegate_execute_util.cc',
'delegate_execute_util.h',
'resource.h',
],
'msvs_settings': {
'VCLinkerTool': {
'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
},
},
},
{
'target_name': 'delegate_execute_unittests',
'type': 'executable',
'dependencies': [
'../../base/base.gyp:base',
'../../base/base.gyp:run_all_unittests',
'../../testing/gtest.gyp:gtest',
],
'sources': [
'delegate_execute_util.cc',
'delegate_execute_util.h',
'delegate_execute_util_unittest.cc',
],
},
],
}
| 27.271186
| 72
| 0.551274
|
4a03a6c11d0d95e84eb54672f97d7ab5c5070df2
| 22,431
|
py
|
Python
|
warehouse/accounts/services.py
|
MHalseyPerry/warehouse
|
c80fc4cb7c2b97a7a19507dd56cc46a0a876ad86
|
[
"Apache-2.0"
] | 2
|
2021-10-11T21:45:06.000Z
|
2021-11-17T10:29:15.000Z
|
warehouse/accounts/services.py
|
MHalseyPerry/warehouse
|
c80fc4cb7c2b97a7a19507dd56cc46a0a876ad86
|
[
"Apache-2.0"
] | null | null | null |
warehouse/accounts/services.py
|
MHalseyPerry/warehouse
|
c80fc4cb7c2b97a7a19507dd56cc46a0a876ad86
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import functools
import hashlib
import hmac
import logging
import posixpath
import urllib.parse
import uuid
import requests
from passlib.context import CryptContext
from sqlalchemy.orm.exc import NoResultFound
from zope.interface import implementer
import warehouse.utils.otp as otp
import warehouse.utils.webauthn as webauthn
from warehouse.accounts.interfaces import (
IPasswordBreachedService,
ITokenService,
IUserService,
TokenExpired,
TokenInvalid,
TokenMissing,
TooManyFailedLogins,
)
from warehouse.accounts.models import Email, User, WebAuthn
from warehouse.metrics import IMetricsService
from warehouse.rate_limiting import DummyRateLimiter, IRateLimiter
from warehouse.utils.crypto import BadData, SignatureExpired, URLSafeTimedSerializer
logger = logging.getLogger(__name__)
PASSWORD_FIELD = "password"
@implementer(IUserService)
class DatabaseUserService:
    """User account service backed by the warehouse SQLAlchemy session.

    Handles password verification (with global and per-user rate limiting),
    TOTP and WebAuthn second factors, and user/email bookkeeping.
    """

    def __init__(self, session, *, ratelimiters=None, metrics):
        # Any limiter not supplied falls back to a no-op DummyRateLimiter.
        if ratelimiters is None:
            ratelimiters = {}
        ratelimiters = collections.defaultdict(DummyRateLimiter, ratelimiters)

        self.db = session
        self.ratelimiters = ratelimiters
        self.hasher = CryptContext(
            schemes=[
                "argon2",
                "bcrypt_sha256",
                "bcrypt",
                "django_bcrypt",
                "unix_disabled",
            ],
            deprecated=["auto"],
            truncate_error=True,
            # Argon 2 Configuration
            argon2__memory_cost=1024,
            argon2__parallelism=6,
            argon2__time_cost=6,
        )
        self._metrics = metrics

    # NOTE(review): lru_cache on bound methods keeps `self` (and cached User
    # rows) alive for the cache's lifetime; presumably acceptable because the
    # service is request-scoped — confirm before reusing it longer-lived.
    @functools.lru_cache()
    def get_user(self, userid):
        # TODO: We probably don't actually want to just return the database
        # object here.
        # TODO: We need some sort of Anonymous User.
        return self.db.query(User).get(userid)

    @functools.lru_cache()
    def get_user_by_username(self, username):
        """Return the User for *username*, or None if unknown."""
        user_id = self.find_userid(username)
        return None if user_id is None else self.get_user(user_id)

    @functools.lru_cache()
    def get_user_by_email(self, email):
        """Return the User owning *email*, or None if unknown."""
        user_id = self.find_userid_by_email(email)
        return None if user_id is None else self.get_user(user_id)

    @functools.lru_cache()
    def find_userid(self, username):
        """Return the user id for *username*, or None if no such user."""
        try:
            user = self.db.query(User.id).filter(User.username == username).one()
        except NoResultFound:
            return

        return user.id

    @functools.lru_cache()
    def find_userid_by_email(self, email):
        """Return the user id owning *email*, or None if no such address."""
        try:
            # flake8: noqa
            user_id = (self.db.query(Email.user_id).filter(Email.email == email).one())[
                0
            ]
        except NoResultFound:
            return

        return user_id

    def check_password(self, userid, password, *, tags=None):
        """Verify *password* for *userid* under the login rate limits.

        Returns True on success, False on failure (recording a rate-limit
        hit); raises TooManyFailedLogins when a limiter trips.
        """
        tags = tags if tags is not None else []

        self._metrics.increment("warehouse.authentication.start", tags=tags)

        # The very first thing we want to do is check to see if we've hit our
        # global rate limit or not, assuming that we've been configured with a
        # global rate limiter anyways.
        if not self.ratelimiters["global"].test():
            logger.warning("Global failed login threshold reached.")
            self._metrics.increment(
                "warehouse.authentication.ratelimited",
                tags=tags + ["ratelimiter:global"],
            )
            raise TooManyFailedLogins(resets_in=self.ratelimiters["global"].resets_in())

        user = self.get_user(userid)
        if user is not None:
            # Now, check to make sure that we haven't hitten a rate limit on a
            # per user basis.
            if not self.ratelimiters["user"].test(user.id):
                self._metrics.increment(
                    "warehouse.authentication.ratelimited",
                    tags=tags + ["ratelimiter:user"],
                )
                raise TooManyFailedLogins(
                    resets_in=self.ratelimiters["user"].resets_in(user.id)
                )

            # Actually check our hash, optionally getting a new hash for it if
            # we should upgrade our saved hashed.
            ok, new_hash = self.hasher.verify_and_update(password, user.password)

            # First, check to see if the password that we were given was OK.
            if ok:
                # Then, if the password was OK check to see if we've been given
                # a new password hash from the hasher, if so we'll want to save
                # that hash.
                if new_hash:
                    user.password = new_hash

                self._metrics.increment("warehouse.authentication.ok", tags=tags)

                return True
            else:
                self._metrics.increment(
                    "warehouse.authentication.failure",
                    tags=tags + ["failure_reason:password"],
                )
        else:
            self._metrics.increment(
                "warehouse.authentication.failure", tags=tags + ["failure_reason:user"]
            )

        # If we've gotten here, then we'll want to record a failed login in our
        # rate limiting before returning False to indicate a failed password
        # verification.
        if user is not None:
            self.ratelimiters["user"].hit(user.id)
        self.ratelimiters["global"].hit()

        return False

    def create_user(self, username, name, password):
        """Create, add, and flush a new User; the password is hashed here."""
        user = User(username=username, name=name, password=self.hasher.hash(password))
        self.db.add(user)
        self.db.flush()  # flush the db now so user.id is available

        return user

    def add_email(self, user_id, email_address, primary=None, verified=False):
        """Attach *email_address* to the user and flush it to the DB."""
        user = self.get_user(user_id)

        # If primary is None, then we're going to auto detect whether this should be the
        # primary address or not. The basic rule is that if the user doesn't already
        # have a primary address, then the address we're adding now is going to be
        # set to their primary.
        if primary is None:
            primary = True if user.primary_email is None else False

        email = Email(
            email=email_address, user=user, primary=primary, verified=verified
        )
        self.db.add(email)
        self.db.flush()  # flush the db now so email.id is available

        return email

    def update_user(self, user_id, **changes):
        """Apply attribute *changes* to the user; any new password is hashed."""
        user = self.get_user(user_id)
        for attr, value in changes.items():
            if attr == PASSWORD_FIELD:
                value = self.hasher.hash(value)
            setattr(user, attr, value)

        # If we've given the user a new password, then we also want to unset the
        # reason for disable... because a new password means no more disabled
        # user.
        if PASSWORD_FIELD in changes:
            user.disabled_for = None

        return user

    def disable_password(self, user_id, reason=None):
        """Replace the user's password hash with a disabled marker."""
        user = self.get_user(user_id)
        user.password = self.hasher.disable()
        user.disabled_for = reason

    def is_disabled(self, user_id):
        """Return a (disabled, reason) tuple for the user's password state."""
        user = self.get_user(user_id)

        # User is not disabled.
        if self.hasher.is_enabled(user.password):
            return (False, None)
        # User is disabled.
        else:
            return (True, user.disabled_for)

    def has_two_factor(self, user_id):
        """
        Returns True if the user has any form of two factor
        authentication and is allowed to use it.
        """
        user = self.get_user(user_id)

        return user.has_two_factor

    def has_totp(self, user_id):
        """
        Returns True if the user has a TOTP device provisioned.
        """
        user = self.get_user(user_id)

        return user.totp_secret is not None

    def has_webauthn(self, user_id):
        """
        Returns True if the user has a security key provisioned.
        """
        user = self.get_user(user_id)

        return len(user.webauthn) > 0

    def get_totp_secret(self, user_id):
        """
        Returns the user's TOTP secret as bytes.

        If the user doesn't have a TOTP, returns None.
        """
        user = self.get_user(user_id)

        return user.totp_secret

    def get_last_totp_value(self, user_id):
        """
        Returns the user's last (accepted) TOTP value.

        If the user doesn't have a TOTP or hasn't used their TOTP
        method, returns None.
        """
        user = self.get_user(user_id)

        return user.last_totp_value

    def check_totp_value(self, user_id, totp_value, *, tags=None):
        """
        Returns True if the given TOTP is valid against the user's secret.

        If the user doesn't have a TOTP secret or isn't allowed
        to use second factor methods, returns False.
        """
        tags = tags if tags is not None else []
        self._metrics.increment("warehouse.authentication.two_factor.start", tags=tags)

        # The very first thing we want to do is check to see if we've hit our
        # global rate limit or not, assuming that we've been configured with a
        # global rate limiter anyways.
        if not self.ratelimiters["global"].test():
            logger.warning("Global failed login threshold reached.")
            self._metrics.increment(
                "warehouse.authentication.two_factor.ratelimited",
                tags=tags + ["ratelimiter:global"],
            )
            raise TooManyFailedLogins(resets_in=self.ratelimiters["global"].resets_in())

        # Now, check to make sure that we haven't hitten a rate limit on a
        # per user basis.
        if not self.ratelimiters["user"].test(user_id):
            self._metrics.increment(
                "warehouse.authentication.two_factor.ratelimited",
                tags=tags + ["ratelimiter:user"],
            )
            raise TooManyFailedLogins(
                resets_in=self.ratelimiters["user"].resets_in(user_id)
            )

        totp_secret = self.get_totp_secret(user_id)
        if totp_secret is None:
            self._metrics.increment(
                "warehouse.authentication.two_factor.failure",
                tags=tags + ["failure_reason:no_totp"],
            )
            # If we've gotten here, then we'll want to record a failed attempt in our
            # rate limiting before returning False to indicate a failed totp
            # verification.
            self.ratelimiters["user"].hit(user_id)
            self.ratelimiters["global"].hit()
            return False

        # Reject a replay of the last accepted code.
        last_totp_value = self.get_last_totp_value(user_id)
        if last_totp_value is not None and totp_value == last_totp_value.encode():
            return False

        valid = otp.verify_totp(totp_secret, totp_value)

        if valid:
            self._metrics.increment("warehouse.authentication.two_factor.ok", tags=tags)
        else:
            self._metrics.increment(
                "warehouse.authentication.two_factor.failure",
                tags=tags + ["failure_reason:invalid_totp"],
            )
            # If we've gotten here, then we'll want to record a failed attempt in our
            # rate limiting before returning False to indicate a failed totp
            # verification.
            self.ratelimiters["user"].hit(user_id)
            self.ratelimiters["global"].hit()

        return valid

    def get_webauthn_credential_options(self, user_id, *, challenge, rp_name, rp_id):
        """
        Returns a dictionary of credential options suitable for beginning the WebAuthn
        provisioning process for the given user.
        """
        user = self.get_user(user_id)

        return webauthn.get_credential_options(
            user, challenge=challenge, rp_name=rp_name, rp_id=rp_id
        )

    def get_webauthn_assertion_options(self, user_id, *, challenge, rp_id):
        """
        Returns a dictionary of assertion options suitable for beginning the WebAuthn
        authentication process for the given user.
        """
        user = self.get_user(user_id)

        return webauthn.get_assertion_options(user, challenge=challenge, rp_id=rp_id)

    def verify_webauthn_credential(self, credential, *, challenge, rp_id, origin):
        """
        Checks whether the given credential is valid, i.e. suitable for generating
        assertions during authentication.

        Returns the validated credential on success, raises
        webauthn.RegistrationRejectedException on failure.
        """
        validated_credential = webauthn.verify_registration_response(
            credential, challenge=challenge, rp_id=rp_id, origin=origin
        )

        # Reject credential IDs that are already registered to any user.
        webauthn_cred = (
            self.db.query(WebAuthn)
            .filter_by(credential_id=validated_credential.credential_id.decode())
            .first()
        )

        if webauthn_cred is not None:
            raise webauthn.RegistrationRejectedException("Credential ID already in use")

        return validated_credential

    def verify_webauthn_assertion(
        self, user_id, assertion, *, challenge, origin, rp_id
    ):
        """
        Checks whether the given assertion was produced by the given user's WebAuthn
        device.

        Returns the updated signage count on success, raises
        webauthn.AuthenticationRejectedException on failure.
        """
        user = self.get_user(user_id)

        return webauthn.verify_assertion_response(
            assertion, challenge=challenge, user=user, origin=origin, rp_id=rp_id
        )

    def add_webauthn(self, user_id, **kwargs):
        """
        Adds a WebAuthn credential to the given user.

        Returns None if the user already has this credential.
        """
        user = self.get_user(user_id)

        webauthn = WebAuthn(user=user, **kwargs)
        self.db.add(webauthn)
        self.db.flush()  # flush the db now so webauthn.id is available

        return webauthn

    def get_webauthn_by_label(self, user_id, label):
        """
        Returns a WebAuthn credential for the given user by its label,
        or None if no credential for the user has this label.
        """
        user = self.get_user(user_id)

        return next(
            (credential for credential in user.webauthn if credential.label == label),
            None,
        )

    def get_webauthn_by_credential_id(self, user_id, credential_id):
        """
        Returns a WebAuthn credential for the given user by its credential ID,
        or None of the user doesn't have a credential with this ID.
        """
        user = self.get_user(user_id)

        return next(
            (
                credential
                for credential in user.webauthn
                if credential.credential_id == credential_id
            ),
            None,
        )

    def record_event(self, user_id, *, tag, ip_address, additional=None):
        """
        Creates a new UserEvent for the given user with the given
        tag, IP address, and additional metadata.

        Returns the event.
        """
        user = self.get_user(user_id)

        return user.record_event(tag=tag, ip_address=ip_address, additional=additional)
@implementer(ITokenService)
class TokenService:
    """Signs and verifies expiring, URL-safe tokens."""

    def __init__(self, secret, salt, max_age):
        self.serializer = URLSafeTimedSerializer(secret, salt=salt)
        self.max_age = max_age

    def dumps(self, data):
        # Stringify every value before signing so the payload is stable.
        stringified = {k: str(v) for k, v in data.items()}
        return self.serializer.dumps(stringified)

    def loads(self, token):
        """Verify *token* and return its payload, raising a Token* error
        for missing, expired, or otherwise invalid tokens."""
        if not token:
            raise TokenMissing

        try:
            return self.serializer.loads(token, max_age=self.max_age)
        except SignatureExpired:
            raise TokenExpired
        except BadData:  # any other signature/format problem
            raise TokenInvalid
def database_login_factory(context, request):
    """Build a DatabaseUserService wired to the request's database session,
    metrics service, and the global / per-user login rate limiters."""
    limiters = {
        "global": request.find_service(
            IRateLimiter, name="global.login", context=None
        ),
        "user": request.find_service(IRateLimiter, name="user.login", context=None),
    }
    metrics = request.find_service(IMetricsService, context=None)
    return DatabaseUserService(request.db, metrics=metrics, ratelimiters=limiters)
class TokenServiceFactory:
    """Pyramid service factory producing a named TokenService.

    The service name selects the ``token.<name>.*`` settings and doubles as
    the signing salt, so tokens from different services never verify against
    each other.
    """

    def __init__(self, name, service_class=TokenService):
        self.name = name
        self.service_class = service_class

    def __call__(self, context, request):
        settings = request.registry.settings
        secret = settings[f"token.{self.name}.secret"]
        max_age = settings.get(
            f"token.{self.name}.max_age", settings["token.default.max_age"]
        )
        # The service name is the unique salt.
        return self.service_class(secret, self.name, max_age)

    def __eq__(self, other):
        if not isinstance(other, TokenServiceFactory):
            return NotImplemented

        return (self.name, self.service_class) == (other.name, other.service_class)
@implementer(IPasswordBreachedService)
class HaveIBeenPwnedPasswordBreachedService:
    """Password-breach checker backed by the HaveIBeenPwned (HIBP) range API.

    Uses the k-anonymity range endpoint so the full password hash never
    leaves the process; fails open (treats the password as OK) when the
    HIBP service cannot be reached.
    """

    _failure_message_preamble = (
        "This password appears in a breach or has been compromised and cannot be used."
    )
    def __init__(
        self,
        *,
        session,
        metrics,
        api_base="https://api.pwnedpasswords.com",
        help_url=None,
    ):
        # session: requests-style HTTP session (only .get is used here).
        self._http = session
        self._api_base = api_base
        self._metrics = metrics
        # Optional FAQ link appended to the user-facing failure messages.
        self._help_url = help_url
    @classmethod
    def create_service(cls, context, request):
        """Service factory: wire in the request's HTTP session and metrics."""
        return cls(
            session=request.http,
            metrics=request.find_service(IMetricsService, context=None),
            help_url=request.help_url(_anchor="compromised-password"),
        )
    @property
    def failure_message(self):
        """HTML failure message, with an optional FAQ link appended."""
        message = self._failure_message_preamble
        if self._help_url:
            message += (
                f' See <a href="{self._help_url}">this FAQ entry</a> for more '
                "information."
            )
        return message
    @property
    def failure_message_plain(self):
        """Plain-text failure message (no markup), for non-HTML contexts."""
        message = self._failure_message_preamble
        if self._help_url:
            message += f" See the FAQ entry at {self._help_url} for more information."
        return message
    def _metrics_increment(self, *args, **kwargs):
        # Thin indirection over the metrics client's increment call.
        self._metrics.increment(*args, **kwargs)
    def _get_url(self, prefix):
        # Build e.g. <api_base>/range/<first-5-hex-chars-of-sha1>.
        return urllib.parse.urljoin(self._api_base, posixpath.join("/range/", prefix))
    def check_password(self, password, *, tags=None):
        """Return True when *password* appears in the HIBP breach corpus.

        Fails open (returns False) if HIBP is unreachable.
        """
        # The HIBP API implements a k-Anonymity scheme, by which you can take a given
        # password, hash it using sha1, and then send only the first 5 characters of the
        # hex encoded digest. This avoids leaking data to the HIBP API, because without
        # the rest of the hash, the HIBP service cannot even begin to brute force or do
        # a reverse lookup to determine what password has just been sent to it. For More
        # information see:
        # https://www.troyhunt.com/ive-just-launched-pwned-passwords-version-2/
        self._metrics_increment("warehouse.compromised_password_check.start", tags=tags)
        # To work with the HIBP API, we need the sha1 of the UTF8 encoded password.
        hashed_password = hashlib.sha1(password.encode("utf8")).hexdigest().lower()
        # Fetch the passwords from the HIBP data set.
        try:
            resp = self._http.get(self._get_url(hashed_password[:5]))
            resp.raise_for_status()
        except requests.RequestException as exc:
            logger.warning("Error contacting HaveIBeenPwned: %r", exc)
            self._metrics_increment(
                "warehouse.compromised_password_check.error", tags=tags
            )
            # If we've failed to contact the HIBP service for some reason, we're going
            # to "fail open" and allow the password. That's a better option then just
            # hard failing whatever the user is attempting to do.
            return False
        # The dataset that comes back from HIBP looks like:
        #
        #   0018A45C4D1DEF81644B54AB7F969B88D65:1
        #   00D4F6E8FA6EECAD2A3AA415EEC418D38EC:2
        #   011053FD0102E94D6AE2F8B83D76FAF94F6:1
        #   012A7CA357541F0AC487871FEEC1891C49C:2
        #   0136E006E24E7D152139815FB0FC6A50B15:2
        #   ...
        #
        # That is, it is a line delimited textual data, where each line is a hash, a
        # colon, and then the number of times that password has appeared in a breach.
        # For our uses, we're going to consider any password that has ever appeared in
        # a breach to be insecure, even if only once.
        for line in resp.text.splitlines():
            possible, _ = line.split(":")
            if hashed_password[5:] == possible.lower():
                self._metrics_increment(
                    "warehouse.compromised_password_check.compromised", tags=tags
                )
                return True
        # If we made it to this point, then the password is safe.
        self._metrics_increment("warehouse.compromised_password_check.ok", tags=tags)
        return False
@implementer(IPasswordBreachedService)
class NullPasswordBreachedService:
    """No-op breach checker: never reports a password as compromised."""

    failure_message = "This password appears in a breach."
    failure_message_plain = "This password appears in a breach."

    @classmethod
    def create_service(cls, context, request):
        """Service factory hook; the null implementation needs no collaborators."""
        return cls()

    def check_password(self, password, *, tags=None):
        """Treat every password as safe — this service never flags anything."""
        # This service allows *every* password as a non-breached password. It will
        # never tell a user their password isn't good enough.
        return False
| 35.324409
| 88
| 0.628149
|
4a03a6c2bf47d1db1beafe7b97147d0acc8bc7c9
| 4,984
|
py
|
Python
|
kubernetes/test/test_io_xk8s_cluster_addons_v1beta1_cluster_resource_set_binding.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_io_xk8s_cluster_addons_v1beta1_cluster_resource_set_binding.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/test/test_io_xk8s_cluster_addons_v1beta1_cluster_resource_set_binding.py
|
mariusgheorghies/python
|
68ac7e168963d8b5a81dc493b1973d29e903a15b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.20.7
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kubernetes.client
from kubernetes.client.models.io_xk8s_cluster_addons_v1beta1_cluster_resource_set_binding import IoXK8sClusterAddonsV1beta1ClusterResourceSetBinding # noqa: E501
from kubernetes.client.rest import ApiException
class TestIoXK8sClusterAddonsV1beta1ClusterResourceSetBinding(unittest.TestCase):
    """IoXK8sClusterAddonsV1beta1ClusterResourceSetBinding unit test stubs"""
    # NOTE(review): this module is auto-generated by openapi-generator; the
    # fixture expressions below (e.g. `models.v1/object_meta_v2...`) are
    # generator artifacts kept verbatim — regenerate rather than hand-edit.
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def make_instance(self, include_optional):
        """Test IoXK8sClusterAddonsV1beta1ClusterResourceSetBinding
            include_option is a boolean, when False only required
            params are included, when True both required and
            optional params are included """
        # model = kubernetes.client.models.io_xk8s_cluster_addons_v1beta1_cluster_resource_set_binding.IoXK8sClusterAddonsV1beta1ClusterResourceSetBinding()  # noqa: E501
        if include_optional :
            return IoXK8sClusterAddonsV1beta1ClusterResourceSetBinding(
                api_version = '0',
                kind = '0',
                metadata = kubernetes.client.models.v1/object_meta_v2.v1.ObjectMeta_v2(
                    annotations = {
                        'key' : '0'
                        },
                    cluster_name = '0',
                    creation_timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                    deletion_grace_period_seconds = 56,
                    deletion_timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                    finalizers = [
                        '0'
                        ],
                    generate_name = '0',
                    generation = 56,
                    labels = {
                        'key' : '0'
                        },
                    managed_fields = [
                        kubernetes.client.models.v1/managed_fields_entry.v1.ManagedFieldsEntry(
                            api_version = '0',
                            fields_type = '0',
                            fields_v1 = kubernetes.client.models.fields_v1.fieldsV1(),
                            manager = '0',
                            operation = '0',
                            time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), )
                        ],
                    name = '0',
                    namespace = '0',
                    owner_references = [
                        kubernetes.client.models.v1/owner_reference_v2.v1.OwnerReference_v2(
                            api_version = '0',
                            block_owner_deletion = True,
                            controller = True,
                            kind = '0',
                            name = '0',
                            uid = '0', )
                        ],
                    resource_version = '0',
                    self_link = '0',
                    uid = '0', ),
                spec = kubernetes.client.models.io_x_k8s_cluster_addons_v1alpha3_cluster_resource_set_binding_spec.io_x_k8s_cluster_addons_v1alpha3_ClusterResourceSetBinding_spec(
                    bindings = [
                        kubernetes.client.models.io_x_k8s_cluster_addons_v1alpha3_cluster_resource_set_binding_spec_bindings.io_x_k8s_cluster_addons_v1alpha3_ClusterResourceSetBinding_spec_bindings(
                            cluster_resource_set_name = '0',
                            resources = [
                                kubernetes.client.models.io_x_k8s_cluster_addons_v1alpha3_cluster_resource_set_binding_spec_resources.io_x_k8s_cluster_addons_v1alpha3_ClusterResourceSetBinding_spec_resources(
                                    applied = True,
                                    hash = '0',
                                    kind = 'Secret',
                                    last_applied_time = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                                    name = '0', )
                                ], )
                        ], )
            )
        else :
            return IoXK8sClusterAddonsV1beta1ClusterResourceSetBinding(
        )
    def testIoXK8sClusterAddonsV1beta1ClusterResourceSetBinding(self):
        """Test IoXK8sClusterAddonsV1beta1ClusterResourceSetBinding"""
        # Smoke-construct the model both with and without optional fields.
        inst_req_only = self.make_instance(include_optional=False)
        inst_req_and_optional = self.make_instance(include_optional=True)
# Allow running this generated test module directly with the stdlib runner.
if __name__ == '__main__':
    unittest.main()
| 47.018868
| 208
| 0.548154
|
4a03a7214b8911985f8ff2336f7c024a39c897c3
| 9,517
|
py
|
Python
|
hsds/servicenode.py
|
JonosGit/hsds
|
4abc4fc22c1e75cc9b15c879c8d00448a115fc92
|
[
"Apache-2.0"
] | null | null | null |
hsds/servicenode.py
|
JonosGit/hsds
|
4abc4fc22c1e75cc9b15c879c8d00448a115fc92
|
[
"Apache-2.0"
] | null | null | null |
hsds/servicenode.py
|
JonosGit/hsds
|
4abc4fc22c1e75cc9b15c879c8d00448a115fc92
|
[
"Apache-2.0"
] | null | null | null |
##############################################################################
# Copyright by The HDF Group. #
# All rights reserved. #
# #
# This file is part of HSDS (HDF5 Scalable Data Service), Libraries and #
# Utilities. The full HSDS copyright notice, including #
# terms governing use, modification, and redistribution, is contained in #
# the file COPYING, which can be found at the root of the source code #
# distribution tree. If you do not have access to this file, you may #
# request a copy from help@hdfgroup.org. #
##############################################################################
#
# service node of hsds cluster
#
import asyncio
from aiohttp.web import run_app
from util.lruCache import LruCache
import config
from basenode import healthCheck, baseInit
import hsds_logger as log
from util.authUtil import initUserDB
from domain_sn import GET_Domain, PUT_Domain, DELETE_Domain, GET_Domains
from domain_sn import GET_Datasets, GET_Groups, GET_Datatypes
from domain_sn import GET_ACL, GET_ACLs, PUT_ACL
from group_sn import GET_Group, POST_Group, DELETE_Group
from link_sn import GET_Links, GET_Link, PUT_Link, DELETE_Link
from attr_sn import GET_Attributes, GET_Attribute, PUT_Attribute
from attr_sn import DELETE_Attribute, GET_AttributeValue, PUT_AttributeValue
from ctype_sn import GET_Datatype, POST_Datatype, DELETE_Datatype
from dset_sn import GET_Dataset, POST_Dataset, DELETE_Dataset, GET_DatasetShape, PUT_DatasetShape, GET_DatasetType
from chunk_sn import PUT_Value, GET_Value, POST_Value
async def init(loop):
    """Initialize the service-node application and register all routes.

    Returns the aiohttp application produced by baseInit('sn') with the
    domain/ACL/group/datatype/dataset route table attached.
    """
    app = baseInit(loop, 'sn')

    # Domain routes.
    app.router.add_route('GET', '/', GET_Domain)
    app.router.add_route('DELETE', '/', DELETE_Domain)
    app.router.add_route('PUT', '/', PUT_Domain)
    app.router.add_route('OPTIONS', '/', GET_Domain)
    app.router.add_route('GET', '/domains', GET_Domains)
    app.router.add_route('OPTIONS', '/domains', GET_Domains)

    # Domain-level ACL routes.
    app.router.add_route('GET', '/acls/{username}', GET_ACL)
    app.router.add_route('OPTIONS', '/acls/{username}', GET_ACL)
    app.router.add_route('PUT', '/acls/{username}', PUT_ACL)
    app.router.add_route('GET', '/acls', GET_ACLs)
    app.router.add_route('OPTIONS', '/acls', GET_ACLs)

    # Group routes.
    app.router.add_route('GET', '/groups/{id}', GET_Group)
    app.router.add_route('OPTIONS', '/groups/{id}', GET_Group)
    app.router.add_route('GET', '/groups/', GET_Group)
    # FIX: was GET_Groups — the trailing-slash OPTIONS route now uses the
    # singular handler, matching the '/datasets/' and '/datatypes/' routes.
    app.router.add_route('OPTIONS', '/groups/', GET_Group)
    app.router.add_route('GET', '/groups', GET_Groups)
    app.router.add_route('OPTIONS', '/groups', GET_Groups)
    app.router.add_route('DELETE', '/groups/{id}', DELETE_Group)
    app.router.add_route('POST', '/groups', POST_Group)

    # Group link routes.
    app.router.add_route('GET', '/groups/{id}/links', GET_Links)
    app.router.add_route('OPTIONS', '/groups/{id}/links', GET_Links)
    app.router.add_route('GET', '/groups/{id}/links/{title}', GET_Link)
    app.router.add_route('OPTIONS', '/groups/{id}/links/{title}', GET_Link)
    app.router.add_route('DELETE', '/groups/{id}/links/{title}', DELETE_Link)
    app.router.add_route('PUT', '/groups/{id}/links/{title}', PUT_Link)

    # Group attribute and ACL routes.
    app.router.add_route('GET', '/groups/{id}/attributes', GET_Attributes)
    app.router.add_route('OPTIONS', '/groups/{id}/attributes', GET_Attributes)
    app.router.add_route('GET', '/groups/{id}/attributes/{name}', GET_Attribute)
    app.router.add_route('OPTIONS', '/groups/{id}/attributes/{name}', GET_Attribute)
    app.router.add_route('DELETE', '/groups/{id}/attributes/{name}', DELETE_Attribute)
    app.router.add_route('PUT', '/groups/{id}/attributes/{name}', PUT_Attribute)
    app.router.add_route('GET', '/groups/{id}/attributes/{name}/value', GET_AttributeValue)
    app.router.add_route('OPTIONS', '/groups/{id}/attributes/{name}/value', GET_AttributeValue)
    app.router.add_route('PUT', '/groups/{id}/attributes/{name}/value', PUT_AttributeValue)
    app.router.add_route('GET', '/groups/{id}/acls/{username}', GET_ACL)
    app.router.add_route('OPTIONS', '/groups/{id}/acls/{username}', GET_ACL)
    app.router.add_route('PUT', '/groups/{id}/acls/{username}', PUT_ACL)
    app.router.add_route('GET', '/groups/{id}/acls', GET_ACLs)
    app.router.add_route('OPTIONS', '/groups/{id}/acls', GET_ACLs)

    # Datatype routes (plus attributes and ACLs).
    app.router.add_route('GET', '/datatypes/{id}', GET_Datatype)
    app.router.add_route('OPTIONS', '/datatypes/{id}', GET_Datatype)
    app.router.add_route('GET', '/datatypes/', GET_Datatype)
    app.router.add_route('OPTIONS', '/datatypes/', GET_Datatype)
    app.router.add_route('GET', '/datatypes', GET_Datatypes)
    app.router.add_route('OPTIONS', '/datatypes', GET_Datatypes)
    app.router.add_route('DELETE', '/datatypes/{id}', DELETE_Datatype)
    app.router.add_route('POST', '/datatypes', POST_Datatype)
    app.router.add_route('GET', '/datatypes/{id}/attributes', GET_Attributes)
    app.router.add_route('OPTIONS', '/datatypes/{id}/attributes', GET_Attributes)
    app.router.add_route('GET', '/datatypes/{id}/attributes/{name}', GET_Attribute)
    app.router.add_route('OPTIONS', '/datatypes/{id}/attributes/{name}', GET_Attribute)
    app.router.add_route('DELETE', '/datatypes/{id}/attributes/{name}', DELETE_Attribute)
    app.router.add_route('PUT', '/datatypes/{id}/attributes/{name}', PUT_Attribute)
    app.router.add_route('GET', '/datatypes/{id}/attributes/{name}/value', GET_AttributeValue)
    app.router.add_route('OPTIONS', '/datatypes/{id}/attributes/{name}/value', GET_AttributeValue)
    app.router.add_route('PUT', '/datatypes/{id}/attributes/{name}/value', PUT_AttributeValue)
    app.router.add_route('GET', '/datatypes/{id}/acls/{username}', GET_ACL)
    app.router.add_route('OPTIONS', '/datatypes/{id}/acls/{username}', GET_ACL)
    app.router.add_route('PUT', '/datatypes/{id}/acls/{username}', PUT_ACL)
    app.router.add_route('GET', '/datatypes/{id}/acls', GET_ACLs)
    app.router.add_route('OPTIONS', '/datatypes/{id}/acls', GET_ACLs)

    # Dataset routes (shape/type/attributes/values/ACLs).
    app.router.add_route('GET', '/datasets/{id}', GET_Dataset)
    app.router.add_route('OPTIONS', '/datasets/{id}', GET_Dataset)
    app.router.add_route('GET', '/datasets/', GET_Dataset)
    app.router.add_route('OPTIONS', '/datasets/', GET_Dataset)
    app.router.add_route('GET', '/datasets', GET_Datasets)
    app.router.add_route('OPTIONS', '/datasets', GET_Datasets)
    app.router.add_route('DELETE', '/datasets/{id}', DELETE_Dataset)
    app.router.add_route('POST', '/datasets', POST_Dataset)
    app.router.add_route('GET', '/datasets/{id}/shape', GET_DatasetShape)
    app.router.add_route('OPTIONS', '/datasets/{id}/shape', GET_DatasetShape)
    app.router.add_route('PUT', '/datasets/{id}/shape', PUT_DatasetShape)
    app.router.add_route('GET', '/datasets/{id}/type', GET_DatasetType)
    app.router.add_route('OPTIONS', '/datasets/{id}/type', GET_DatasetType)
    app.router.add_route('GET', '/datasets/{id}/attributes', GET_Attributes)
    app.router.add_route('OPTIONS', '/datasets/{id}/attributes', GET_Attributes)
    app.router.add_route('GET', '/datasets/{id}/attributes/{name}', GET_Attribute)
    app.router.add_route('OPTIONS', '/datasets/{id}/attributes/{name}', GET_Attribute)
    app.router.add_route('DELETE', '/datasets/{id}/attributes/{name}', DELETE_Attribute)
    app.router.add_route('PUT', '/datasets/{id}/attributes/{name}', PUT_Attribute)
    app.router.add_route('GET', '/datasets/{id}/attributes/{name}/value', GET_AttributeValue)
    app.router.add_route('OPTIONS', '/datasets/{id}/attributes/{name}/value', GET_AttributeValue)
    app.router.add_route('PUT', '/datasets/{id}/attributes/{name}/value', PUT_AttributeValue)
    app.router.add_route('PUT', '/datasets/{id}/value', PUT_Value)
    app.router.add_route('GET', '/datasets/{id}/value', GET_Value)
    app.router.add_route('OPTIONS', '/datasets/{id}/value', GET_Value)
    app.router.add_route('POST', '/datasets/{id}/value', POST_Value)
    app.router.add_route('GET', '/datasets/{id}/acls/{username}', GET_ACL)
    app.router.add_route('OPTIONS', '/datasets/{id}/acls/{username}', GET_ACL)
    app.router.add_route('PUT', '/datasets/{id}/acls/{username}', PUT_ACL)
    app.router.add_route('GET', '/datasets/{id}/acls', GET_ACLs)
    app.router.add_route('OPTIONS', '/datasets/{id}/acls', GET_ACLs)

    return app
#
# Main
#
def main():
    """Entry point: build the app, configure caches/auth, and serve it."""
    log.info("Servicenode initializing")
    loop = asyncio.get_event_loop()
    # Construct the application object on the event loop.
    app = loop.run_until_complete(init(loop))
    cache_size = int(config.get("metadata_mem_cache_size"))
    log.info("Using metadata memory cache size of: {}".format(cache_size))
    app['meta_cache'] = LruCache(mem_target=cache_size, chunk_cache=False)
    app['domain_cache'] = LruCache(mem_target=cache_size, chunk_cache=False)
    app['loop'] = loop
    app['allow_noauth'] = True if config.get("allow_noauth") else False
    initUserDB(app)
    # Background health-check task for the cluster.
    asyncio.ensure_future(healthCheck(app), loop=loop)
    port = int(config.get("sn_port"))
    log.info(f"run_app on port: {port}")
    run_app(app, port=port)
# Start the service node when executed as a script.
if __name__ == '__main__':
    main()
| 55.982353
| 114
| 0.67763
|
4a03a7875ad8ebd30448a8955e38ecb55ba19824
| 865
|
py
|
Python
|
plyio/binary_io.py
|
bernardomig/plyio
|
f38b385e6bdbad9565c0199b6e704ff24cc14bac
|
[
"MIT"
] | null | null | null |
plyio/binary_io.py
|
bernardomig/plyio
|
f38b385e6bdbad9565c0199b6e704ff24cc14bac
|
[
"MIT"
] | null | null | null |
plyio/binary_io.py
|
bernardomig/plyio
|
f38b385e6bdbad9565c0199b6e704ff24cc14bac
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
from struct import Struct
# Mapping from numpy dtypes to struct format characters, used when packing
# or unpacking rows as little-endian binary records.  Extended to also
# cover float64 and 16-bit integers (backward compatible: all previous
# entries are unchanged).
BINARY_DATA_TYPES = {
    np.dtype('float32'): 'f',
    np.dtype('float64'): 'd',
    np.dtype('int32'): 'i',
    np.dtype('uint32'): 'I',
    np.dtype('int64'): 'q',
    np.dtype('uint64'): 'Q',
    np.dtype('int16'): 'h',
    np.dtype('uint16'): 'H',
    np.dtype('int8'): 'b',
    np.dtype('uint8'): 'B',
}
def _binary_struct(dtypes):
    """Compile a little-endian Struct describing one record of *dtypes*."""
    codes = (BINARY_DATA_TYPES[dtype] for dtype in dtypes)
    return Struct('<' + ''.join(codes))
def write_binary(stream, pc):
    """Pack every row of dataframe *pc* into *stream* as binary records."""
    packer = _binary_struct(pc.dtypes)
    for record in pc.itertuples(index=False):
        stream.write(packer.pack(*record))
def read_binary(stream, dtypes, count=None):
    """Read packed binary records from *stream*.

    Args:
        stream: binary file-like object to read from.
        dtypes: per-column numpy dtypes describing one record.
        count: optional maximum number of records; falsy means read until
            the stream is exhausted.

    Returns:
        List of tuples, one per fully-read record; a truncated trailing
        record is discarded.
    """
    s = _binary_struct(dtypes)
    size = s.size
    data = []
    while True:
        # BUG FIX: the previous check (len(data) + 1 == count) stopped one
        # record early — count=N returned N-1 rows, and count=1 returned none.
        if count and len(data) == count:
            break
        row = stream.read(size)
        if len(row) < size:  # EOF or truncated tail
            break
        data.append(s.unpack(row))
    return data
| 25.441176
| 79
| 0.610405
|
4a03a81a81ae898b10727ed1d88cacfd193c460c
| 20,847
|
py
|
Python
|
ai/quantization/onnx/src/quantize_base/quantize_detection.py
|
Aiden-Chen/python_tools
|
2e5d6dbdf45ede8c8d9f12a98c3a6d542c00e085
|
[
"Apache-2.0"
] | null | null | null |
ai/quantization/onnx/src/quantize_base/quantize_detection.py
|
Aiden-Chen/python_tools
|
2e5d6dbdf45ede8c8d9f12a98c3a6d542c00e085
|
[
"Apache-2.0"
] | null | null | null |
ai/quantization/onnx/src/quantize_base/quantize_detection.py
|
Aiden-Chen/python_tools
|
2e5d6dbdf45ede8c8d9f12a98c3a6d542c00e085
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding:utf-8 -*- #
# @Author :wang dian
import pickle
import time
import torch
import math
import numpy as np
import math
import config as Cfg
# from shapely.geometry import Polygon
from spconv.spconv_utils import rotate_non_max_suppression_cpu
class Detection(object):
    """Post-processing for an anchor-based 3D detector.

    Decodes the network's box/class/direction heads against precomputed
    anchors, applies score thresholding, and runs rotated NMS.
    """
    def __init__(self):
        # Feature-map geometry and class count come from the project config.
        self.output_height = Cfg.HEIGHT
        self.output_width = Cfg.WIDTH
        self.num_class = Cfg.NUM_CLASS
        # Two anchors per class at every feature-map location.
        self.anchor_per_loc = self.num_class * 2
        self.anchors = self.get_anchors(Cfg.ANCHORS_PATH)
        self.batch_size = self.anchors.shape[0]
        # NOTE(review): loaded but unused in the visible methods — presumably
        # a (x, y, z) range filter for decoded centers; confirm against predict().
        self.post_center_range = Cfg.POST_CENTER_RANGE
        self._num_direction_bins = 2
        self._score_threshold = .3
        self._iou_threshold = .1
def get_anchors(self, path):
anchors = np.fromfile(path, dtype=np.float32)
anchors = anchors.reshape(-1, self.output_height * self.output_width * self.anchor_per_loc, 7)
return anchors
def np_sigmoid(self, x):
return 1 / (1 + np.exp(-x))
def np_topk(self, matrix, n, axis=0):
full_sort = np.argsort(-matrix, axis=axis)
index = full_sort.take(np.arange(n), axis=axis)
return matrix[index], index
def limit_period(self, val, offset=0.5, period=np.pi):
return val - np.floor(val / period + offset) * period
def corners_nd(self, dims, origin=0.5):
"""generate relative box corners based on length per dim and
origin point.
Args:
dims (float array, shape=[N, ndim]): array of length per dim
origin (list or array or float): origin point relate to smallest point.
Returns:
float array, shape=[N, 2 ** ndim, ndim]: returned corners.
point layout example: (2d) x0y0, x0y1, x1y0, x1y1;
(3d) x0y0z0, x0y0z1, x0y1z0, x0y1z1, x1y0z0, x1y0z1, x1y1z0, x1y1z1
where x0 < x1, y0 < y1, z0 < z1
"""
ndim = int(dims.shape[1])
corners_norm = np.stack(
np.unravel_index(np.arange(2 ** ndim), [2] * ndim),
axis=1).astype(dims.dtype)
if ndim == 2:
# generate clockwise box corners
corners_norm = corners_norm[[0, 1, 3, 2]]
elif ndim == 3:
corners_norm = corners_norm[[0, 1, 3, 2, 4, 5, 7, 6]]
corners_norm = corners_norm - np.array(origin, dtype=dims.dtype)
corners = dims.reshape([-1, 1, ndim]) * corners_norm.reshape(
[1, 2 ** ndim, ndim])
return corners
def rotation_2d(self, points, angles):
"""rotation 2d points based on origin point clockwise when angle positive.
Args:
points (float array, shape=[N, point_size, 2]): points to be rotated.
angles (float array, shape=[N]): rotation angle.
Returns:
float array: same shape as points
"""
rot_sin = np.sin(angles)
rot_cos = np.cos(angles)
rot_mat_T = np.stack([[rot_cos, -rot_sin], [rot_sin, rot_cos]])
corners = np.einsum('aij,jka->aik', points, rot_mat_T)
return corners
def center_to_corner_box2d(self, centers, dims, angles=None, origin=0.5):
"""convert kitti locations, dimensions and angles to corners.
format: center(xy), dims(xy), angles(clockwise when positive)
Args:
centers (float array, shape=[N, 2]): locations in kitti label file.
dims (float array, shape=[N, 2]): dimensions in kitti label file.
angles (float array, shape=[N]): rotation_y in kitti label file.
Returns:
[type]: [description]
"""
# 'length' in kitti format is in x axis.
# xyz(hwl)(kitti label file)<->xyz(lhw)(camera)<->z(-x)(-y)(wlh)(lidar)
# center in kitti format is [0.5, 1.0, 0.5] in xyz.
corners = self.corners_nd(dims, origin=origin)
# corners: [N, 4, 2]
if angles is not None:
corners = self.rotation_2d(corners, angles)
corners += centers.reshape([-1, 1, 2])
return corners
def corner_to_standup_nd(self, boxes_corner):
assert len(boxes_corner.shape) == 3
standup_boxes = []
standup_boxes.append(np.min(boxes_corner, axis=1))
standup_boxes.append(np.max(boxes_corner, axis=1))
return np.concatenate(standup_boxes, -1)
def iou_jit(self, boxes, query_boxes, eps=1.0):
"""calculate box iou. note that jit version runs 2x faster than cython in
my machine!
Parameters
----------
boxes: (N, 4) ndarray of float
query_boxes: (K, 4) ndarray of float
Returns
-------
overlaps: (N, K) ndarray of overlap between boxes and query_boxes
"""
N = boxes.shape[0]
K = query_boxes.shape[0]
overlaps = np.zeros((N, K), dtype=boxes.dtype)
for k in range(K):
box_area = ((query_boxes[k, 2] - query_boxes[k, 0] + eps) *
(query_boxes[k, 3] - query_boxes[k, 1] + eps))
for n in range(N):
iw = (min(boxes[n, 2], query_boxes[k, 2]) - max(
boxes[n, 0], query_boxes[k, 0]) + eps)
if iw > 0:
ih = (min(boxes[n, 3], query_boxes[k, 3]) - max(
boxes[n, 1], query_boxes[k, 1]) + eps)
if ih > 0:
ua = (
(boxes[n, 2] - boxes[n, 0] + eps) *
(boxes[n, 3] - boxes[n, 1] + eps) + box_area - iw * ih)
overlaps[n, k] = iw * ih / ua
return overlaps
    def rotate_nms(self, rbboxes, scores, pre_max_size=1000, post_max_size=100):
        """Rotated NMS over BEV boxes; returns kept indices into *rbboxes*.

        Considers only the pre_max_size highest-scoring candidates, then
        keeps at most post_max_size boxes after suppression.  Returns an
        empty array when nothing survives.
        """
        if pre_max_size is not None:
            # Restrict to the top-scoring candidates; `indices` maps the
            # reduced set back to positions in the caller's arrays.
            num_keeped_scores = scores.shape[0]
            pre_max_size = min(num_keeped_scores, pre_max_size)
            scores, indices = self.np_topk(scores, pre_max_size)
            rbboxes = rbboxes[indices]
        # Append scores as a trailing column: dets is [M, 6].
        dets = np.concatenate([rbboxes, np.expand_dims(scores, -1)], axis=1)
        dets_np = dets
        if len(dets_np) == 0:
            keep = np.array([], dtype=np.int64)
        else:
            ret = np.array(self.rotate_nms_cc(dets_np, self._iou_threshold), dtype=np.int64)
            keep = ret[:post_max_size]
        if keep.shape[0] == 0:
            return np.zeros([0])
        if pre_max_size is not None:
            keep = np.array(keep)
            # Map kept positions in the reduced set back to original indices.
            return indices[keep]
        else:
            return np.array(keep)
    def rotate_nms_cc(self, dets, thresh):
        """Rotated-NMS core for dets of shape [N, 6]: (x, y, w, l, ry, score).

        Builds rotated corner boxes, computes an axis-aligned "standup"
        IoU as a cheap pre-filter, then defers the exact rotated
        suppression to the compiled spconv kernel.
        """
        scores = dets[:, 5]
        order = scores.argsort()[::-1].astype(np.int32) # highest->lowest
        dets_corners = self.center_to_corner_box2d(dets[:, :2], dets[:, 2:4], dets[:, 4])
        dets_standup = self.corner_to_standup_nd(dets_corners)
        standup_iou = self.iou_jit(dets_standup, dets_standup, eps=0.0)
        # Exact rotated suppression happens in native code (spconv).
        ret_iou = rotate_non_max_suppression_cpu(dets_corners, order, standup_iou, thresh)
        return ret_iou
def second_box_decode(self, box_encodings, anchors, encode_angle_to_vector=False, smooth_dim=False):
"""box decode for VoxelNet in lidar
Args:
boxes ([N, 7] Tensor): normal boxes: x, y, z, w, l, h, r
anchors ([N, 7] Tensor): anchor
"""
box_ndim = anchors.shape[-1]
cas, cts = [], []
if box_ndim > 7:
xa, ya, za, wa, la, ha, ra, *cas = np.split(anchors, box_ndim, axis=-1)
if encode_angle_to_vector:
xt, yt, zt, wt, lt, ht, rtx, rty, *cts = np.split(box_encodings, box_ndim, axis=-1)
else:
xt, yt, zt, wt, lt, ht, rt, *cts = np.split(box_encodings, box_ndim, axis=-1)
else:
xa, ya, za, wa, la, ha, ra = np.split(anchors, box_ndim, axis=-1)
if encode_angle_to_vector:
xt, yt, zt, wt, lt, ht, rtx, rty = np.split(box_encodings, box_ndim, axis=-1)
else:
xt, yt, zt, wt, lt, ht, rt = np.split(box_encodings, box_ndim, axis=-1)
diagonal = np.sqrt(la ** 2 + wa ** 2)
xg = xt * diagonal + xa
yg = yt * diagonal + ya
zg = zt * ha + za # -1
if smooth_dim: # FALSE
lg = (lt + 1) * la
wg = (wt + 1) * wa
hg = (ht + 1) * ha
else:
lg = np.exp(lt) * la
wg = np.exp(wt) * wa
hg = np.exp(ht) * ha
if encode_angle_to_vector: # FALSE
rax = np.cos(ra)
ray = np.sin(ra)
rgx = rtx + rax
rgy = rty + ray
rg = math.atan2(rgy, rgx)
else:
rg = rt + ra
cgs = [t + a for t, a in zip(cts, cas)]
decode = np.concatenate([xg, yg, zg, wg, lg, hg, rg, *cgs], axis=-1)
return decode
def process(self, box_preds, cls_preds, dir_cls_preds, format='NCHW'):
if isinstance(box_preds, torch.FloatTensor):
box_preds = box_preds.detach().numpy()
cls_preds = cls_preds.detach().numpy()
dir_cls_preds = dir_cls_preds.detach().numpy()
if format.upper() == 'NCHW':
box_preds = box_preds.reshape(1, self.anchor_per_loc, 7, self.output_height, self.output_width).\
transpose(0, 1, 3, 4, 2)
cls_preds = cls_preds.reshape(1, self.anchor_per_loc, self.num_class, self.output_height,
self.output_width).transpose(0, 1, 3, 4, 2)
dir_cls_preds = dir_cls_preds.reshape(1, self.anchor_per_loc, 2, self.output_height, self.output_width).\
transpose(0, 1, 3, 4, 2)
elif format.upper() == 'NHWC':
box_preds = box_preds.reshape(1, self.output_height, self.output_width, self.anchor_per_loc, 7).\
transpose(0, 3, 1, 2, 4)
cls_preds = cls_preds.reshape(1, self.output_height, self.output_width, self.anchor_per_loc,
self.num_class).transpose(0, 3, 1, 2, 4)
dir_cls_preds = dir_cls_preds.reshape(1, self.output_height, self.output_width, self.anchor_per_loc, 2).\
transpose(0, 3, 1, 2, 4)
else:
raise ValueError('data format is NCHW or NHWC')
return box_preds, cls_preds, dir_cls_preds
    def generate_bbox(self, box_preds, cls_preds, dir_cls_preds):
        """Decode head outputs and return BEV corner boxes of candidates.

        Runs the same decode + sigmoid-score + threshold pipeline as
        predict(), but stops before NMS: it returns the [M, 4, 2] rotated
        corner boxes for every candidate above the score threshold, or
        None when nothing passes.
        """
        # Normalize head layouts to (1, anchors_per_loc, H, W, C).
        box_preds, cls_preds, dir_cls_preds = self.process(box_preds, cls_preds, dir_cls_preds)
        meta_list = [None] * self.batch_size
        batch_anchors = self.anchors.reshape(self.batch_size, -1, self.anchors.shape[-1])
        # No anchor masking in this configuration (kept as placeholders).
        batch_anchors_mask = [None] * self.batch_size
        batch_box_preds = box_preds
        batch_cls_preds = cls_preds
        batch_box_preds = batch_box_preds.reshape(self.batch_size, -1, 7)
        num_class_with_bg = self.num_class
        # if not _encode_background_as_zeros:
        # if not True:
        #     num_class_with_bg = num_class + 1
        batch_cls_preds = batch_cls_preds.reshape(self.batch_size, -1, num_class_with_bg)
        # Turn regression offsets into absolute boxes.
        batch_box_preds = self.second_box_decode(batch_box_preds, batch_anchors)
        # if _use_direction_classifier:
        batch_dir_preds = dir_cls_preds
        batch_dir_preds = batch_dir_preds.reshape(self.batch_size, -1, self._num_direction_bins)
        selected = []
        for box_preds, cls_preds, dir_preds, a_mask, meta in zip(
                batch_box_preds, batch_cls_preds, batch_dir_preds,
                batch_anchors_mask, meta_list):
            if a_mask is not None: # none
                box_preds = box_preds[a_mask]
                cls_preds = cls_preds[a_mask]
            box_preds = box_preds.astype(np.float32)
            cls_preds = cls_preds.astype(np.float32)
            # if _use_direction_classifier: # true
            if a_mask is not None: # none
                dir_preds = dir_preds[a_mask]
            dir_labels = np.argmax(dir_preds, axis=-1)
            # this don't support softmax
            total_scores = self.np_sigmoid(cls_preds)
            # to remove overlapped box.
            if num_class_with_bg == 1:
                top_scores = total_scores.squeeze(-1)
                top_labels = np.zeros(
                    total_scores.shape[0],
                    dtype=np.int64)
            else:
                top_scores = np.max(
                    total_scores, axis=-1)
                top_labels = np.argmax(total_scores, axis=1)
            _nms_score_thresholds = self._score_threshold
            assert _nms_score_thresholds > 0.0
            top_scores_keep = top_scores >= _nms_score_thresholds # remove boxes by score
            top_scores = top_scores[top_scores_keep] # mask
            if top_scores.shape[0] != 0: # the rest of boxes
                if _nms_score_thresholds > 0.0:
                    box_preds = box_preds[top_scores_keep]
                    dir_labels = dir_labels[top_scores_keep]
                    top_labels = top_labels[top_scores_keep]
                boxes_for_nms = box_preds[:, [0, 1, 3, 4, 6]] # bev_nms 01346
                # the nms in 3d detection just remove overlap boxes.
                # ================rotate_nms(rbboxes, scores)=====================
                pre_max_size = 1000
                # post_max_size = 100
                num_keeped_scores = top_scores.shape[0]
                pre_max_size = min(num_keeped_scores, pre_max_size)
                scores, indices = self.np_topk(top_scores, pre_max_size)
                rbboxes = boxes_for_nms[indices]
                dets = np.concatenate([rbboxes, np.expand_dims(scores, -1)], axis=1)
                # dets_np = dets
                if len(dets) == 0:
                    dets_corners = np.array([], dtype=np.int64)
                else:
                    # Convert surviving (x, y, w, l, ry) boxes to corner form.
                    dets_corners = self.center_to_corner_box2d(dets[:, :2], dets[:, 2:4], dets[:, 4])
                return dets_corners
            else:
                return None
    def predict(self, box_preds, cls_preds, dir_cls_preds):
        """Decode raw network outputs into one detection dict per batch item.

        :param box_preds: flat box-regression output; reshaped below to
            (batch_size, num_anchors, 7) — 7 box-encoding values per anchor.
        :param cls_preds: flat classification output; reshaped to
            (batch_size, num_anchors, num_class_with_bg).
        :param dir_cls_preds: flat direction-classifier output; reshaped to
            (batch_size, num_anchors, num_direction_bins).
        :returns: list of dicts, one per batch item, with keys
            'box3d_lidar', 'scores', 'label_preds' and 'metadata'.

        Side effect: writes the selected boxes/scores/labels of each sample
        to ./final_out/*.bin via ndarray.tofile.
        """
        # NOTE(review): self.process presumably reorders/reshapes the raw
        # output buffers — confirm against its definition.
        box_preds, cls_preds, dir_cls_preds = self.process(box_preds, cls_preds, dir_cls_preds)
        meta_list = [None] * self.batch_size
        batch_anchors = self.anchors.reshape(self.batch_size, -1, self.anchors.shape[-1])
        batch_anchors_mask = [None] * self.batch_size
        batch_box_preds = box_preds
        batch_cls_preds = cls_preds
        batch_box_preds = batch_box_preds.reshape(self.batch_size, -1, 7)
        num_class_with_bg = self.num_class
        # if not _encode_background_as_zeros:
        # if not True:
        # num_class_with_bg = num_class + 1
        batch_cls_preds = batch_cls_preds.reshape(self.batch_size, -1, num_class_with_bg)
        # Decode anchor-relative regression offsets into absolute 3D boxes.
        batch_box_preds = self.second_box_decode(batch_box_preds, batch_anchors)
        # if _use_direction_classifier:
        batch_dir_preds = dir_cls_preds
        batch_dir_preds = batch_dir_preds.reshape(self.batch_size, -1, self._num_direction_bins)
        predictions_dicts = []
        for box_preds, cls_preds, dir_preds, a_mask, meta in zip(
                batch_box_preds, batch_cls_preds, batch_dir_preds,
                batch_anchors_mask, meta_list):
            if a_mask is not None:  # none
                box_preds = box_preds[a_mask]
                cls_preds = cls_preds[a_mask]
            box_preds = box_preds.astype(np.float32)
            cls_preds = cls_preds.astype(np.float32)
            # if _use_direction_classifier: # true
            if a_mask is not None:  # none
                dir_preds = dir_preds[a_mask]
            # Direction bin with highest logit per anchor.
            dir_labels = np.argmax(dir_preds, axis=-1)
            # this don't support softmax
            total_scores = self.np_sigmoid(cls_preds)
            # to remove overlapped box.
            if num_class_with_bg == 1:
                top_scores = total_scores.squeeze(-1)
                top_labels = np.zeros(
                    total_scores.shape[0],
                    dtype=np.int64)
            else:
                top_scores = np.max(
                    total_scores, axis=-1)
                top_labels = np.argmax(total_scores, axis=1)
            _nms_score_thresholds = self._score_threshold
            assert _nms_score_thresholds > 0.0
            top_scores_keep = top_scores >= _nms_score_thresholds  # remove boxes by score
            top_scores = top_scores[top_scores_keep]  # mask
            if top_scores.shape[0] != 0:  # the rest of boxes
                if _nms_score_thresholds > 0.0:
                    box_preds = box_preds[top_scores_keep]
                    dir_labels = dir_labels[top_scores_keep]
                    top_labels = top_labels[top_scores_keep]
                # Columns 0,1 (x,y), 3,4 (two size dims) and 6 (yaw) form the
                # bird's-eye-view box used for rotated NMS.
                boxes_for_nms = box_preds[:, [0, 1, 3, 4, 6]]  # bev_nms 01346
                # the nms in 3d detection just remove overlap boxes.
                selected = self.rotate_nms(boxes_for_nms, top_scores)
            else:
                selected = []
            selected_boxes = box_preds[selected]
            # if _use_direction_classifier: # true
            selected_dir_labels = dir_labels[selected]  # selected dir
            selected_labels = top_labels[selected]  # selected class
            selected_scores = top_scores[selected]  # selected score
            # finally generate predictions.
            final_box_preds = np.array([])
            if selected_boxes.shape[0] != 0:
                box_preds = selected_boxes
                scores = selected_scores
                label_preds = selected_labels
                box_preds.tofile("./final_out/final_box.bin")
                scores.tofile('./final_out/final_scores.bin')
                label_preds.tofile('./final_out/final_labels.bin')
                # if _use_direction_classifier: # true
                if True:
                    dir_labels = selected_dir_labels
                    period = (2 * np.pi / self._num_direction_bins)  # =pi
                    _dir_offset = 0.0
                    _dir_limit_offset = 1.0
                    # Fold yaw into one period, then add the classified bin so
                    # the final heading points the predicted way.
                    dir_rot = self.limit_period(box_preds[..., 6] - _dir_offset, _dir_limit_offset, period)
                    box_preds[..., 6] = dir_rot + _dir_offset + period * dir_labels  # dir_rot +0 or dir_rot+pi
                final_box_preds = box_preds
                final_scores = scores
                final_labels = label_preds
                # post_center_range = np.array([0.0000, -20.0000, -2.2000, 55.2000, 20.0000, 0.8000], np.float32)
                if self.post_center_range is not None:  # not none
                    # Drop boxes whose center falls outside the valid range.
                    mask = (final_box_preds[:, :3] >= self.post_center_range[:3]).all(1)
                    mask &= (final_box_preds[:, :3] <= self.post_center_range[3:]).all(1)
                    predictions_dict = {
                        "box3d_lidar": final_box_preds[mask],
                        "scores": final_scores[mask],
                        "label_preds": final_labels[mask],
                        "metadata": meta,
                    }
                else:
                    predictions_dict = {
                        "box3d_lidar": final_box_preds,
                        "scores": final_scores,
                        "label_preds": final_labels,
                        "metadata": meta,
                    }
            else:
                # Nothing survived score filtering / NMS: emit empty arrays.
                dtype = batch_box_preds.dtype
                if final_box_preds.shape[0] == 0:
                    print('*********************error***********************')
                predictions_dict = {
                    "box3d_lidar":
                        np.zeros([0, final_box_preds.shape[-1]],
                                 dtype=dtype, ),
                    "scores":
                        np.zeros([0], dtype=dtype),
                    "label_preds":
                        np.zeros([0], dtype=top_labels.dtype),
                    "metadata":
                        meta,
                }
            predictions_dicts.append(predictions_dict)
        # with open("result_box3d_lidar_scores_label_preds_metadata.pkl", 'wb') as f:
        #     pickle.dump(predictions_dicts, f)
        return predictions_dicts
if __name__ == '__main__':
    # Load the raw network output blobs dumped by the detection pipeline
    # and run post-processing on them.
    out_dir = '../../detection/out/'
    raw_box = np.fromfile(out_dir + 'output-output_box-int32-1_56_100_138-0000.bin', dtype=np.float32)
    raw_cls = np.fromfile(out_dir + 'output-output_cls-int32-1_32_100_138-0000.bin', dtype=np.float32)
    raw_dir_cls = np.fromfile(out_dir + 'output-output_dir_cls-int32-1_16_100_138-0000.bin', dtype=np.float32)
    detector = Detection()
    detector.predict(raw_box, raw_cls, raw_dir_cls)
    # Alternative usage: produce BEV corner boxes instead of full predictions:
    # dets_corners = detector.generate_bbox(raw_box, raw_cls, raw_dir_cls)
    # dets_corners.tofile('./final_out/dets_corners.bin')
    # print(dets_corners.shape)
| 44.449893
| 117
| 0.564877
|
4a03a8f43dda59abe2fc4e60886c504b6714656e
| 1,513
|
py
|
Python
|
bnn_mcmc_examples/examples/mlp/penguins/metropolis_hastings/pilot_visual_summary.py
|
papamarkou/bnn_mcmc_examples
|
7bb4ecfb33db4c30a8e61e31f528bda0efb24e3d
|
[
"MIT"
] | 1
|
2021-09-09T15:55:37.000Z
|
2021-09-09T15:55:37.000Z
|
bnn_mcmc_examples/examples/mlp/penguins/metropolis_hastings/pilot_visual_summary.py
|
kushagragpt99/bnn_mcmc_examples
|
297cdb1e74335860989bebdb4ff6f6322b6adc06
|
[
"MIT"
] | null | null | null |
bnn_mcmc_examples/examples/mlp/penguins/metropolis_hastings/pilot_visual_summary.py
|
kushagragpt99/bnn_mcmc_examples
|
297cdb1e74335860989bebdb4ff6f6322b6adc06
|
[
"MIT"
] | 1
|
2021-10-05T06:38:57.000Z
|
2021-10-05T06:38:57.000Z
|
# %% Import packages
import kanga.plots as ps
from kanga.chains import ChainArray
from bnn_mcmc_examples.examples.mlp.penguins.constants import diagnostic_iter_thres
from bnn_mcmc_examples.examples.mlp.penguins.metropolis_hastings.constants import sampler_output_pilot_path
from bnn_mcmc_examples.examples.mlp.penguins.model import model
# %% Load chain array
# Pilot MCMC output: per-iteration parameter samples and accept/reject flags.
chain_array = ChainArray.from_file(keys=['sample', 'accepted'], path=sampler_output_pilot_path)
# %% Drop burn-in samples
# Keep only iterations at or after the diagnostic threshold.
chain_array.vals['sample'] = chain_array.vals['sample'][diagnostic_iter_thres:, :]
chain_array.vals['accepted'] = chain_array.vals['accepted'][diagnostic_iter_thres:]
# %% Plot traces of simulated chain
# One trace plot per model parameter (1-based labels in the titles).
for i in range(model.num_params()):
    ps.trace(
        chain_array.get_param(i),
        title=r'Traceplot of $\theta_{{{}}}$'.format(i+1),
        xlabel='Iteration',
        ylabel='Parameter value'
    )
# %% Plot running means of simulated chain
for i in range(model.num_params()):
    ps.running_mean(
        chain_array.get_param(i),
        title=r'Running mean plot of parameter $\theta_{{{}}}$'.format(i+1),
        xlabel='Iteration',
        ylabel='Running mean'
    )
# %% Plot histograms of marginals of simulated chain
for i in range(model.num_params()):
    ps.hist(
        chain_array.get_param(i),
        bins=30,
        density=True,
        title=r'Histogram of parameter $\theta_{{{}}}$'.format(i+1),
        xlabel='Parameter value',
        ylabel='Parameter relative frequency'
    )
)
| 29.666667
| 107
| 0.696629
|
4a03a8fd988b7c5f05ff3c56db6d93ad69c35892
| 3,246
|
py
|
Python
|
replot/helpers/render.py
|
Phyks/replot
|
5d77471f5156c162a9839db612e73897bce8c8ee
|
[
"MIT"
] | null | null | null |
replot/helpers/render.py
|
Phyks/replot
|
5d77471f5156c162a9839db612e73897bce8c8ee
|
[
"MIT"
] | 30
|
2016-03-01T13:58:23.000Z
|
2016-08-01T10:02:23.000Z
|
replot/helpers/render.py
|
Phyks/replot
|
5d77471f5156c162a9839db612e73897bce8c8ee
|
[
"MIT"
] | null | null | null |
"""
Various helper functions for plotting.
"""
import numpy as np
def set_axis_property(group_, setter, value, default_setter=None):
    """
    Set a property on an axis at render time.

    :param group_: The subplot for this axis.
    :param setter: The setter to use to set the axis property.
    :param value: Either a plain value, or a dict mapping subplot groups \
            to per-group values.
    :param default_setter: Fallback invoked when ``value`` is a dict that \
            has no entry for this subplot.
    :returns: None.
    """
    # Scalar case: apply directly unless unset.
    if not isinstance(value, dict):
        if value is not None:
            setter(value)
        return
    # Dict case: look up this subplot's value, falling back to the default.
    try:
        if value[group_] is not None:
            setter(value[group_])
    except KeyError:
        # No entry for this axis in the dict, use default argument
        if default_setter is not None:
            default_setter()
def linewidth_from_data_units(linewidth, axis, reference='y'):
    """
    Convert a linewidth in data units to linewidth in points.

    Parameters
    ----------
    linewidth: float
        Linewidth in data units of the respective reference-axis
    axis: matplotlib axis
        The axis which is used to extract the relevant transformation
        data (data limits and size must not change afterwards)
    reference: string
        The axis that is taken as a reference for the data width.
        Possible values: 'x' and 'y'. Defaults to 'y'.

    Returns
    -------
    linewidth: float
        Linewidth in points

    Raises
    ------
    ValueError
        If ``reference`` is neither ``'x'`` nor ``'y'``.

    From https://stackoverflow.com/questions/19394505/matplotlib-expand-the-line-with-specified-width-in-data-unit.
    """
    fig = axis.get_figure()
    if reference == 'x':
        length = fig.bbox_inches.width * axis.get_position().width
        value_range = np.diff(axis.get_xlim())
    elif reference == 'y':
        length = fig.bbox_inches.height * axis.get_position().height
        value_range = np.diff(axis.get_ylim())
    else:
        # Previously an invalid reference fell through to an opaque
        # UnboundLocalError below; fail explicitly instead.
        raise ValueError("reference must be 'x' or 'y', got %r" % (reference,))
    # Convert length to points
    length *= 72  # Inches to points is a fixed conversion in matplotlib
    # Scale linewidth to value range
    return linewidth * (length / value_range)
def data_units_from_points(points, axis, reference='y'):
    """
    Convert points to data units on the given axis.

    Parameters
    ----------
    points: float
        Value in points to convert.
    axis: matplotlib axis
        The axis which is used to extract the relevant transformation
        data (data limits and size must not change afterwards)
    reference: string
        The axis that is taken as a reference for the data width.
        Possible values: 'x' and 'y'. Defaults to 'y'.

    Returns
    -------
    points: float
        Converted value.

    Raises
    ------
    ValueError
        If ``reference`` is neither ``'x'`` nor ``'y'``.
    """
    fig = axis.get_figure()
    if reference == 'x':
        length = fig.bbox_inches.width * axis.get_position().width
        value_range = np.diff(axis.get_xlim())
    elif reference == 'y':
        length = fig.bbox_inches.height * axis.get_position().height
        value_range = np.diff(axis.get_ylim())
    else:
        # Previously an invalid reference fell through to an opaque
        # UnboundLocalError below; fail explicitly instead.
        raise ValueError("reference must be 'x' or 'y', got %r" % (reference,))
    # Convert length to points
    length *= 72  # Inches to points is a fixed conversion in matplotlib
    # Scale linewidth to value range
    return points / (length / value_range)
| 32.787879
| 115
| 0.639864
|
4a03a9ae73a5c8cf44973a16b3e3885439b58d13
| 620
|
py
|
Python
|
contrib/qt_translations.py
|
LambocoinFoundation/Lambocoin
|
823e1ad3e3811f8a938b09c192ecf87d56d25fb4
|
[
"MIT"
] | null | null | null |
contrib/qt_translations.py
|
LambocoinFoundation/Lambocoin
|
823e1ad3e3811f8a938b09c192ecf87d56d25fb4
|
[
"MIT"
] | null | null | null |
contrib/qt_translations.py
|
LambocoinFoundation/Lambocoin
|
823e1ad3e3811f8a938b09c192ecf87d56d25fb4
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Helpful little script that spits out a comma-separated list of
# language codes for Qt icons that should be included
# in binary bitcoin distributions
import glob
import os
import re
import sys
# Validate usage: expects the Qt translations dir and the wallet locale dir.
if len(sys.argv) != 3:
    sys.exit("Usage: %s $QTDIR/translations $BITCOINDIR/src/qt/locale"%sys.argv[0])

d1 = sys.argv[1]
d2 = sys.argv[2]

# Language codes for which Qt ships a translation (qt_XX.qm).
l1 = set([ re.search(r'qt_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d1, 'qt_*.qm')) ])
# Language codes for which the wallet ships a translation (Lambocoin_XX.qm).
l2 = set([ re.search(r'Lambocoin_(.*).qm', f).group(1) for f in glob.glob(os.path.join(d2, 'Lambocoin_*.qm')) ])

# Emit the intersection. Fix: use print as a function call so the script
# parses under both Python 2 and Python 3 (the old print statement is a
# SyntaxError on Python 3); output is identical on both.
print(",".join(sorted(l1.intersection(l2))))
| 26.956522
| 112
| 0.683871
|
4a03aa5915f5017c57fa10a292f09bf435652875
| 16,575
|
py
|
Python
|
proxy/http/handler.py
|
Sanake/proxy.py
|
bd808af81f8bdaf49c8693821d8417cdecf31dd6
|
[
"BSD-3-Clause"
] | null | null | null |
proxy/http/handler.py
|
Sanake/proxy.py
|
bd808af81f8bdaf49c8693821d8417cdecf31dd6
|
[
"BSD-3-Clause"
] | null | null | null |
proxy/http/handler.py
|
Sanake/proxy.py
|
bd808af81f8bdaf49c8693821d8417cdecf31dd6
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
proxy.py
~~~~~~~~
⚡⚡⚡ Fast, Lightweight, Pluggable, TLS interception capable proxy server focused on
Network monitoring, controls & Application development, testing, debugging.
:copyright: (c) 2013-present by Abhinav Singh and contributors.
:license: BSD, see LICENSE for more details.
"""
import ssl
import time
import errno
import socket
import asyncio
import logging
import selectors
from typing import Tuple, List, Union, Optional, Dict, Any
from .plugin import HttpProtocolHandlerPlugin
from .parser import HttpParser, httpParserStates, httpParserTypes
from .exception import HttpProtocolException
from ..common.types import Readables, Writables
from ..common.utils import wrap_socket
from ..core.base import BaseTcpServerHandler
from ..core.connection import TcpClientConnection
from ..common.flag import flags
from ..common.constants import DEFAULT_CLIENT_RECVBUF_SIZE, DEFAULT_KEY_FILE
from ..common.constants import DEFAULT_SELECTOR_SELECT_TIMEOUT, DEFAULT_TIMEOUT
logger = logging.getLogger(__name__)


# Command-line flags consumed by HttpProtocolHandler below.
flags.add_argument(
    '--client-recvbuf-size',
    type=int,
    default=DEFAULT_CLIENT_RECVBUF_SIZE,
    help='Default: 1 MB. Maximum amount of data received from the '
    'client in a single recv() operation. Bump this '
    'value for faster uploads at the expense of '
    'increased RAM.',
)
flags.add_argument(
    '--key-file',
    type=str,
    default=DEFAULT_KEY_FILE,
    help='Default: None. Server key file to enable end-to-end TLS encryption with clients. '
    'If used, must also pass --cert-file.',
)
flags.add_argument(
    '--timeout',
    type=int,
    default=DEFAULT_TIMEOUT,
    help='Default: ' + str(DEFAULT_TIMEOUT) +
    '. Number of seconds after which '
    'an inactive connection must be dropped. Inactivity is defined by no '
    'data sent or received by the client.',
)
class HttpProtocolHandler(BaseTcpServerHandler):
    """HTTP, HTTPS, HTTP2, WebSockets protocol handler.

    Accepts `Client` connection and delegates to HttpProtocolHandlerPlugin.
    """

    def __init__(self, *args: Any, **kwargs: Any):
        super().__init__(*args, **kwargs)
        # Wall-clock bookkeeping used by the --timeout inactivity check.
        self.start_time: float = time.time()
        self.last_activity: float = self.start_time
        self.request: HttpParser = HttpParser(
            httpParserTypes.REQUEST_PARSER,
            enable_proxy_protocol=self.flags.enable_proxy_protocol,
        )
        # Selector is only used in threaded mode (run()/_run_once()); in
        # threadless mode descriptors are polled by the owning event loop.
        self.selector: Optional[selectors.DefaultSelector] = None
        if not self.flags.threadless:
            self.selector = selectors.DefaultSelector()
        self.plugins: Dict[str, HttpProtocolHandlerPlugin] = {}

    ##
    # initialize, is_inactive, shutdown, get_events, handle_events
    # overrides Work class definitions.
    ##

    def initialize(self) -> None:
        """Optionally upgrades connection to HTTPS, set ``conn`` in non-blocking mode and initializes plugins."""
        conn = self._optionally_wrap_socket(self.work.connection)
        conn.setblocking(False)
        # Update client connection reference if connection was wrapped
        if self._encryption_enabled():
            self.work = TcpClientConnection(conn=conn, addr=self.work.addr)
        if b'HttpProtocolHandlerPlugin' in self.flags.plugins:
            for klass in self.flags.plugins[b'HttpProtocolHandlerPlugin']:
                instance: HttpProtocolHandlerPlugin = klass(
                    self.uid,
                    self.flags,
                    self.work,
                    self.request,
                    self.event_queue,
                )
                self.plugins[instance.name()] = instance
        logger.debug('Handling connection %r' % self.work.connection)

    def is_inactive(self) -> bool:
        """Return True when the client has no pending buffer and has been idle longer than --timeout."""
        if not self.work.has_buffer() and \
                self._connection_inactive_for() > self.flags.timeout:
            return True
        return False

    def shutdown(self) -> None:
        """Flush pending data, notify plugins, unwrap TLS if needed and close the client connection."""
        try:
            # Flush pending buffer in threaded mode only.
            #
            # For threadless mode, BaseTcpServerHandler implements
            # the must_flush_before_shutdown logic automagically.
            if self.selector and self.work.has_buffer():
                self._flush()
            # Invoke plugin.on_client_connection_close
            for plugin in self.plugins.values():
                plugin.on_client_connection_close()
            logger.debug(
                'Closing client connection %r '
                'at address %s has buffer %s' %
                (self.work.connection, self.work.address, self.work.has_buffer()),
            )
            conn = self.work.connection
            # Unwrap if wrapped before shutdown.
            if self._encryption_enabled() and \
                    isinstance(self.work.connection, ssl.SSLSocket):
                conn = self.work.connection.unwrap()
            conn.shutdown(socket.SHUT_WR)
            logger.debug('Client connection shutdown successful')
        except OSError:
            pass
        finally:
            # Section 4.2.2.13 of RFC 1122 tells us that a close() with any pending readable data
            # could lead to an immediate reset being sent.
            #
            # "A host MAY implement a 'half-duplex' TCP close sequence, so that an application
            # that has called CLOSE cannot continue to read data from the connection.
            # If such a host issues a CLOSE call while received data is still pending in TCP,
            # or if new data is received after CLOSE is called, its TCP SHOULD send a RST to
            # show that data was lost."
            #
            self.work.connection.close()
            logger.debug('Client connection closed')
            super().shutdown()

    async def get_events(self) -> Dict[int, int]:
        """Return fd -> event-mask map: client events merged with each plugin's descriptors."""
        # Get default client events
        events: Dict[int, int] = await super().get_events()
        # HttpProtocolHandlerPlugin.get_descriptors
        for plugin in self.plugins.values():
            plugin_read_desc, plugin_write_desc = plugin.get_descriptors()
            for rfileno in plugin_read_desc:
                if rfileno not in events:
                    events[rfileno] = selectors.EVENT_READ
                else:
                    events[rfileno] |= selectors.EVENT_READ
            for wfileno in plugin_write_desc:
                if wfileno not in events:
                    events[wfileno] = selectors.EVENT_WRITE
                else:
                    events[wfileno] |= selectors.EVENT_WRITE
        return events

    # We override super().handle_events and never call it
    async def handle_events(
            self,
            readables: Readables,
            writables: Writables,
    ) -> bool:
        """Returns True if proxy must tear down."""
        # Flush buffer for ready to write sockets
        teardown = await self.handle_writables(writables)
        if teardown:
            return True
        # Invoke plugin.write_to_descriptors
        for plugin in self.plugins.values():
            teardown = await plugin.write_to_descriptors(writables)
            if teardown:
                return True
        # Read from ready to read sockets
        teardown = await self.handle_readables(readables)
        if teardown:
            return True
        # Invoke plugin.read_from_descriptors
        for plugin in self.plugins.values():
            teardown = await plugin.read_from_descriptors(readables)
            if teardown:
                return True
        return False

    def handle_data(self, data: memoryview) -> Optional[bool]:
        """Handles incoming data from client."""
        if data is None:
            logger.debug('Client closed connection, tearing down...')
            self.work.closed = True
            return True
        try:
            # Don't parse incoming data any further after 1st request has completed.
            #
            # This specially does happen for pipeline requests.
            #
            # Plugins can utilize on_client_data for such cases and
            # apply custom logic to handle request data sent after 1st
            # valid request.
            if self.request.state != httpParserStates.COMPLETE:
                # Parse http request
                #
                # TODO(abhinavsingh): Remove .tobytes after parser is
                # memoryview compliant
                self.request.parse(data.tobytes())
                if self.request.is_complete:
                    # Invoke plugin.on_request_complete
                    for plugin in self.plugins.values():
                        upgraded_sock = plugin.on_request_complete()
                        # A plugin may upgrade the client socket (e.g. to TLS);
                        # propagate the new socket to all sibling plugins.
                        if isinstance(upgraded_sock, ssl.SSLSocket):
                            logger.debug(
                                'Updated client conn to %s', upgraded_sock,
                            )
                            self.work._conn = upgraded_sock
                            for plugin_ in self.plugins.values():
                                if plugin_ != plugin:
                                    plugin_.client._conn = upgraded_sock
                        elif isinstance(upgraded_sock, bool) and upgraded_sock is True:
                            return True
            else:
                # HttpProtocolHandlerPlugin.on_client_data
                # Can raise HttpProtocolException to tear down the connection
                for plugin in self.plugins.values():
                    optional_data = plugin.on_client_data(data)
                    if optional_data is None:
                        break
                    data = optional_data
        except HttpProtocolException as e:
            logger.debug('HttpProtocolException raised')
            response: Optional[memoryview] = e.response(self.request)
            if response:
                self.work.queue(response)
            return True
        return False

    async def handle_writables(self, writables: Writables) -> bool:
        """Flush the client buffer when the client socket is writable; True means tear down."""
        if self.work.connection.fileno() in writables and self.work.has_buffer():
            logger.debug('Client is ready for writes, flushing buffer')
            self.last_activity = time.time()
            # TODO(abhinavsingh): This hook could just reside within server recv block
            # instead of invoking when flushed to client.
            #
            # Invoke plugin.on_response_chunk
            chunk = self.work.buffer
            for plugin in self.plugins.values():
                chunk = plugin.on_response_chunk(chunk)
                if chunk is None:
                    break
            try:
                # Call super() for client flush
                teardown = await super().handle_writables(writables)
                if teardown:
                    return True
            except BrokenPipeError:
                logger.error(
                    'BrokenPipeError when flushing buffer for client',
                )
                return True
            except OSError:
                logger.error('OSError when flushing buffer to client')
                return True
        return False

    async def handle_readables(self, readables: Readables) -> bool:
        """Read pending client data when the client socket is readable; True means tear down."""
        if self.work.connection.fileno() in readables:
            logger.debug('Client is ready for reads, reading')
            self.last_activity = time.time()
            try:
                teardown = await super().handle_readables(readables)
                if teardown:
                    return teardown
            except ssl.SSLWantReadError:  # Try again later
                logger.warning(
                    'SSLWantReadError encountered while reading from client, will retry ...',
                )
                return False
            except socket.error as e:
                if e.errno == errno.ECONNRESET:
                    # Most requests for mobile devices will end up
                    # with client closed connection. Using `debug`
                    # here to avoid flooding the logs.
                    logger.debug('%r' % e)
                else:
                    logger.warning(
                        'Exception when receiving from %s connection#%d with reason %r' %
                        (self.work.tag, self.work.connection.fileno(), e),
                    )
                return True
        return False

    ##
    # Internal methods
    ##
    def _encryption_enabled(self) -> bool:
        """True when both --key-file and --cert-file were provided."""
        return self.flags.keyfile is not None and \
            self.flags.certfile is not None

    def _optionally_wrap_socket(
            self, conn: socket.socket,
    ) -> Union[ssl.SSLSocket, socket.socket]:
        """Attempts to wrap accepted client connection using provided certificates.

        Shutdown and closes client connection upon error.
        """
        if self._encryption_enabled():
            assert self.flags.keyfile and self.flags.certfile
            # TODO(abhinavsingh): Insecure TLS versions must not be accepted by default
            conn = wrap_socket(conn, self.flags.keyfile, self.flags.certfile)
        return conn

    def _connection_inactive_for(self) -> float:
        """Seconds elapsed since the last read/write activity with the client."""
        return time.time() - self.last_activity

    ##
    # run() and _run_once() are here to maintain backward compatibility
    # with threaded mode. These methods are only called when running
    # in threaded mode.
    ##
    def run(self) -> None:
        """run() method is not used when in --threadless mode.

        This is here just to maintain backward compatibility with threaded mode.
        """
        loop = asyncio.new_event_loop()
        try:
            self.initialize()
            while True:
                # Tear down if client buffer is empty and connection is inactive
                if self.is_inactive():
                    logger.debug(
                        'Client buffer is empty and maximum inactivity has reached '
                        'between client and server connection, tearing down...',
                    )
                    break
                if loop.run_until_complete(self._run_once()):
                    break
        except KeyboardInterrupt:  # pragma: no cover
            pass
        except ssl.SSLError as e:
            logger.exception('ssl.SSLError', exc_info=e)
        except Exception as e:
            logger.exception(
                'Exception while handling connection %r' %
                self.work.connection, exc_info=e,
            )
        finally:
            self.shutdown()
            loop.close()

    async def _run_once(self) -> bool:
        """Single threaded-mode iteration: select events, dispatch, then unregister fds."""
        events, readables, writables = await self._selected_events()
        try:
            return await self.handle_events(readables, writables)
        finally:
            assert self.selector
            # TODO: Like Threadless we should not unregister
            # work fds repeatedly.
            for fd in events:
                self.selector.unregister(fd)

    # FIXME: Returning events is only necessary because we cannot use async context manager
    # for < Python 3.8. As a reason, this method is no longer a context manager and caller
    # is responsible for unregistering the descriptors.
    async def _selected_events(self) -> Tuple[Dict[int, int], Readables, Writables]:
        """Register interesting fds with the selector and split the ready set into readables/writables."""
        assert self.selector
        events = await self.get_events()
        for fd in events:
            self.selector.register(fd, events[fd])
        ev = self.selector.select(timeout=DEFAULT_SELECTOR_SELECT_TIMEOUT)
        readables = []
        writables = []
        for key, mask in ev:
            if mask & selectors.EVENT_READ:
                readables.append(key.fileobj)
            if mask & selectors.EVENT_WRITE:
                writables.append(key.fileobj)
        return (events, readables, writables)

    def _flush(self) -> None:
        """Block (threaded mode only) until the pending client buffer has been fully written."""
        assert self.selector
        logger.debug('Flushing pending data')
        try:
            self.selector.register(
                self.work.connection,
                selectors.EVENT_WRITE,
            )
            while self.work.has_buffer():
                logging.debug('Waiting for client read ready')
                ev: List[
                    Tuple[selectors.SelectorKey, int]
                ] = self.selector.select(timeout=DEFAULT_SELECTOR_SELECT_TIMEOUT)
                if len(ev) == 0:
                    continue
                self.work.flush()
        except BrokenPipeError:
            pass
        finally:
            self.selector.unregister(self.work.connection)
| 40.036232
| 113
| 0.587511
|
4a03ab8dc9e65675235aba95f7b0a963f108eda5
| 262
|
py
|
Python
|
alinsco_auth_server/users/apps.py
|
sregdorr/django-auth-server
|
174dfe96c66ed7187c69686c32481c92c97a31a5
|
[
"MIT"
] | null | null | null |
alinsco_auth_server/users/apps.py
|
sregdorr/django-auth-server
|
174dfe96c66ed7187c69686c32481c92c97a31a5
|
[
"MIT"
] | null | null | null |
alinsco_auth_server/users/apps.py
|
sregdorr/django-auth-server
|
174dfe96c66ed7187c69686c32481c92c97a31a5
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class UsersAppConfig(AppConfig):
    """Django application configuration for the users app."""

    # Dotted Python path of the app package.
    name = "alinsco_auth_server.users"
    # Human-readable name shown e.g. in the Django admin.
    verbose_name = "Users"

    def ready(self):
        """Import signal handlers once the app registry is fully loaded."""
        try:
            # NOTE(review): this imports top-level ``users.signals`` while the
            # app itself lives at ``alinsco_auth_server.users`` — confirm the
            # intended module path; a mismatch is silently swallowed below.
            import users.signals  # noqa F401
        except ImportError:
            pass
| 18.714286
| 45
| 0.625954
|
4a03abd6550d92cf8ff4230f0f29b12cb96f51ee
| 34,484
|
py
|
Python
|
arc/species/speciesTest.py
|
goldmanm/ARC
|
e2fd97942cb50e3ccbf80ee344c8c9ca83f195de
|
[
"MIT"
] | null | null | null |
arc/species/speciesTest.py
|
goldmanm/ARC
|
e2fd97942cb50e3ccbf80ee344c8c9ca83f195de
|
[
"MIT"
] | null | null | null |
arc/species/speciesTest.py
|
goldmanm/ARC
|
e2fd97942cb50e3ccbf80ee344c8c9ca83f195de
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module contains unit tests of the arc.species.species module
"""
from __future__ import (absolute_import, division, print_function, unicode_literals)
import unittest
import os
import shutil
from rmgpy.molecule.molecule import Molecule
from rmgpy.species import Species
from rmgpy.reaction import Reaction
from arc.species.species import ARCSpecies, TSGuess, get_min_energy_conformer,\
determine_rotor_type, determine_rotor_symmetry, check_species_xyz
from arc.species.converter import get_xyz_string, get_xyz_matrix, molecules_from_xyz
from arc.settings import arc_path, default_levels_of_theory
from arc.rmgdb import make_rmg_database_object
from arc.scheduler import Scheduler
################################################################################
class TestARCSpecies(unittest.TestCase):
"""
Contains unit tests for the ARCSpecies class
"""
    @classmethod
    def setUpClass(cls):
        """
        A method that is run before all unit tests in this class.

        Builds one ARCSpecies fixture per supported construction path
        (RMG Species, xyz, SMILES, RMG Molecule, adjacency list, file).
        """
        cls.maxDiff = None
        # Method 1: RMG Species object (here by SMILES)
        cls.spc1_rmg = Species(molecule=[Molecule().fromSMILES(str('C=C[O]'))])  # delocalized radical + amine
        cls.spc1_rmg.label = str('vinoxy')
        cls.spc1 = ARCSpecies(rmg_species=cls.spc1_rmg)
        # Method 2: ARCSpecies object by XYZ (also give SMILES for thermo BAC)
        oh_xyz = str("""O       0.00000000    0.00000000   -0.12002167
        H       0.00000000    0.00000000    0.85098324""")
        cls.spc2 = ARCSpecies(label=str('OH'), xyz=oh_xyz, smiles=str('[OH]'), multiplicity=2, charge=0)
        # Method 3: ARCSpecies object by SMILES
        cls.spc3 = ARCSpecies(label=str('methylamine'), smiles=str('CN'), multiplicity=1, charge=0)
        # Method 4: ARCSpecies object by RMG Molecule object
        mol4 = Molecule().fromSMILES(str('C=CC'))
        cls.spc4 = ARCSpecies(label=str('propene'), mol=mol4, multiplicity=1, charge=0)
        # Method 5: ARCSpecies by AdjacencyList (to generate AdjLists, see https://rmg.mit.edu/molecule_search)
        n2h4_adj = str("""1 N u0 p1 c0 {2,S} {3,S} {4,S}
        2 N u0 p1 c0 {1,S} {5,S} {6,S}
        3 H u0 p0 c0 {1,S}
        4 H u0 p0 c0 {1,S}
        5 H u0 p0 c0 {2,S}
        6 H u0 p0 c0 {2,S}""")
        cls.spc5 = ARCSpecies(label=str('N2H4'), adjlist=n2h4_adj, multiplicity=1, charge=0)
        n3_xyz = str("""N      -1.1997440839    -0.1610052059     0.0274738287
        H      -1.4016624407    -0.6229695533    -0.8487034080
        H      -0.0000018759     1.2861082773     0.5926077870
        N       0.0000008520     0.5651072858    -0.1124621525
        H      -1.1294692206    -0.8709078271     0.7537518889
        N       1.1997613019    -0.1609980472     0.0274604887
        H       1.1294795781    -0.8708998550     0.7537444446
        H       1.4015274689    -0.6230592706    -0.8487058662""")
        cls.spc6 = ARCSpecies(label=str('N3'), xyz=n3_xyz, multiplicity=1, charge=0, smiles=str('NNN'))
        # Species read from a Gaussian input file shipped with the test data.
        xyz1 = os.path.join(arc_path, 'arc', 'testing', 'xyz', 'AIBN.gjf')
        cls.spc7 = ARCSpecies(label='AIBN', smiles=str('N#CC(C)(C)N=NC(C)(C)C#N'), xyz=xyz1)
        hso3_xyz = str("""S      -0.12383700    0.10918200   -0.21334200
        O       0.97332200   -0.98800100    0.31790100
        O      -1.41608500   -0.43976300    0.14487300
        O       0.32370100    1.42850400    0.21585900
        H       1.84477700   -0.57224200    0.35517700""")
        cls.spc8 = ARCSpecies(label=str('HSO3'), xyz=hso3_xyz, multiplicity=2, charge=0, smiles=str('O[S](=O)=O'))
        nh_s_adj = str("""1 N u0 p2 c0 {2,S}
        2 H u0 p0 c0 {1,S}""")
        nh_s_xyz = str("""N       0.50949998    0.00000000    0.00000000
        H      -0.50949998    0.00000000    0.00000000""")
        cls.spc9 = ARCSpecies(label=str('NH2(S)'), adjlist=nh_s_adj, xyz=nh_s_xyz, multiplicity=1, charge=0)
def test_conformers(self):
"""Test conformer generation"""
self.spc1.generate_conformers() # vinoxy has two res. structures, each is assigned two conformers (RDkit/ob)
self.assertEqual(len(self.spc1.conformers), 4)
self.assertEqual(len(self.spc1.conformers), len(self.spc1.conformer_energies))
def test_rmg_species_conversion_into_arc_species(self):
"""Test the conversion of an RMG species into an ARCSpecies"""
self.spc1_rmg.label = None
self.spc = ARCSpecies(rmg_species=self.spc1_rmg, label=str('vinoxy'))
self.assertEqual(self.spc.label, str('vinoxy'))
self.assertEqual(self.spc.multiplicity, 2)
self.assertEqual(self.spc.charge, 0)
    def test_determine_rotors(self):
        """Test determination of rotors in ARCSpecies"""
        self.spc1.determine_rotors()
        self.spc2.determine_rotors()
        self.spc3.determine_rotors()
        self.spc4.determine_rotors()
        self.spc5.determine_rotors()
        self.spc6.determine_rotors()
        # Expected rotor counts: OH (spc2) has none; N3 (spc6) has two.
        self.assertEqual(len(self.spc1.rotors_dict), 1)
        self.assertEqual(len(self.spc2.rotors_dict), 0)
        self.assertEqual(len(self.spc3.rotors_dict), 1)
        self.assertEqual(len(self.spc4.rotors_dict), 1)
        self.assertEqual(len(self.spc5.rotors_dict), 1)
        self.assertEqual(len(self.spc6.rotors_dict), 2)
        # Spot-check pivot/scan/top atom indices for the vinoxy rotor.
        self.assertEqual(self.spc1.rotors_dict[0][str('pivots')], [2, 3])
        self.assertEqual(self.spc1.rotors_dict[0][str('scan')], [4, 2, 3, 1])
        self.assertTrue(all([t in [2, 4, 5] for t in self.spc1.rotors_dict[0][str('top')]]))
        self.assertEqual(self.spc1.rotors_dict[0][str('times_dihedral_set')], 0)
        self.assertEqual(self.spc3.rotors_dict[0][str('pivots')], [1, 2])
        self.assertEqual(self.spc4.rotors_dict[0][str('pivots')], [1, 2])
        self.assertEqual(self.spc5.rotors_dict[0][str('pivots')], [1, 2])
        # N3 (spc6) has two N-N rotors with distinct pivots/scans/tops.
        self.assertEqual(self.spc6.rotors_dict[0][str('pivots')], [1, 4])
        self.assertEqual(self.spc6.rotors_dict[0][str('scan')], [2, 1, 4, 6])
        self.assertEqual(len(self.spc6.rotors_dict[0][str('top')]), 3)
        self.assertTrue(all([t in [1, 5, 2] for t in self.spc6.rotors_dict[0][str('top')]]))
        self.assertEqual(self.spc6.rotors_dict[1][str('pivots')], [4, 6])
        self.assertEqual(self.spc6.rotors_dict[1][str('scan')], [1, 4, 6, 7])
        self.assertEqual(len(self.spc6.rotors_dict[1][str('top')]), 3)
        self.assertTrue(all([t in [6, 7, 8] for t in self.spc6.rotors_dict[1][str('top')]]))
    def test_symmetry(self):
        """Test external symmetry and chirality determination"""
        # Each case assigns a final_xyz, runs determine_symmetry(), and checks
        # the resulting .optical_isomers and .external_symmetry attributes.
        allene = ARCSpecies(label=str('allene'), smiles=str('C=C=C'), multiplicity=1, charge=0)
        allene.final_xyz = """C  -1.01646       0.10640      -0.91445
H  -1.39000       1.03728      -1.16672
C   0.00000       0.00000       0.00000
C   1.01653      -0.10640       0.91438
H  -1.40975      -0.74420      -1.35206
H   0.79874      -0.20864       1.92036
H   2.00101      -0.08444       0.59842"""
        allene.determine_symmetry()
        self.assertEqual(allene.optical_isomers, 1)
        self.assertEqual(allene.external_symmetry, 4)
        ammonia = ARCSpecies(label=str('ammonia'), smiles=str('N'), multiplicity=1, charge=0)
        ammonia.final_xyz = """N  0.06617       0.20024       0.13886
H  -0.62578      -0.34119       0.63709
H  -0.32018       0.51306      -0.74036
H  0.87976      -0.37219      -0.03564"""
        ammonia.determine_symmetry()
        self.assertEqual(ammonia.optical_isomers, 1)
        self.assertEqual(ammonia.external_symmetry, 3)
        methane = ARCSpecies(label=str('methane'), smiles=str('C'), multiplicity=1, charge=0)
        methane.final_xyz = """C  0.00000       0.00000       0.00000
H  -0.29717       0.97009      -0.39841
H  1.08773      -0.06879       0.01517
H  -0.38523      -0.10991       1.01373
H  -0.40533      -0.79140      -0.63049"""
        methane.determine_symmetry()
        self.assertEqual(methane.optical_isomers, 1)
        self.assertEqual(methane.external_symmetry, 12)
        # A chiral center should yield two optical isomers.
        chiral = ARCSpecies(label=str('chiral'), smiles=str('C(C)(O)(N)'), multiplicity=1, charge=0)
        chiral.final_xyz = """C                 -0.49341625    0.37828349    0.00442108
H                 -1.56331545    0.39193350    0.01003359
N                  0.01167132    1.06479568    1.20212111
H                  1.01157784    1.05203730    1.19687531
H                 -0.30960193    2.01178202    1.20391932
O                 -0.03399634   -0.97590449    0.00184366
H                 -0.36384913   -1.42423238   -0.78033350
C                  0.02253835    1.09779040   -1.25561654
H                 -0.34510997    0.59808430   -2.12741255
H                 -0.32122209    2.11106387   -1.25369100
H                  1.09243518    1.08414066   -1.26122530"""
        chiral.determine_symmetry()
        self.assertEqual(chiral.optical_isomers, 2)
        self.assertEqual(chiral.external_symmetry, 1)
        s8 = ARCSpecies(label=str('s8'), smiles=str('S1SSSSSSS1'), multiplicity=1, charge=0)
        s8.final_xyz = """S   2.38341   0.12608   0.09413
S   1.45489   1.88955  -0.13515
S  -0.07226   2.09247   1.14966
S  -1.81072   1.52327   0.32608
S  -2.23488  -0.39181   0.74645
S  -1.60342  -1.62383  -0.70542
S   0.22079  -2.35820  -0.30909
S   1.66220  -1.25754  -1.16665"""
        s8.determine_symmetry()
        self.assertEqual(s8.optical_isomers, 1)
        self.assertEqual(s8.external_symmetry, 8)
        water = ARCSpecies(label=str('H2O'), smiles=str('O'), multiplicity=1, charge=0)
        water.final_xyz = """O   0.19927   0.29049  -0.11186
H   0.50770  -0.61852  -0.09124
H  -0.70697   0.32803   0.20310"""
        water.determine_symmetry()
        self.assertEqual(water.optical_isomers, 1)
        self.assertEqual(water.external_symmetry, 2)
    def test_xyz_format_conversion(self):
        """Test conversions from string to list xyz formats"""
        xyz_str0 = """N       2.24690600   -0.00006500    0.11597700
C      -1.05654800    1.29155000   -0.02642500
C      -1.05661400   -1.29150400   -0.02650600
C      -0.30514100    0.00000200    0.00533200
C       1.08358900   -0.00003400    0.06558000
H      -0.39168300    2.15448600   -0.00132500
H      -1.67242600    1.35091400   -0.93175000
H      -1.74185400    1.35367700    0.82742800
H      -0.39187100   -2.15447800    0.00045500
H      -1.74341400   -1.35278100    0.82619100
H      -1.67091600   -1.35164600   -0.93286400
"""
        xyz_list, atoms, x, y, z = get_xyz_matrix(xyz_str0)
        # test all forms of input for get_xyz_string():
        # symbols list, atomic-number list, and an RMG Molecule must all
        # round-trip back to the exact original string.
        xyz_str1 = get_xyz_string(xyz_list, symbol=atoms)
        xyz_str2 = get_xyz_string(xyz_list, number=[7, 6, 6, 6, 6, 1, 1, 1, 1, 1, 1])
        mol, _ = molecules_from_xyz(xyz_str0)
        xyz_str3 = get_xyz_string(xyz_list, mol=mol)
        self.assertEqual(xyz_str0, xyz_str1)
        self.assertEqual(xyz_str1, xyz_str2)
        self.assertEqual(xyz_str2, xyz_str3)
        self.assertEqual(atoms, ['N', 'C', 'C', 'C', 'C', 'H', 'H', 'H', 'H', 'H', 'H'])
        self.assertEqual(x, [2.246906, -1.056548, -1.056614, -0.305141, 1.083589, -0.391683, -1.672426, -1.741854,
                             -0.391871, -1.743414, -1.670916])
        self.assertEqual(y[1], 1.29155)
        self.assertEqual(z[-1], -0.932864)
    def test_is_linear(self):
        """Test determination of molecule linearity by xyz"""
        # Cases are constructed to cover linear, clearly bent, and
        # near-linear (should still be non-linear) geometries.
        xyz1 = """C  0.000000    0.000000    0.000000
O  0.000000    0.000000    1.159076
O  0.000000    0.000000   -1.159076"""  # a trivial case
        xyz2 = """S      -0.06618943   -0.12360663   -0.07631983
O      -0.79539707    0.86755487    1.02675668
O      -0.68919931    0.25421823   -1.34830853
N       0.01546439   -1.54297548    0.44580391
C       1.59721519    0.47861334    0.00711000
H       1.94428095    0.40772394    1.03719428
H       2.20318015   -0.14715186   -0.64755729
H       1.59252246    1.51178950   -0.33908352
H      -0.87856890   -2.02453514    0.38494433
H      -1.34135876    1.49608206    0.53295071"""  # a non linear molecule
        xyz3 = """N  0.0000000000     0.0000000000     0.3146069129
O   -1.0906813653    0.0000000000    -0.1376405244
O    1.0906813653    0.0000000000    -0.1376405244"""  # a non linear 3-atom molecule
        xyz4 = """N  0.0000000000     0.0000000000     0.1413439534
H   -0.8031792912    0.0000000000    -0.4947038368
H    0.8031792912    0.0000000000    -0.4947038368"""  # a non linear 3-atom molecule
        xyz5 = """S   -0.5417345330        0.8208150346        0.0000000000
O    0.9206183692        1.6432038228        0.0000000000
H   -1.2739176462        1.9692549926        0.0000000000"""  # a non linear 3-atom molecule
        xyz6 = """N  1.18784533    0.98526702    0.00000000
C  0.04124533    0.98526702    0.00000000
H  -1.02875467    0.98526702    0.00000000"""  # linear
        xyz7 = """C -4.02394116    0.56169428    0.00000000
H -5.09394116    0.56169428    0.00000000
C -2.82274116    0.56169428    0.00000000
H -1.75274116    0.56169428    0.00000000"""  # linear
        xyz8 = """C  -1.02600933    2.12845307    0.00000000
C  -0.77966935    0.95278385    0.00000000
H  -1.23666197    3.17751246    0.00000000
H  -0.56023545   -0.09447399    0.00000000"""  # just 0.5 degree off from linearity, so NOT linear...
        xyz9 = """O  -1.1998 0.1610 0.0275
O  -1.4021 0.6223 -0.8489
O  -1.48302 0.80682 -1.19946"""  # just 3 points in space on a straight line (not a physical molecule)
        spc1 = ARCSpecies(label=str('test_spc'), xyz=xyz1, multiplicity=1, charge=0, smiles=str('O=C=O'))
        spc2 = ARCSpecies(label=str('test_spc'), xyz=xyz2, multiplicity=1, charge=0, smiles=str('[NH-][S+](=O)(O)C'))
        spc3 = ARCSpecies(label=str('test_spc'), xyz=xyz3, multiplicity=2, charge=0, smiles=str('[O]N=O'))
        spc4 = ARCSpecies(label=str('test_spc'), xyz=xyz4, multiplicity=2, charge=0, smiles=str('[NH2]'))
        spc5 = ARCSpecies(label=str('test_spc'), xyz=xyz5, multiplicity=2, charge=0, smiles=str('[O]S'))
        spc6 = ARCSpecies(label=str('test_spc'), xyz=xyz6, multiplicity=1, charge=0, smiles=str('C#N'))
        spc7 = ARCSpecies(label=str('test_spc'), xyz=xyz7, multiplicity=1, charge=0, smiles=str('C#C'))
        spc8 = ARCSpecies(label=str('test_spc'), xyz=xyz8, multiplicity=1, charge=0, smiles=str('C#C'))
        spc9 = ARCSpecies(label=str('test_spc'), xyz=xyz9, multiplicity=1, charge=0, smiles=str('[O-][O+]=O'))
        self.assertTrue(spc1.is_linear())
        self.assertTrue(spc6.is_linear())
        self.assertTrue(spc7.is_linear())
        self.assertTrue(spc9.is_linear())
        self.assertFalse(spc2.is_linear())
        self.assertFalse(spc3.is_linear())
        self.assertFalse(spc4.is_linear())
        self.assertFalse(spc5.is_linear())
        self.assertFalse(spc8.is_linear())
    def test_charge_and_multiplicity(self):
        """Test determination of molecule charge and multiplicity"""
        # Species are constructed from four different input forms (Molecule,
        # SMILES, adjacency list, raw xyz) without explicit charge/multiplicity,
        # so ARCSpecies must infer both.
        spc1 = ARCSpecies(label='spc1', mol=Molecule(SMILES=str('C[CH]C')), generate_thermo=False)
        spc2 = ARCSpecies(label='spc2', mol=Molecule(SMILES=str('CCC')), generate_thermo=False)
        spc3 = ARCSpecies(label='spc3', smiles=str('N[NH]'), generate_thermo=False)
        spc4 = ARCSpecies(label='spc4', smiles=str('NNN'), generate_thermo=False)
        adj1 = """multiplicity 2
               1 O u1 p2 c0 {2,S}
               2 H u0 p0 c0 {1,S}
               """
        adj2 = """1 C u0 p0 c0 {2,S} {4,S} {5,S} {6,S}
               2 N u0 p1 c0 {1,S} {3,S} {7,S}
               3 O u0 p2 c0 {2,S} {8,S}
               4 H u0 p0 c0 {1,S}
               5 H u0 p0 c0 {1,S}
               6 H u0 p0 c0 {1,S}
               7 H u0 p0 c0 {2,S}
               8 H u0 p0 c0 {3,S}
               """
        spc5 = ARCSpecies(label='spc5', adjlist=str(adj1), generate_thermo=False)
        spc6 = ARCSpecies(label='spc6', adjlist=str(adj2), generate_thermo=False)
        xyz1 = """O       0.00000000    0.00000000   -0.10796235
H       0.00000000    0.00000000    0.86318839"""
        xyz2 = """N      -0.74678912   -0.11808620    0.00000000
C       0.70509190    0.01713703    0.00000000
H       1.11547042   -0.48545356    0.87928385
H       1.11547042   -0.48545356   -0.87928385
H       1.07725194    1.05216961    0.00000000
H      -1.15564250    0.32084669    0.81500594
H      -1.15564250    0.32084669   -0.81500594"""
        spc7 = ARCSpecies(label='spc7', xyz=xyz1, generate_thermo=False)
        spc8 = ARCSpecies(label='spc8', xyz=xyz2, generate_thermo=False)
        # All inputs are neutral species.
        self.assertEqual(spc1.charge, 0)
        self.assertEqual(spc2.charge, 0)
        self.assertEqual(spc3.charge, 0)
        self.assertEqual(spc4.charge, 0)
        self.assertEqual(spc5.charge, 0)
        self.assertEqual(spc6.charge, 0)
        self.assertEqual(spc7.charge, 0)
        self.assertEqual(spc8.charge, 0)
        # Radicals (spc1, spc3, spc5, spc7) are doublets; the rest singlets.
        self.assertEqual(spc1.multiplicity, 2)
        self.assertEqual(spc2.multiplicity, 1)
        self.assertEqual(spc3.multiplicity, 2)
        self.assertEqual(spc4.multiplicity, 1)
        self.assertEqual(spc5.multiplicity, 2)
        self.assertEqual(spc6.multiplicity, 1)
        self.assertEqual(spc7.multiplicity, 2)
        self.assertEqual(spc8.multiplicity, 1)
    def test_as_dict(self):
        """Test Species.as_dict()"""
        spc_dict = self.spc3.as_dict()
        # 'long_thermo_description' is copied from the actual result since its
        # exact text is not under test here.
        expected_dict = {'optical_isomers': None,
                         'number_of_rotors': 0,
                         'neg_freqs_trshed': [],
                         'external_symmetry': None,
                         'multiplicity': 1,
                         'arkane_file': None,
                         'E0': None,
                         'mol': """1 C u0 p0 c0 {2,S} {3,S} {4,S} {5,S}
2 N u0 p1 c0 {1,S} {6,S} {7,S}
3 H u0 p0 c0 {1,S}
4 H u0 p0 c0 {1,S}
5 H u0 p0 c0 {1,S}
6 H u0 p0 c0 {2,S}
7 H u0 p0 c0 {2,S}
""",
                         'generate_thermo': True,
                         'label': 'methylamine',
                         'long_thermo_description': spc_dict['long_thermo_description'],
                         'charge': 0,
                         'is_ts': False,
                         'final_xyz': '',
                         't1': None,
                         'bond_corrections': {'C-H': 3, 'C-N': 1, 'H-N': 2},
                         'rotors_dict': {}}
        self.assertEqual(spc_dict, expected_dict)
def test_from_dict(self):
"""Test Species.from_dict()"""
species_dict = self.spc2.as_dict()
spc = ARCSpecies(species_dict=species_dict)
self.assertEqual(spc.multiplicity, 2)
self.assertEqual(spc.charge, 0)
self.assertEqual(spc.label, 'OH')
self.assertEqual(spc.mol.toSMILES(), '[OH]')
self.assertFalse(spc.is_ts)
def test_determine_rotor_type(self):
"""Test that we correctly determine whether a rotor is FreeRotor or HinderedRotor"""
free_path = os.path.join(arc_path, 'arc', 'testing', 'rotor_scans', 'CH3C(O)O_FreeRotor.out')
hindered_path = os.path.join(arc_path, 'arc', 'testing', 'rotor_scans', 'H2O2.out')
self.assertEqual(determine_rotor_type(free_path), 'FreeRotor')
self.assertEqual(determine_rotor_type(hindered_path), 'HinderedRotor')
def test_rotor_symmetry(self):
"""Test that ARC automatically determines a correct rotor symmetry"""
path1 = os.path.join(arc_path, 'arc', 'testing', 'rotor_scans', 'OOC1CCOCO1.out') # symmetry = 1; min at -10 o
path2 = os.path.join(arc_path, 'arc', 'testing', 'rotor_scans', 'H2O2.out') # symmetry = 1
path3 = os.path.join(arc_path, 'arc', 'testing', 'rotor_scans', 'N2O3.out') # symmetry = 2
path4 = os.path.join(arc_path, 'arc', 'testing', 'rotor_scans', 'sBuOH.out') # symmetry = 3
path5 = os.path.join(arc_path, 'arc', 'testing', 'rotor_scans', 'CH3C(O)O_FreeRotor.out') # symmetry = 6
symmetry1, _ = determine_rotor_symmetry(rotor_path=path1, label='label', pivots=[3,4])
symmetry2, _ = determine_rotor_symmetry(rotor_path=path2, label='label', pivots=[3,4])
symmetry3, _ = determine_rotor_symmetry(rotor_path=path3, label='label', pivots=[3,4])
symmetry4, _ = determine_rotor_symmetry(rotor_path=path4, label='label', pivots=[3,4])
symmetry5, _ = determine_rotor_symmetry(rotor_path=path5, label='label', pivots=[3,4])
self.assertEqual(symmetry1, 1)
self.assertEqual(symmetry2, 1)
self.assertEqual(symmetry3, 2)
self.assertEqual(symmetry4, 3)
self.assertEqual(symmetry5, 6)
    def test_xyz_from_file(self):
        """Test parsing xyz from a file and saving it in the .initial_xyz attribute"""
        # spc7 was created from an xyz file in setUpClass; spot-check one atom line.
        self.assertTrue(' N                 -2.36276900    2.14528400   -0.76917500' in self.spc7.initial_xyz)
    def test_check_species_xyz(self):
        """Test the check_xyz() function"""
        # check_species_xyz() accepts either a raw (possibly padded) xyz string
        # or a path to an .xyz file, and returns a normalized xyz string.
        xyz = """
        
        
 C                 -0.67567701    1.18507660    0.04672449
 H                 -0.25592948    1.62415961    0.92757746
 H                 -2.26870864    1.38030564    0.05865317
 O                 -0.36671999   -0.21081064    0.01630374
 H                 -0.73553821   -0.63718986    0.79332805
 C                 -0.08400571    1.86907236   -1.19973252
 
 H                 -0.50375517    1.42998100   -2.08057962
 H                 -0.31518819    2.91354759   -1.17697025
 H                  0.97802159    1.73893214   -1.20769117
 O                 -3.69788377    1.55609096    0.07050345
 O                 -4.28667752    0.37487691    0.04916102
 H                 -4.01978712   -0.12970163    0.82103635
 
 
 """
        expected_xyz1 = """ C                 -0.67567701    1.18507660    0.04672449
 H                 -0.25592948    1.62415961    0.92757746
 H                 -2.26870864    1.38030564    0.05865317
 O                 -0.36671999   -0.21081064    0.01630374
 H                 -0.73553821   -0.63718986    0.79332805
 C                 -0.08400571    1.86907236   -1.19973252
 H                 -0.50375517    1.42998100   -2.08057962
 H                 -0.31518819    2.91354759   -1.17697025
 H                  0.97802159    1.73893214   -1.20769117
 O                 -3.69788377    1.55609096    0.07050345
 O                 -4.28667752    0.37487691    0.04916102
 H                 -4.01978712   -0.12970163    0.82103635"""
        new_xyz1 = check_species_xyz(xyz)
        self.assertEqual(new_xyz1, expected_xyz1)
        xyz_path = os.path.join(arc_path, 'arc', 'testing', 'xyz', 'CH3C(O)O.xyz')
        expected_xyz2 = """O      -0.53466300   -1.24850800   -0.02156300
O      -0.79314200    1.04818800    0.18134200
C      -0.02397300    0.01171700   -0.37827400
C       1.40511900    0.21728200    0.07675200
H      -0.09294500    0.02877800   -1.47163200
H       2.04132100   -0.57108600   -0.32806800
H       1.45535600    0.19295200    1.16972300
H       1.77484100    1.18704300   -0.25986700
H      -0.43701200   -1.34990600    0.92900600
H      -1.69944700    0.93441600   -0.11271200"""
        new_xyz2 = check_species_xyz(xyz_path)
        self.assertEqual(new_xyz2, expected_xyz2)
def test_get_min_energy_conformer(self):
"""Test that the xyz with the minimum specified energy is returned from get_min_energy_conformer()"""
xyzs = ['xyz1', 'xyz2', 'xyz3']
energies = [-5, -30, -1.5]
min_xyz = get_min_energy_conformer(xyzs, energies)
self.assertEqual(min_xyz, 'xyz2')
    def test_mol_from_xyz_atom_id_1(self):
        """Test that atom ids are saved properly when loading both xyz and smiles."""
        mol = self.spc6.mol
        mol_list = self.spc6.mol_list
        # spc6 has a single resonance structure.
        self.assertEqual(len(mol_list), 1)
        res = mol_list[0]
        self.assertTrue(mol.atomIDValid())
        self.assertTrue(res.atomIDValid())
        # The resonance structure must match the parent molecule, including ids.
        self.assertTrue(mol.isIsomorphic(res))
        self.assertTrue(mol.isIdentical(res))
    def test_mol_from_xyz_atom_id_2(self):
        """Test that atom ids are saved properly when loading both xyz and smiles."""
        mol = self.spc8.mol
        mol_list = self.spc8.mol_list
        # spc8 has two resonance structures.
        self.assertEqual(len(mol_list), 2)
        res1, res2 = mol_list
        self.assertTrue(mol.atomIDValid())
        self.assertTrue(res1.atomIDValid())
        self.assertTrue(res2.atomIDValid())
        self.assertTrue(mol.isIsomorphic(res1))
        self.assertTrue(mol.isIdentical(res1))
        # Check that atom ordering is consistent, ignoring specific oxygen ordering
        mol_ids = [(a.element.symbol, a.id) if a.element.symbol != 'O' else (a.element.symbol,) for a in mol.atoms]
        res1_ids = [(a.element.symbol, a.id) if a.element.symbol != 'O' else (a.element.symbol,) for a in res1.atoms]
        res2_ids = [(a.element.symbol, a.id) if a.element.symbol != 'O' else (a.element.symbol,) for a in res2.atoms]
        self.assertEqual(mol_ids, res1_ids)
        self.assertEqual(mol_ids, res2_ids)
def test_preserving_multiplicity(self):
"""Test that multiplicity is being preserved, especially when it is guessed differently from xyz"""
multiplicity_list = [2, 2, 1, 1, 1, 1, 1, 2, 1]
for i, spc in enumerate([self.spc1, self.spc2, self.spc3, self.spc4, self.spc5, self.spc6, self.spc7,
self.spc8, self.spc9]):
self.assertEqual(spc.multiplicity, multiplicity_list[i])
self.assertEqual(spc.mol.multiplicity, multiplicity_list[i])
self.assertTrue(all([structure.multiplicity == spc.multiplicity for structure in spc.mol_list]))
    def test_append_conformers(self):
        """Test that ARC correctly parses its own conformer files"""
        # A Scheduler writes conformer xyzs (with and without energies) to
        # text files; ARCSpecies must be able to read them back via
        # `conformers_path`.
        ess_settings = {'gaussian': 'server1', 'molpro': 'server2', 'qchem': 'server1', 'ssh': False}
        project_directory = os.path.join(arc_path, 'Projects', 'arc_project_for_testing_delete_after_usage4')
        spc1 = ARCSpecies(label=str('vinoxy'), smiles=str('C=C[O]'))
        rmgdb = make_rmg_database_object()
        sched1 = Scheduler(project='project_test', ess_settings=ess_settings, species_list=[spc1],
                           composite_method='', conformer_level=default_levels_of_theory['conformer'],
                           opt_level=default_levels_of_theory['opt'], freq_level=default_levels_of_theory['freq'],
                           sp_level=default_levels_of_theory['sp'], scan_level=default_levels_of_theory['scan'],
                           ts_guess_level=default_levels_of_theory['ts_guesses'], rmgdatabase=rmgdb,
                           project_directory=project_directory, generate_conformers=True, testing=True,
                           orbitals_level=default_levels_of_theory['orbitals'])
        xyzs = ["""O      1.09068700    0.26516800   -0.16706300
C      2.92204100   -1.18335700   -0.38884900
C      2.27655500   -0.00373900    0.08543500
H      2.36544800   -1.88781000   -0.99914600
H      3.96112000   -1.38854500   -0.14958800
H      2.87813500    0.68828400    0.70399400
""",
                """O      1.19396100   -0.06003700    0.03890100
C      3.18797000    0.77061300   -0.87352700
C      2.43591200   -0.04439300    0.02171600
H      4.27370000    0.76090200   -0.86286100
H      2.66641700    1.41155700   -1.57757300
H      3.00398000   -0.68336800    0.72359800
""",
                """O      1.35241100   -1.02956000   -0.24056200
C     -0.72084300    0.01308200    0.09573000
C      0.69217700    0.01185100   -0.09044300
H     -1.25803800   -0.93018100    0.10926800
H     -1.26861200    0.94177100    0.22420100
H      1.20290400    0.99303700   -0.09819400
""",
                """O     -1.40102900   -0.98575100   -0.11588500
C      0.72457000   -0.01076700    0.06448800
C     -0.69494600    0.03450000   -0.06206300
H      1.22539000   -0.97248000    0.11741200
H      1.31277400    0.90087100    0.10878400
H     -1.16675800    1.03362600   -0.11273700"""]
        energies = [0, 5, 5, 5]  # J/mol
        # Without energies -> "before optimization" file; with -> "after".
        sched1.save_conformers_file(label='vinoxy', xyzs=xyzs)
        self.assertTrue(os.path.isfile(os.path.join(project_directory, 'output', 'Species', 'vinoxy', 'geometry',
                                                    'conformers_before_optimization.txt')))
        sched1.save_conformers_file(label='vinoxy', xyzs=xyzs, energies=energies)
        self.assertTrue(os.path.isfile(os.path.join(project_directory, 'output', 'Species', 'vinoxy', 'geometry',
                                                    'conformers_after_optimization.txt')))
        spc2 = ARCSpecies(label=str('vinoxy'), smiles=str('C=C[O]'), conformers_path=os.path.join(project_directory,
                          'output', 'Species', 'vinoxy', 'geometry', 'conformers_before_optimization.txt'))
        spc3 = ARCSpecies(label=str('vinoxy'), smiles=str('C=C[O]'), conformers_path=os.path.join(project_directory,
                          'output', 'Species', 'vinoxy', 'geometry', 'conformers_after_optimization.txt'))
        # Round trip: parsed conformers (and energies) match what was written.
        self.assertEqual(spc2.conformers[2], xyzs[2])
        self.assertEqual(spc3.conformers[2], xyzs[2])
        self.assertEqual(spc3.conformer_energies[2], energies[2])
    def test_the_number_of_atoms_property(self):
        """Test that the number_of_atoms property functions correctly"""
        self.assertEqual(self.spc1.number_of_atoms, 6)
        self.assertEqual(self.spc2.number_of_atoms, 2)
        self.assertEqual(self.spc3.number_of_atoms, 7)
        self.assertEqual(self.spc4.number_of_atoms, 9)
        self.assertEqual(self.spc5.number_of_atoms, 6)
        self.assertEqual(self.spc6.number_of_atoms, 8)
        self.assertEqual(self.spc7.number_of_atoms, 24)
        self.assertEqual(self.spc8.number_of_atoms, 5)
        self.assertEqual(self.spc9.number_of_atoms, 2)
        # Also check a species built from xyz only (no SMILES/Molecule given).
        xyz10 = """N       0.82269400    0.19834500   -0.33588000
C      -0.57469800   -0.02442800    0.04618900
H      -1.08412400   -0.56416500   -0.75831900
H      -0.72300600   -0.58965300    0.98098100
H      -1.07482500    0.94314300    0.15455500
H       1.31266200   -0.68161600   -0.46770200
H       1.32129900    0.71837500    0.38017700
"""
        spc10 = ARCSpecies(label='spc10', xyz=xyz10)
        self.assertEqual(spc10.number_of_atoms, 7)
@classmethod
def tearDownClass(cls):
"""
A function that is run ONCE after all unit tests in this class.
Delete all project directories created during these unit tests
"""
projects = ['arc_project_for_testing_delete_after_usage4']
for project in projects:
project_directory = os.path.join(arc_path, 'Projects', project)
shutil.rmtree(project_directory)
class TestTSGuess(unittest.TestCase):
    """
    Contains unit tests for the TSGuess class
    """
    @classmethod
    def setUpClass(cls):
        """
        A method that is run before all unit tests in this class.
        """
        cls.maxDiff = None
        # tsg1: built from an RMG reaction; tsg2: built directly from xyz.
        spc1 = Species().fromSMILES(str('CON=O'))
        spc1.label = str('CONO')
        spc2 = Species().fromSMILES(str('C[N+](=O)[O-]'))
        spc2.label = str('CNO2')
        rmg_reaction = Reaction(reactants=[spc1], products=[spc2])
        cls.tsg1 = TSGuess(rmg_reaction=rmg_reaction, method='AutoTST', family='H_Abstraction')
        xyz = """N       0.9177905887     0.5194617797     0.0000000000
H       1.8140204898     1.0381941417     0.0000000000
H      -0.4763167868     0.7509348722     0.0000000000
N       0.9992350860    -0.7048575683     0.0000000000
N      -1.4430010939     0.0274543367     0.0000000000
H      -0.6371484821    -0.7497769134     0.0000000000
H      -2.0093636431     0.0331190314    -0.8327683174
H      -2.0093636431     0.0331190314     0.8327683174"""
        cls.tsg2 = TSGuess(xyz=xyz)
    def test_as_dict(self):
        """Test TSGuess.as_dict()"""
        tsg_dict = self.tsg1.as_dict()
        # Note: the 'AutoTST' method name is stored lower-cased.
        expected_dict = {'method': u'autotst',
                         'energy': None,
                         'family': 'H_Abstraction',
                         'index': None,
                         'rmg_reaction': u'CON=O <=> [O-][N+](=O)C',
                         'success': None,
                         't0': None,
                         'execution_time': None}
        self.assertEqual(tsg_dict, expected_dict)
    def test_from_dict(self):
        """Test TSGuess.from_dict()
        Also tests that the round trip to and from a dictionary ended in an RMG Reaction object"""
        ts_dict = self.tsg1.as_dict()
        tsg = TSGuess(ts_dict=ts_dict)
        self.assertEqual(tsg.method, 'autotst')
        self.assertTrue(isinstance(tsg.rmg_reaction, Reaction))
################################################################################
# Run this test module directly with per-test verbose output.
if __name__ == '__main__':
    unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
| 51.855639
| 119
| 0.570699
|
4a03ac7ddde42681aedea307d4cd1b814c863003
| 236
|
py
|
Python
|
Exercicios/ex029.py
|
mauroalbuquerque/Python-CursoEmVideo
|
5a9fcbd878af49d7b8aa3f7d904b1f22e643edd8
|
[
"MIT"
] | null | null | null |
Exercicios/ex029.py
|
mauroalbuquerque/Python-CursoEmVideo
|
5a9fcbd878af49d7b8aa3f7d904b1f22e643edd8
|
[
"MIT"
] | null | null | null |
Exercicios/ex029.py
|
mauroalbuquerque/Python-CursoEmVideo
|
5a9fcbd878af49d7b8aa3f7d904b1f22e643edd8
|
[
"MIT"
] | null | null | null |
# Speed-camera exercise ("Radar de Transito"): fine of R$ 7.00 per km/h
# above the 80 km/h limit.
velocidade = float(input('Digite a velocidade do veiculo: '))
if velocidade <= 80:
    print('Velocidade dentro do limite!')
else:
    multa = (velocidade - 80) * 7
    print(velocidade, f'Multa: R$ {multa}')
| 26.222222
| 61
| 0.669492
|
4a03acc5d36129a3f64d2a850e3cf53f8b3182f3
| 1,228
|
py
|
Python
|
AppTaskQueue/test/unit/test_taskqueue_server.py
|
Honcharov12/appscale
|
be1cf90fcd24f1a5a88848f7eb73331b6e4e66d9
|
[
"Apache-2.0"
] | null | null | null |
AppTaskQueue/test/unit/test_taskqueue_server.py
|
Honcharov12/appscale
|
be1cf90fcd24f1a5a88848f7eb73331b6e4e66d9
|
[
"Apache-2.0"
] | 1
|
2019-10-15T15:57:53.000Z
|
2019-10-15T15:57:53.000Z
|
AppTaskQueue/test/unit/test_taskqueue_server.py
|
Honcharov12/appscale
|
be1cf90fcd24f1a5a88848f7eb73331b6e4e66d9
|
[
"Apache-2.0"
] | 1
|
2019-08-27T05:19:48.000Z
|
2019-08-27T05:19:48.000Z
|
#!/usr/bin/env python
import unittest
from mock import MagicMock, patch
from appscale.common import file_io
from appscale.taskqueue import distributed_tq
class TestDistributedTaskQueue(unittest.TestCase):
  """
  A set of test cases for the distributed taskqueue module
  """
  def setUp(self):
    # Stub out file reads so the code under test never touches the
    # filesystem; every read returns a fixed private IP string.
    patcher = patch.object(file_io, 'read', return_value='192.168.0.1')
    self._read_patcher = patcher
    self.read_mock = patcher.start()

  def tearDown(self):
    self._read_patcher.stop()

  @staticmethod
  def test_distributed_tq_initialization():
    # The queue should construct cleanly given mocked datastore and
    # ZooKeeper handles.
    distributed_tq.DistributedTaskQueue(MagicMock(), MagicMock())

  # TODO:
  # def test_fetch_queue_stats(self):
  # def test_delete(self):
  # def test_purge_queue(self):
  # def test_query_and_own_tasks(self):
  # def test_bulk_add(self):
  # def test_modify_task_lease(self):
  # def test_update_queue(self):
  # def test_fetch_queue(self):
  # def test_query_tasks(self):
  # def test_fetch_task(self):
  # def test_force_run(self):
  # def test_delete_queue(self):
  # def test_pause_queue(self):
  # def test_delete_group(self):
  # def test_update_storage_limit(self):
# Allow running this test module directly.
if __name__ == "__main__":
  unittest.main()
| 25.583333
| 61
| 0.732899
|
4a03ae89961de0ac9090db7b314318529674159f
| 9,437
|
py
|
Python
|
airflow/providers/qubole/hooks/qubole.py
|
Adil-Ibragimov/airflow
|
e565368f2e988a06b0398b77e66859c138905ae7
|
[
"Apache-2.0"
] | 1
|
2020-08-05T18:30:40.000Z
|
2020-08-05T18:30:40.000Z
|
airflow/providers/qubole/hooks/qubole.py
|
Adil-Ibragimov/airflow
|
e565368f2e988a06b0398b77e66859c138905ae7
|
[
"Apache-2.0"
] | 20
|
2021-01-23T12:33:08.000Z
|
2021-12-07T22:30:37.000Z
|
airflow/providers/qubole/hooks/qubole.py
|
Adil-Ibragimov/airflow
|
e565368f2e988a06b0398b77e66859c138905ae7
|
[
"Apache-2.0"
] | 1
|
2020-09-10T09:51:46.000Z
|
2020-09-10T09:51:46.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""Qubole hook"""
import datetime
import logging
import os
import pathlib
import time
from qds_sdk.commands import (
Command,
DbExportCommand,
DbImportCommand,
DbTapQueryCommand,
HadoopCommand,
HiveCommand,
PigCommand,
PrestoCommand,
ShellCommand,
SparkCommand,
SqlCommand,
)
from qds_sdk.qubole import Qubole
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.hooks.base_hook import BaseHook
from airflow.utils.state import State
# Module-level logger used by the static retry handler below.
log = logging.getLogger(__name__)
# Maps the Airflow-facing ``command_type`` string to the qds_sdk command
# class that runs it on Qubole.
COMMAND_CLASSES = {
    "hivecmd": HiveCommand,
    "prestocmd": PrestoCommand,
    "hadoopcmd": HadoopCommand,
    "shellcmd": ShellCommand,
    "pigcmd": PigCommand,
    "sparkcmd": SparkCommand,
    "dbtapquerycmd": DbTapQueryCommand,
    "dbexportcmd": DbExportCommand,
    "dbimportcmd": DbImportCommand,
    "sqlcmd": SqlCommand,
}
# Command types that also accept positional (non ``--option``) arguments,
# and the kwarg names those positionals arrive under.
POSITIONAL_ARGS = {'hadoopcmd': ['sub_command'], 'shellcmd': ['parameters'], 'pigcmd': ['parameters']}
def flatten_list(list_of_lists):
    """Flatten one level of nesting: concatenate all sub-lists in order."""
    flat = []
    for sublist in list_of_lists:
        flat.extend(sublist)
    return flat
def filter_options(options):
    """Return *options* with Airflow-managed flags removed."""
    excluded = ("help", "print-logs-live", "print-logs")
    return [opt for opt in options if opt not in excluded]
def get_options_list(command_class):
    """Return the command class's optparse option names, minus filtered ones."""
    names = []
    for option in command_class.optparser.option_list:
        # e.g. "--cluster-label" -> "cluster-label" (strip() removes the dashes)
        names.append(option.get_opt_string().strip("--"))
    return filter_options(names)
def build_command_args():
    """Build the per-command option-name lists and the set of options whose
    hyphens were rewritten to underscores (so they can be passed as kwargs)."""
    command_args = {}
    hyphen_args = set()
    for cmd, cmd_class in COMMAND_CLASSES.items():
        # All options the qds_sdk class accepts, plus any positional args.
        opts = get_options_list(cmd_class)
        opts += POSITIONAL_ARGS.get(cmd, [])
        normalized = []
        for opt in opts:
            if "-" in opt:
                # Hyphens are illegal in Python identifiers; remember which
                # options were rewritten so they can be mapped back later.
                opt = opt.replace("-", "_")
                hyphen_args.add(opt)
            normalized.append(opt)
        command_args[cmd] = normalized
    return command_args, list(hyphen_args)
# Precomputed at import time: per-command option names, and the list of
# options whose hyphens were rewritten to underscores.
COMMAND_ARGS, HYPHEN_ARGS = build_command_args()
class QuboleHook(BaseHook):
    """Hook for Qubole communication.

    Wraps the qds_sdk command classes: creates a command on Qubole, polls it
    to completion, and exposes helpers to fetch results/logs/jobs and cancel.
    """

    def __init__(self, *args, **kwargs):  # pylint: disable=unused-argument
        super().__init__()
        conn = self.get_connection(kwargs['qubole_conn_id'])
        # Configures the qds_sdk globally with this connection's credentials.
        Qubole.configure(api_token=conn.password, api_url=conn.host)
        self.task_id = kwargs['task_id']
        self.dag_id = kwargs['dag'].dag_id
        self.kwargs = kwargs
        # qds_sdk command class matching the requested command_type
        self.cls = COMMAND_CLASSES[self.kwargs['command_type']]
        self.cmd = None
        self.task_instance = None

    @staticmethod
    def handle_failure_retry(context):
        """Handle retries in case of failures.

        If the previous attempt's command already finished successfully on
        Qubole, mark the task instance as a success; if it is still running,
        cancel it so the retry starts from a clean slate.
        """
        ti = context['ti']
        cmd_id = ti.xcom_pull(key='qbol_cmd_id', task_ids=ti.task_id)

        if cmd_id is not None:
            cmd = Command.find(cmd_id)
            if cmd is not None:
                if cmd.status == 'done':
                    log.info('Command ID: %s has been succeeded, hence marking this TI as Success.', cmd_id)
                    ti.state = State.SUCCESS
                elif cmd.status == 'running':
                    log.info('Cancelling the Qubole Command Id: %s', cmd_id)
                    cmd.cancel()

    def execute(self, context):
        """Create the Qubole command and poll it until it reaches a terminal state.

        Pushes the command id to XCom (key ``qbol_cmd_id``) so that retries,
        result fetching, and cancellation can locate the remote command.

        :raises AirflowException: if the command ends in any status other
            than ``done``.
        """
        args = self.cls.parse(self.create_cmd_args(context))
        self.cmd = self.cls.create(**args)
        self.task_instance = context['task_instance']
        context['task_instance'].xcom_push(key='qbol_cmd_id', value=self.cmd.id)
        self.log.info("Qubole command created with Id: %s and Status: %s", self.cmd.id, self.cmd.status)

        while not Command.is_done(self.cmd.status):
            time.sleep(Qubole.poll_interval)
            self.cmd = self.cls.find(self.cmd.id)
            self.log.info("Command Id: %s and Status: %s", self.cmd.id, self.cmd.status)

        if 'fetch_logs' in self.kwargs and self.kwargs['fetch_logs'] is True:
            self.log.info("Logs for Command Id: %s \n%s", self.cmd.id, self.cmd.get_log())

        if self.cmd.status != 'done':
            raise AirflowException(
                'Command Id: {0} failed with Status: {1}'.format(self.cmd.id, self.cmd.status)
            )

    def kill(self, ti):
        """
        Kill (cancel) a Qubole command

        :param ti: Task Instance of the dag, used to determine the Quboles command id
        :return: response from Qubole
        """
        if self.cmd is None:
            # Fall back to the task instance captured in execute() if the
            # caller did not provide one.
            if not ti and not self.task_instance:
                raise Exception("Unable to cancel Qubole Command, context is unavailable!")
            elif not ti:
                ti = self.task_instance
            cmd_id = ti.xcom_pull(key="qbol_cmd_id", task_ids=ti.task_id)
            self.cmd = self.cls.find(cmd_id)
        if self.cls and self.cmd:
            self.log.info('Sending KILL signal to Qubole Command Id: %s', self.cmd.id)
            self.cmd.cancel()

    def get_results(self, ti=None, fp=None, inline=True, delim=None, fetch=True):
        """
        Get results (or just s3 locations) of a command from Qubole and save into a file

        :param ti: Task Instance of the dag, used to determine the Quboles command id
        :param fp: Optional file pointer, will create one and return if None passed
        :param inline: True to download actual results, False to get s3 locations only
        :param delim: Replaces the CTL-A chars with the given delim, defaults to ','
        :param fetch: when inline is True, get results directly from s3 (if large)
        :return: file location containing actual results or s3 locations of results
        """
        if fp is None:
            # Write results next to the task's logs, named by UTC timestamp.
            iso = datetime.datetime.utcnow().isoformat()
            logpath = os.path.expanduser(conf.get('logging', 'BASE_LOG_FOLDER'))
            resultpath = logpath + '/' + self.dag_id + '/' + self.task_id + '/results'
            pathlib.Path(resultpath).mkdir(parents=True, exist_ok=True)
            fp = open(resultpath + '/' + iso, 'wb')

        if self.cmd is None:
            cmd_id = ti.xcom_pull(key="qbol_cmd_id", task_ids=self.task_id)
            self.cmd = self.cls.find(cmd_id)

        self.cmd.get_results(fp, inline, delim, fetch)
        fp.flush()
        fp.close()
        return fp.name

    def _resolve_command_id(self, ti):
        """Return the Qubole command id: from self.cmd if already known,
        otherwise pulled from the task instance's XCom."""
        if self.cmd is not None:
            return self.cmd.id
        return ti.xcom_pull(key="qbol_cmd_id", task_ids=self.task_id)

    def get_log(self, ti):
        """
        Get Logs of a command from Qubole

        :param ti: Task Instance of the dag, used to determine the Quboles command id
        :return: command log as text
        """
        # Previously cmd_id was only bound when self.cmd was None (NameError
        # otherwise) and the log was fetched but never returned.
        cmd_id = self._resolve_command_id(ti)
        return Command.get_log_id(cmd_id)

    def get_jobs_id(self, ti):
        """
        Get jobs associated with a Qubole commands

        :param ti: Task Instance of the dag, used to determine the Quboles command id
        :return: Job information associated with command
        """
        # Same fix as get_log: resolve the id robustly and return the result.
        cmd_id = self._resolve_command_id(ti)
        return Command.get_jobs_id(cmd_id)

    def create_cmd_args(self, context):
        """Translate the hook's kwargs into qds_sdk command-line style arguments."""
        args = []
        cmd_type = self.kwargs['command_type']
        inplace_args = None
        # Tag the command so it can be traced back to this DAG/task/run.
        tags = {self.dag_id, self.task_id, context['run_id']}
        positional_args_list = flatten_list(POSITIONAL_ARGS.values())

        for key, value in self.kwargs.items():
            if key in COMMAND_ARGS[cmd_type]:
                if key in HYPHEN_ARGS:
                    # Map the Pythonic underscore name back to the CLI hyphen form.
                    args.append("--{0}={1}".format(key.replace('_', '-'), value))
                elif key in positional_args_list:
                    inplace_args = value
                elif key == 'tags':
                    self._add_tags(tags, value)
                else:
                    args.append("--{0}={1}".format(key, value))

            if key == 'notify' and value is True:
                args.append("--notify")

        args.append("--tags={0}".format(','.join(filter(None, tags))))

        if inplace_args is not None:
            args += inplace_args.split(' ')

        return args

    @staticmethod
    def _add_tags(tags, value):
        # Accept a single tag string or an iterable of tags.
        if isinstance(value, str):
            tags.add(value)
        elif isinstance(value, (list, tuple)):
            tags.update(value)
| 36.157088
| 106
| 0.628484
|
4a03af238ce56ec75363907d46056d18b56d24eb
| 9,259
|
py
|
Python
|
src/onelogin/saml2/idp_metadata_parser.py
|
colinjeanne/python3-saml
|
c45c0f8ae029254642db036cf06058ec9e302098
|
[
"MIT"
] | null | null | null |
src/onelogin/saml2/idp_metadata_parser.py
|
colinjeanne/python3-saml
|
c45c0f8ae029254642db036cf06058ec9e302098
|
[
"MIT"
] | null | null | null |
src/onelogin/saml2/idp_metadata_parser.py
|
colinjeanne/python3-saml
|
c45c0f8ae029254642db036cf06058ec9e302098
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
""" OneLogin_Saml2_IdPMetadataParser class
Copyright (c) 2014, OneLogin, Inc.
All rights reserved.
Metadata class of OneLogin's Python Toolkit.
"""
from copy import deepcopy
try:
import urllib.request as urllib2
except ImportError:
import urllib2
from onelogin.saml2.constants import OneLogin_Saml2_Constants
from onelogin.saml2.xml_utils import OneLogin_Saml2_XML
from onelogin.saml2.utils import OneLogin_Saml2_Utils
class OneLogin_Saml2_IdPMetadataParser(object):
    """
    A class that contain methods related to obtaining and parsing metadata from IdP
    """

    @staticmethod
    def get_metadata(url):
        """
        Gets the metadata XML from the provided URL

        :param url: Url where the XML of the Identity Provider Metadata is published.
        :type url: string

        :returns: metadata XML
        :rtype: string

        :raises Exception: if the retrieved document is not XML containing
                           an ``md:IDPSSODescriptor`` element.
        """
        valid = False

        # Ensure the HTTP connection is released even if read() fails.
        response = urllib2.urlopen(url)
        try:
            xml = response.read()
        finally:
            response.close()

        if xml:
            try:
                dom = OneLogin_Saml2_XML.to_etree(xml)
                idp_descriptor_nodes = OneLogin_Saml2_XML.query(dom, '//md:IDPSSODescriptor')
                if idp_descriptor_nodes:
                    valid = True
            except Exception:
                # Malformed XML: fall through to the uniform error below.
                # BUG FIX: was a bare ``except:``, which also swallowed
                # SystemExit and KeyboardInterrupt.
                pass

        if not valid:
            raise Exception('Not valid IdP XML found from URL: %s' % (url))

        return xml

    @staticmethod
    def parse_remote(url, **kwargs):
        """
        Gets the metadata XML from the provided URL and parse it, returning a dict with extracted data

        :param url: Url where the XML of the Identity Provider Metadata is published.
        :type url: string

        :returns: settings dict with extracted data
        :rtype: dict
        """
        idp_metadata = OneLogin_Saml2_IdPMetadataParser.get_metadata(url)
        return OneLogin_Saml2_IdPMetadataParser.parse(idp_metadata, **kwargs)

    @staticmethod
    def parse(
            idp_metadata,
            required_sso_binding=OneLogin_Saml2_Constants.BINDING_HTTP_REDIRECT,
            required_slo_binding=OneLogin_Saml2_Constants.BINDING_HTTP_REDIRECT,
            index=0):
        """
        Parses the Identity Provider metadata and return a dict with extracted data.

        If there are multiple <IDPSSODescriptor> tags, parse only the first.

        Parses only those SSO endpoints with the same binding as given by
        the `required_sso_binding` parameter.

        Parses only those SLO endpoints with the same binding as given by
        the `required_slo_binding` parameter.

        If the metadata specifies multiple SSO endpoints with the required
        binding, extract only the first (the same holds true for SLO
        endpoints).

        :param idp_metadata: XML of the Identity Provider Metadata.
        :type idp_metadata: string

        :param required_sso_binding: Parse only POST or REDIRECT SSO endpoints.
        :type required_sso_binding: one of OneLogin_Saml2_Constants.BINDING_HTTP_REDIRECT
            or OneLogin_Saml2_Constants.BINDING_HTTP_POST

        :param required_slo_binding: Parse only POST or REDIRECT SLO endpoints.
        :type required_slo_binding: one of OneLogin_Saml2_Constants.BINDING_HTTP_REDIRECT
            or OneLogin_Saml2_Constants.BINDING_HTTP_POST

        :param index: If the metadata contains more than 1 certificate, use index to get the right certificate.
        :type index: number

        :returns: settings dict with extracted data
        :rtype: dict
        """
        data = {}

        dom = OneLogin_Saml2_XML.to_etree(idp_metadata)
        entity_descriptor_nodes = OneLogin_Saml2_XML.query(dom, '//md:EntityDescriptor')

        idp_entity_id = want_authn_requests_signed = idp_name_id_format = idp_sso_url = idp_slo_url = idp_x509_cert = None

        if len(entity_descriptor_nodes) > 0:
            for entity_descriptor_node in entity_descriptor_nodes:
                idp_descriptor_nodes = OneLogin_Saml2_XML.query(entity_descriptor_node, './md:IDPSSODescriptor')
                if len(idp_descriptor_nodes) > 0:
                    idp_descriptor_node = idp_descriptor_nodes[0]

                    idp_entity_id = entity_descriptor_node.get('entityID', None)
                    want_authn_requests_signed = entity_descriptor_node.get('WantAuthnRequestsSigned', None)

                    name_id_format_nodes = OneLogin_Saml2_XML.query(idp_descriptor_node, './md:NameIDFormat')
                    if len(name_id_format_nodes) > 0:
                        idp_name_id_format = name_id_format_nodes[0].text

                    sso_nodes = OneLogin_Saml2_XML.query(
                        idp_descriptor_node,
                        "./md:SingleSignOnService[@Binding='%s']" % required_sso_binding
                    )

                    if len(sso_nodes) > 0:
                        idp_sso_url = sso_nodes[0].get('Location', None)

                    slo_nodes = OneLogin_Saml2_XML.query(
                        idp_descriptor_node,
                        "./md:SingleLogoutService[@Binding='%s']" % required_slo_binding
                    )
                    if len(slo_nodes) > 0:
                        idp_slo_url = slo_nodes[0].get('Location', None)

                    # Attempt to extract the cert/public key to be used for
                    # verifying signatures (as opposed to extracing a key to be
                    # used for encryption), by specifying `use=signing` in the
                    # XPath expression. If that does not yield a cert, retry
                    # using a more relaxed XPath expression (the `use` attribute
                    # is optional according to the saml-metadata-2.0-os spec).
                    cert_nodes = OneLogin_Saml2_XML.query(
                        idp_descriptor_node,
                        "./md:KeyDescriptor[@use='signing']/ds:KeyInfo/ds:X509Data/ds:X509Certificate"
                    )
                    if not cert_nodes:
                        cert_nodes = OneLogin_Saml2_XML.query(
                            idp_descriptor_node,
                            "./md:KeyDescriptor/ds:KeyInfo/ds:X509Data/ds:X509Certificate"
                        )
                    if len(cert_nodes) > 0:
                        idp_x509_cert = OneLogin_Saml2_Utils.format_cert(cert_nodes[index].text, False)

                    data['idp'] = {}

                    if idp_entity_id is not None:
                        data['idp']['entityId'] = idp_entity_id

                    if idp_sso_url is not None:
                        data['idp']['singleSignOnService'] = {}
                        data['idp']['singleSignOnService']['url'] = idp_sso_url
                        data['idp']['singleSignOnService']['binding'] = required_sso_binding

                    if idp_slo_url is not None:
                        data['idp']['singleLogoutService'] = {}
                        data['idp']['singleLogoutService']['url'] = idp_slo_url
                        data['idp']['singleLogoutService']['binding'] = required_slo_binding

                    if idp_x509_cert is not None:
                        data['idp']['x509cert'] = idp_x509_cert

                    if want_authn_requests_signed is not None:
                        data['security'] = {}
                        data['security']['authnRequestsSigned'] = want_authn_requests_signed

                    if idp_name_id_format:
                        data['sp'] = {}
                        data['sp']['NameIDFormat'] = idp_name_id_format
                    # Only the first entity with an IDPSSODescriptor is parsed.
                    break
        return data

    @staticmethod
    def merge_settings(settings, new_metadata_settings):
        """
        Will update the settings with the provided new settings data extracted from the IdP metadata

        :param settings: Current settings dict data
        :type settings: dict

        :param new_metadata_settings: Settings to be merged (extracted from IdP metadata after parsing)
        :type new_metadata_settings: dict

        :returns: merged settings
        :rtype: dict
        """
        for d in (settings, new_metadata_settings):
            if not isinstance(d, dict):
                raise TypeError('Both arguments must be dictionaries.')

        # Guarantee to not modify original data (`settings.copy()` would not
        # be sufficient, as it's just a shallow copy).
        result_settings = deepcopy(settings)

        # Merge `new_metadata_settings` into `result_settings`.
        dict_deep_merge(result_settings, new_metadata_settings)

        return result_settings
def dict_deep_merge(a, b, path=None):
    """Deep-merge dictionary `b` into dictionary `a` (in place).

    Nested dicts are merged recursively; on a scalar conflict the value
    from ``b`` wins.  Returns ``a`` for convenience.
    Kudos to http://stackoverflow.com/a/7205107/145400
    """
    current_path = path if path is not None else []
    for key, b_value in b.items():
        if key not in a:
            a[key] = b_value
            continue
        a_value = a[key]
        if isinstance(a_value, dict) and isinstance(b_value, dict):
            dict_deep_merge(a_value, b_value, current_path + [str(key)])
        elif a_value != b_value:
            # Key/value conflict. Prioritize b over a.
            a[key] = b_value
        # Equal values: nothing to do.
    return a
| 39.738197
| 122
| 0.598661
|
4a03b0b4b278059869a25f28237d155a190ad1bc
| 3,243
|
py
|
Python
|
servicecatalog_puppet/workflow/service_control_policies/do_terminate_service_control_policies_task.py
|
mtrampic/aws-service-catalog-puppet
|
faa6ebe15929dc0040b85e5fd3313161821daa36
|
[
"Apache-2.0"
] | 2
|
2019-04-12T23:28:46.000Z
|
2019-04-15T15:35:04.000Z
|
servicecatalog_puppet/workflow/service_control_policies/do_terminate_service_control_policies_task.py
|
mtrampic/aws-service-catalog-puppet
|
faa6ebe15929dc0040b85e5fd3313161821daa36
|
[
"Apache-2.0"
] | null | null | null |
servicecatalog_puppet/workflow/service_control_policies/do_terminate_service_control_policies_task.py
|
mtrampic/aws-service-catalog-puppet
|
faa6ebe15929dc0040b85e5fd3313161821daa36
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
import functools
import luigi
from servicecatalog_puppet import constants
from servicecatalog_puppet.workflow import dependency
from servicecatalog_puppet.workflow.service_control_policies import (
service_control_policies_base_task,
get_or_create_policy_task,
)
from servicecatalog_puppet.workflow.manifest import manifest_mixin
class DoTerminateServiceControlPoliciesTask(
    service_control_policies_base_task.ServiceControlPoliciesBaseTask,
    manifest_mixin.ManifestMixen,
    dependency.DependenciesMixin,
):
    """Detach (terminate) a service control policy from an account or OU.

    The policy itself is resolved/created by ``GetOrCreatePolicyTask``
    (declared in :meth:`requires`); this task only removes the attachment.
    """

    service_control_policy_name = luigi.Parameter()
    puppet_account_id = luigi.Parameter()
    region = luigi.Parameter()
    account_id = luigi.Parameter()
    ou_name = luigi.Parameter()
    content = luigi.DictParameter()
    description = luigi.Parameter()
    requested_priority = luigi.IntParameter()

    def params_for_results_display(self):
        """Identifying parameters shown in run-result summaries."""
        return {
            "puppet_account_id": self.puppet_account_id,
            "service_control_policy_name": self.service_control_policy_name,
            "region": self.region,
            "account_id": self.account_id,
            "ou_name": self.ou_name,
            "cache_invalidator": self.cache_invalidator,
        }

    def requires(self):
        """Ensure the policy exists (so its Id can be read) before detaching."""
        return dict(
            policy=get_or_create_policy_task.GetOrCreatePolicyTask(
                puppet_account_id=self.puppet_account_id,
                region=self.region,
                policy_name=self.service_control_policy_name,
                policy_description=self.description,
                policy_content=self.content,
                tags=self.manifest.get(constants.SERVICE_CONTROL_POLICIES)
                .get(self.service_control_policy_name)
                .get("tags", []),
            )
        )

    def api_calls_used(self):
        """AWS API operations this task may perform."""
        return [
            f"organizations.detach_policy_{self.region}",
        ]

    def target(self):
        """Resolve and memoize the attachment target (account id or OU id).

        BUG FIX: this was previously decorated with
        ``functools.lru_cache(maxsize=32)``.  An ``lru_cache`` on an
        instance method keys on ``self`` and keeps every task instance
        alive for the cache's lifetime (flake8-bugbear B019); a
        per-instance attribute provides the same memoization without the
        leak.
        """
        resolved = getattr(self, "_resolved_target", None)
        if resolved is None:
            with self.organizations_policy_client() as orgs:
                if self.account_id != "":
                    resolved = self.account_id
                elif str(self.ou_name).startswith("/"):
                    # An OU given as a path needs converting to its OU id.
                    resolved = orgs.convert_path_to_ou(self.ou_name)
                else:
                    resolved = self.ou_name
            self._resolved_target = resolved
        return resolved

    def has_policy_attached(self, orgs):
        """Return True if the named SCP is currently attached to the target."""
        paginator = orgs.get_paginator("list_policies_for_target")
        for page in paginator.paginate(
            TargetId=self.target(), Filter="SERVICE_CONTROL_POLICY"
        ):
            for policy in page.get("Policies", []):
                if policy.get("Name") == self.service_control_policy_name:
                    return True
        return False

    def run(self):
        """Detach the policy if it is attached; record the outcome."""
        with self.organizations_policy_client() as orgs:
            self.info("Ensuring attachments for policies")
            policy_id = self.load_from_input("policy").get("Id")
            if self.has_policy_attached(orgs):
                orgs.detach_policy(PolicyId=policy_id, TargetId=self.target())
                self.write_output("terminated")
            else:
                self.write_output("skipped")
| 35.25
| 78
| 0.647549
|
4a03b42c9c7e751e7b4b6bff44cfb3957eb03cd1
| 1,218
|
py
|
Python
|
KvasirAPI/test/conftest.py
|
KvasirSecurity/kvasirapi-python
|
ec8c5818bd5913f3afd150f25eaec6e7cc732f4c
|
[
"BSD-3-Clause"
] | null | null | null |
KvasirAPI/test/conftest.py
|
KvasirSecurity/kvasirapi-python
|
ec8c5818bd5913f3afd150f25eaec6e7cc732f4c
|
[
"BSD-3-Clause"
] | 2
|
2017-07-11T23:18:36.000Z
|
2021-03-25T21:47:25.000Z
|
KvasirAPI/test/conftest.py
|
KvasirSecurity/kvasirapi-python
|
ec8c5818bd5913f3afd150f25eaec6e7cc732f4c
|
[
"BSD-3-Clause"
] | 1
|
2017-05-13T07:08:17.000Z
|
2017-05-13T07:08:17.000Z
|
# -*- coding: utf-8 -*-
##--------------------------------------#
## API for Kvasir
##
## (c) 2010-2014 Cisco Systems, Inc.
##
## Configuration pytest fixtures
##
## Author: Kurt Grutzmacher <kgrutzma@cisco.com>
##--------------------------------------#
import pytest
import KvasirAPI
@pytest.fixture(scope="module")
def configure_string():
    """KvasirAPI configuration using a string"""
    # Inline YAML config with two instances (internal/external); the string
    # is passed verbatim to KvasirAPI.API, which presumably parses it as
    # YAML -- TODO confirm against KvasirAPI.API's constructor.
    test_config = '''customer:
  id: 11-ACME-01
  full-name: ACME Widgets, Inc.
  short-name: ACME
  possessive: ACME Widget, Inc's
  short-capital: ACME
  possessive-capital: ACME's

instances:
  internal:
    url: "http://username:password@localhost:8000/internal/"
    name: Internal Network
    test_type: internal
    start: May 2, 2011
    end: May 6, 2011
    filter_type: assetgroup
    filter_value: organization
  external:
    url: "http://username:password@localhost:8000/external/"
    start: May 2, 2011
    end: May 6, 2011
    name: External Network
    test_type: external

web2py: /opt/web2py/
api_type: jsonrpc
'''
    return KvasirAPI.API(test_config)
@pytest.fixture(scope="module")
def configure_file():
    """KvasirAPI configuration from a yaml file"""
    # Path is relative to the pytest working directory (repo root assumed).
    return KvasirAPI.API("test/test.yaml")
| 21.75
| 60
| 0.642857
|
4a03b4c61983484addfe6d3808434789e784b66a
| 121
|
py
|
Python
|
00-basics/00-function_return_value.py
|
ralexrivero/python_fundation
|
34a855db7380d3d91db6a8f02d97f287d038ef5f
|
[
"Apache-2.0"
] | 1
|
2021-09-19T04:09:48.000Z
|
2021-09-19T04:09:48.000Z
|
00-basics/00-function_return_value.py
|
ralexrivero/python_fundation
|
34a855db7380d3d91db6a8f02d97f287d038ef5f
|
[
"Apache-2.0"
] | null | null | null |
00-basics/00-function_return_value.py
|
ralexrivero/python_fundation
|
34a855db7380d3d91db6a8f02d97f287d038ef5f
|
[
"Apache-2.0"
] | null | null | null |
#
# function that returns a value
#
def cube(x):
    """Return x raised to the third power."""
    squared = x * x
    return squared * x
# call function and pass argument
print(cube(3))  # prints 27
| 12.1
| 33
| 0.652893
|
4a03b5321495d761b986fe89e401d248a0536ff1
| 7,001
|
py
|
Python
|
homeassistant/components/sensibo/coordinator.py
|
edenhaus/home-assistant
|
9d42a425fc6d88482161d93f78c5125a3f6059eb
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/sensibo/coordinator.py
|
edenhaus/home-assistant
|
9d42a425fc6d88482161d93f78c5125a3f6059eb
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/sensibo/coordinator.py
|
edenhaus/home-assistant
|
9d42a425fc6d88482161d93f78c5125a3f6059eb
|
[
"Apache-2.0"
] | null | null | null |
"""DataUpdateCoordinator for the Sensibo integration."""
from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
from typing import Any
from pysensibo import SensiboClient
from pysensibo.exceptions import AuthenticationError, SensiboError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER, TIMEOUT
MAX_POSSIBLE_STEP = 1000
@dataclass
class MotionSensor:
    """Dataclass for motionsensors."""

    # Fields are populated by the coordinator from the Sensibo
    # "motionSensors" API payload (see _async_update_data below).
    id: str  # sensor id
    alive: bool | None = None  # connectionStatus["isAlive"]
    fw_ver: str | None = None  # firmwareVersion
    fw_type: str | None = None  # firmwareType
    is_main_sensor: bool | None = None  # isMainSensor flag
    battery_voltage: int | None = None  # measurements["batteryVoltage"]
    humidity: int | None = None  # measurements["humidity"]
    temperature: float | None = None  # measurements["temperature"]
    model: str | None = None  # productModel
class SensiboDataUpdateCoordinator(DataUpdateCoordinator):
    """A Sensibo Data Update Coordinator."""

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
        """Initialize the Sensibo coordinator."""
        self.client = SensiboClient(
            entry.data[CONF_API_KEY],
            session=async_get_clientsession(hass),
            timeout=TIMEOUT,
        )
        super().__init__(
            hass,
            LOGGER,
            name=DOMAIN,
            update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
        )

    async def _async_update_data(self) -> dict[str, dict[str, Any]]:
        """Fetch data from Sensibo and normalize it into a per-device dict.

        :raises ConfigEntryAuthFailed: on authentication errors.
        :raises UpdateFailed: on any other Sensibo API error.
        """
        # Keep the try body minimal: only the API call raises these errors.
        try:
            data = await self.client.async_get_devices()
        except AuthenticationError as error:
            raise ConfigEntryAuthFailed from error
        except SensiboError as error:
            raise UpdateFailed from error

        device_data: dict[str, dict[str, Any]] = {}
        for dev in data["result"]:
            unique_id = dev["id"]
            mac = dev["macAddress"]
            name = dev["room"]["name"]
            temperature = dev["measurements"].get("temperature")
            humidity = dev["measurements"].get("humidity")
            ac_states = dev["acState"]
            target_temperature = ac_states.get("targetTemperature")
            hvac_mode = ac_states.get("mode")
            running = ac_states.get("on")
            fan_mode = ac_states.get("fanLevel")
            swing_mode = ac_states.get("swing")
            available = dev["connectionStatus"].get("isAlive", True)
            capabilities = dev["remoteCapabilities"]
            hvac_modes = list(capabilities["modes"])
            if hvac_modes:
                hvac_modes.append("off")
            current_capabilities = capabilities["modes"][ac_states.get("mode")]
            fan_modes = current_capabilities.get("fanLevels")
            swing_modes = current_capabilities.get("swing")

            temperature_unit_key = dev.get("temperatureUnit") or ac_states.get(
                "temperatureUnit"
            )
            temperatures_list = (
                current_capabilities["temperatures"]
                .get(temperature_unit_key, {})
                .get("values", [0, 1])
            )
            # Smallest gap between supported setpoints.  BUG FIX: previously
            # temperature_step was left unbound (NameError, or a stale value
            # leaked from the previous device in the loop) when the API
            # returned an empty "values" list; fall back to
            # MAX_POSSIBLE_STEP, consistent with the single-entry case.
            temperature_step = min(
                (
                    nxt - cur
                    for cur, nxt in zip(temperatures_list, temperatures_list[1:])
                ),
                default=MAX_POSSIBLE_STEP,
            )

            active_features = list(ac_states)
            full_features = set()
            for mode in capabilities["modes"]:
                if "temperatures" in capabilities["modes"][mode]:
                    full_features.add("targetTemperature")
                if "swing" in capabilities["modes"][mode]:
                    full_features.add("swing")
                if "fanLevels" in capabilities["modes"][mode]:
                    full_features.add("fanLevel")

            state = hvac_mode if hvac_mode else "off"

            fw_ver = dev["firmwareVersion"]
            fw_type = dev["firmwareType"]
            model = dev["productModel"]

            calibration_temp = dev["sensorsCalibration"].get("temperature")
            calibration_hum = dev["sensorsCalibration"].get("humidity")

            # Sky plus supports functionality to use motion sensor as sensor for temp and humidity
            if main_sensor := dev["mainMeasurementsSensor"]:
                measurements = main_sensor["measurements"]
                temperature = measurements.get("temperature")
                humidity = measurements.get("humidity")

            # ``or []`` guards against "motionSensors" being None, which
            # previously raised TypeError while evaluating the comprehension.
            motion_sensors = [
                MotionSensor(
                    id=motionsensor["id"],
                    alive=motionsensor["connectionStatus"].get("isAlive"),
                    fw_ver=motionsensor.get("firmwareVersion"),
                    fw_type=motionsensor.get("firmwareType"),
                    is_main_sensor=motionsensor.get("isMainSensor"),
                    battery_voltage=motionsensor["measurements"].get("batteryVoltage"),
                    humidity=motionsensor["measurements"].get("humidity"),
                    temperature=motionsensor["measurements"].get("temperature"),
                    model=motionsensor.get("productModel"),
                )
                for motionsensor in dev["motionSensors"] or []
            ]

            device_data[unique_id] = {
                "id": unique_id,
                "mac": mac,
                "name": name,
                "ac_states": ac_states,
                "temp": temperature,
                "humidity": humidity,
                "target_temp": target_temperature,
                "hvac_mode": hvac_mode,
                "on": running,
                "fan_mode": fan_mode,
                "swing_mode": swing_mode,
                "available": available,
                "hvac_modes": hvac_modes,
                "fan_modes": fan_modes,
                "swing_modes": swing_modes,
                "temp_unit": temperature_unit_key,
                "temp_list": temperatures_list,
                "temp_step": temperature_step,
                "active_features": active_features,
                "full_features": full_features,
                "state": state,
                "fw_ver": fw_ver,
                "fw_type": fw_type,
                "model": model,
                "calibration_temp": calibration_temp,
                "calibration_hum": calibration_hum,
                "full_capabilities": capabilities,
                "motion_sensors": motion_sensors,
            }
        return device_data
| 39.778409
| 98
| 0.581488
|
4a03b5762441dbc31c1d186f22fa004e2bc7aafd
| 2,165
|
py
|
Python
|
sampledb/scripts/create_action.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
sampledb/scripts/create_action.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
sampledb/scripts/create_action.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Script for creating an action in SampleDB.
Usage: python -m sampledb create_action <instrument_id> <type: sample or measurement > <name> <description> <schema_file_name>
"""
import json
import sys
from .. import create_app
from ..logic.actions import create_action, ActionType
from ..logic.instruments import get_instrument
from ..logic.schemas import validate_schema
from ..logic.errors import InstrumentDoesNotExistError, ValidationError
def main(arguments):
    """CLI entry point: validate arguments and create an action in SampleDB.

    Expects exactly 5 positional arguments:
    instrument_id ('None' or int), action type, name, description, schema file.
    Any validation failure prints an error to stderr and exits with status 1.
    """
    if len(arguments) != 5:
        print(__doc__)
        exit(1)
    instrument_id, action_type, name, description, schema_file_name = arguments

    if instrument_id == 'None':
        # The literal string 'None' means the action is not instrument-bound.
        instrument_id = None
    else:
        try:
            instrument_id = int(instrument_id)
        except ValueError:
            print("Error: instrument_id must be an integer or 'None'", file=sys.stderr)
            exit(1)
    # Map the CLI keyword to the ActionType enum.
    # NOTE(review): 'simulation' is accepted here although the usage string
    # and the error message below only mention sample/measurement -- confirm.
    if action_type == 'sample':
        action_type = ActionType.SAMPLE_CREATION
    elif action_type == 'measurement':
        action_type = ActionType.MEASUREMENT
    elif action_type == 'simulation':
        action_type = ActionType.SIMULATION
    else:
        print('Error: action type must be "sample" or "measurement"', file=sys.stderr)
        exit(1)

    app = create_app()

    with app.app_context():
        # Existence check only applies when an instrument id was given.
        if instrument_id is not None:
            try:
                get_instrument(instrument_id)
            except InstrumentDoesNotExistError:
                print('Error: no instrument with this id exists', file=sys.stderr)
                exit(1)
        with open(schema_file_name, 'r', encoding='utf-8') as schema_file:
            schema = json.load(schema_file)
        try:
            validate_schema(schema)
        except ValidationError as e:
            print('Error: invalid schema: {}'.format(str(e)), file=sys.stderr)
            exit(1)
        action = create_action(
            instrument_id=instrument_id,
            action_type=action_type,
            name=name,
            description=description,
            schema=schema
        )
        print("Success: the action has been created in SampleDB (#{})".format(action.id))
| 34.919355
| 126
| 0.638799
|
4a03b62c2aacdab302ef8ee5673422edeb662fa8
| 1,278
|
py
|
Python
|
test/test_ds.py
|
JalexChang/cross-media-attribution
|
09a94774798c0d05d9142fde056de72e69872acb
|
[
"BSD-2-Clause"
] | null | null | null |
test/test_ds.py
|
JalexChang/cross-media-attribution
|
09a94774798c0d05d9142fde056de72e69872acb
|
[
"BSD-2-Clause"
] | null | null | null |
test/test_ds.py
|
JalexChang/cross-media-attribution
|
09a94774798c0d05d9142fde056de72e69872acb
|
[
"BSD-2-Clause"
] | null | null | null |
import numpy
import unittest
from mta.ds.touch_row import TouchRow
from mta.ds.rating_row import RatingRow
class TestDs(unittest.TestCase):
    """Exercise the RatingRow and TouchRow data-structure wrappers."""

    def setUp(self):
        # Fixture CSVs are expected in the current working directory.
        self.rating_rows = numpy.loadtxt('cust_ratings', delimiter=',')
        self.touch_rows = numpy.loadtxt('cust_touchs', delimiter=',')

    def test_rating_row(self):
        raw = self.rating_rows
        # Dimensions are (max user id + 1, max item id + 1).
        ratings = RatingRow(raw, [max(raw[:, 0]) + 1, max(raw[:, 1]) + 1])
        row_list = ratings.to_list()
        matrix = ratings.to_matrix()
        self.assertEqual((len(row_list), len(row_list[0])), (13, 3), 'to_list()')
        self.assertEqual((len(matrix), len(matrix[0])), (5, 5), 'to_matrix()')
        self.assertEqual(ratings.mean(), self.rating_rows[:, 2].mean(), 'mean()')
        self.assertEqual(ratings.range(), 4, 'range()')

    def test_touch_row(self):
        raw = self.touch_rows
        touchs = TouchRow(raw, [max(raw[:, 0]) + 1, max(raw[:, 1]) + 1])
        row_list = touchs.to_list()
        matrix = touchs.to_matrix()
        self.assertEqual((len(row_list), len(row_list[0])), (9, 2), 'to_list()')
        self.assertEqual((len(matrix), len(matrix[0])), (5, 3), 'test to_matrix()')
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__' :
    unittest.main()
| 36.514286
| 84
| 0.612676
|
4a03b641051e696f18a4c0b44bbad00dea329f17
| 128
|
py
|
Python
|
ConfigSpace/__version__.py
|
franchuterivera/ConfigSpace
|
26a55ff019859412a11bb68a2eb177e8ba65e00a
|
[
"BSD-3-Clause"
] | null | null | null |
ConfigSpace/__version__.py
|
franchuterivera/ConfigSpace
|
26a55ff019859412a11bb68a2eb177e8ba65e00a
|
[
"BSD-3-Clause"
] | null | null | null |
ConfigSpace/__version__.py
|
franchuterivera/ConfigSpace
|
26a55ff019859412a11bb68a2eb177e8ba65e00a
|
[
"BSD-3-Clause"
] | null | null | null |
"""Version information."""
# The following line *must* be the last in the module, exactly as formatted:
__version__ = "0.4.19"
| 25.6
| 76
| 0.710938
|
4a03b6d49b8e357e522278097b7b9f254f18e251
| 80
|
py
|
Python
|
missing_video_finder/exception.py
|
KylianCadet/missing-video-finder
|
d3defe710dc372d5132f2d9a2c4e304c7ecda1e5
|
[
"MIT"
] | 3
|
2021-04-28T13:46:13.000Z
|
2021-05-07T11:23:31.000Z
|
missing_video_finder/exception.py
|
KylianCadet/missing-video-finder
|
d3defe710dc372d5132f2d9a2c4e304c7ecda1e5
|
[
"MIT"
] | null | null | null |
missing_video_finder/exception.py
|
KylianCadet/missing-video-finder
|
d3defe710dc372d5132f2d9a2c4e304c7ecda1e5
|
[
"MIT"
] | null | null | null |
class NotAuthenticated(Exception):
    """Signals a missing or failed authentication.

    NOTE(review): raised conditions are not visible in this file --
    confirm against the call sites.
    """
    pass
class APIError(Exception):
    """Generic error for failed API interactions.

    NOTE(review): raised conditions are not visible in this file --
    confirm against the call sites.
    """
    pass
| 16
| 34
| 0.75
|
4a03b8574e800d2390d8de3058f1cce1dbc4a6d8
| 17,594
|
py
|
Python
|
mriqc/viz/utils.py
|
erramuzpe/mriqc
|
03eb869b0966cf27fe85db88a970f8ab8640c9e9
|
[
"BSD-3-Clause"
] | 1
|
2019-08-17T21:20:48.000Z
|
2019-08-17T21:20:48.000Z
|
mriqc/viz/utils.py
|
erramuzpe/mriqc
|
03eb869b0966cf27fe85db88a970f8ab8640c9e9
|
[
"BSD-3-Clause"
] | null | null | null |
mriqc/viz/utils.py
|
erramuzpe/mriqc
|
03eb869b0966cf27fe85db88a970f8ab8640c9e9
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
#
# @Author: oesteban
# @Date: 2016-01-05 11:32:01
# @Email: code@oscaresteban.es
# @Last modified by: oesteban
""" Visualization utilities """
import math
import os.path as op
import numpy as np
import nibabel as nb
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec
from matplotlib.backends.backend_pdf import FigureCanvasPdf as FigureCanvas
import seaborn as sns
DEFAULT_DPI = 300
DINA4_LANDSCAPE = (11.69, 8.27)
DINA4_PORTRAIT = (8.27, 11.69)
def plot_slice(dslice, spacing=None, cmap='Greys_r', label=None,
               ax=None, vmax=None, vmin=None, annotate=False):
    """Render one 2D slice on *ax*, preserving physical aspect ratio.

    :param dslice: 2D array with the slice data.
    :param spacing: per-axis pixel size; isotropic 1.0 when None.
    :param cmap: colormap name or instance.
    :param label: text drawn in the bottom-right corner (e.g. slice index).
    :param ax: target axes; current axes when None.
    :param vmax: display window maximum; estimated from data when falsy.
    :param vmin: display window minimum; estimated from data when falsy.
    :param annotate: draw 'R'/'L' orientation letters in the top corners.
    :returns: the axes the slice was drawn on.
    """
    from matplotlib.cm import get_cmap

    # Accept colormaps given by name.
    if isinstance(cmap, (str, bytes)):
        cmap = get_cmap(cmap)

    # NOTE: falsy vmin/vmax (including an explicit 0) are replaced by the
    # estimated limits.
    est_vmin, est_vmax = _get_limits(dslice)
    if not vmin:
        vmin = est_vmin
    if not vmax:
        vmax = est_vmax

    if ax is None:
        ax = plt.gca()

    if spacing is None:
        spacing = [1.0, 1.0]

    # Extent in physical units so voxels keep their real aspect ratio.
    phys_sp = np.array(spacing) * dslice.shape
    ax.imshow(np.swapaxes(dslice, 0, 1), vmin=vmin, vmax=vmax, cmap=cmap,
              interpolation='nearest', origin='lower',
              extent=[0, phys_sp[0], 0, phys_sp[1]])
    ax.set_xticklabels([])
    ax.set_yticklabels([])
    ax.grid(False)
    ax.axis('off')

    # Text/background colors derived from the colormap endpoints.
    bgcolor = cmap(min(vmin, 0.0))
    fgcolor = cmap(vmax)

    if annotate:
        ax.text(.95, .95, 'R', color=fgcolor, transform=ax.transAxes,
                horizontalalignment='center', verticalalignment='top',
                size=18, bbox=dict(boxstyle="square,pad=0", ec=bgcolor, fc=bgcolor))
        ax.text(.05, .95, 'L', color=fgcolor, transform=ax.transAxes,
                horizontalalignment='center', verticalalignment='top',
                size=18, bbox=dict(boxstyle="square,pad=0", ec=bgcolor, fc=bgcolor))

    if label is not None:
        ax.text(.98, .01, label, color=fgcolor, transform=ax.transAxes,
                horizontalalignment='right', verticalalignment='bottom',
                size=18, bbox=dict(boxstyle="square,pad=0", ec=bgcolor, fc=bgcolor))

    return ax
def plot_slice_tern(dslice, prev=None, post=None,
                    spacing=None, cmap='Greys_r', label=None, ax=None,
                    vmax=None, vmin=None):
    """Plot a slice flanked by the same slice at adjacent timepoints.

    Missing neighbors (first/last timepoint) are replaced by all-ones
    placeholders so the triptych keeps its width.

    :param dslice: 2D array with the central slice.
    :param prev: slice at the previous timepoint, or None.
    :param post: slice at the next timepoint, or None.
    :param label: text drawn centered under the triptych.
    """
    from matplotlib.cm import get_cmap
    if isinstance(cmap, (str, bytes)):
        cmap = get_cmap(cmap)

    # Falsy vmin/vmax are replaced by limits estimated from the data.
    est_vmin, est_vmax = _get_limits(dslice)
    if not vmin:
        vmin = est_vmin
    if not vmax:
        vmax = est_vmax

    if ax is None:
        ax = plt.gca()

    if spacing is None:
        spacing = [1.0, 1.0]
    else:
        # Swap to the axis order used by the stacked layout below.
        spacing = [spacing[1], spacing[0]]
    phys_sp = np.array(spacing) * dslice.shape

    if prev is None:
        prev = np.ones_like(dslice)
    if post is None:
        post = np.ones_like(dslice)

    # Stack prev|current|post horizontally into one image.
    combined = np.swapaxes(np.vstack((prev, dslice, post)), 0, 1)
    ax.imshow(combined, vmin=vmin, vmax=vmax, cmap=cmap,
              interpolation='nearest', origin='lower',
              extent=[0, phys_sp[1] * 3, 0, phys_sp[0]])
    ax.set_xticklabels([])
    ax.set_yticklabels([])
    ax.grid(False)

    if label is not None:
        ax.text(.5, .05, label,
                transform=ax.transAxes,
                horizontalalignment='center',
                verticalalignment='top',
                size=14,
                bbox=dict(boxstyle="square,pad=0", ec='k', fc='k'),
                color='w')
def plot_spikes(in_file, in_fft, spikes_list, cols=3,
                labelfmt='t={0:.3f}s (z={1:d})',
                out_file=None):
    """Plot spike (t, z) locations as paired image/FFT triptychs, save as SVG.

    :param in_file: 4D NIfTI image (x, y, z, t).
    :param in_fft: NIfTI with the corresponding FFT data, same shape.
    :param spikes_list: iterable of (timepoint, z-slice) index pairs.
    :param cols: subplot columns; grown by one for long spike lists.
    :param labelfmt: format string receiving (time in s, z index).
    :param out_file: output path; derived from *in_file* when None.
    :returns: path of the saved SVG.
    """
    from mpl_toolkits.axes_grid1 import make_axes_locatable

    nii = nb.as_closest_canonical(nb.load(in_file))
    fft = nb.load(in_fft).get_data()

    data = nii.get_data()
    zooms = nii.header.get_zooms()[:2]
    tstep = nii.header.get_zooms()[-1]  # TR, i.e. seconds per timepoint
    ntpoints = data.shape[-1]

    if len(spikes_list) > cols * 7:
        cols += 1

    nspikes = len(spikes_list)
    rows = 1
    if nspikes > cols:
        rows = math.ceil(nspikes / cols)

    fig = plt.figure(figsize=(7 * cols, 5 * rows))

    for i, (t, z) in enumerate(spikes_list):
        # Neighboring timepoints (None at the temporal boundaries; the
        # tern plot substitutes placeholders).
        prev = None
        pvft = None
        if t > 0:
            prev = data[..., z, t - 1]
            pvft = fft[..., z, t - 1]

        post = None
        psft = None
        if t < (ntpoints - 1):
            post = data[..., z, t + 1]
            psft = fft[..., z, t + 1]

        # Two stacked axes per spike: image triptych on top, FFT below.
        ax1 = fig.add_subplot(rows, cols, i + 1)
        divider = make_axes_locatable(ax1)
        ax2 = divider.new_vertical(size="100%", pad=0.1)
        fig.add_axes(ax2)

        plot_slice_tern(data[..., z, t], prev=prev, post=post, spacing=zooms,
                        ax=ax2,
                        label=labelfmt.format(t * tstep, z))

        plot_slice_tern(fft[..., z, t], prev=pvft, post=psft, vmin=-5, vmax=5,
                        cmap=get_parula(), ax=ax1)

    plt.tight_layout()
    if out_file is None:
        fname, ext = op.splitext(op.basename(in_file))
        if ext == '.gz':
            fname, _ = op.splitext(fname)
        out_file = op.abspath('%s.svg' % fname)

    fig.savefig(out_file, format='svg', dpi=300, bbox_inches='tight')
    return out_file
def plot_mosaic(img, out_file=None, ncols=8, title=None, overlay_mask=None,
                bbox_mask_file=None, only_plot_noise=False, annotate=True,
                vmin=None, vmax=None, cmap='Greys_r', plot_sagittal=True,
                fig=None, zmax=128):
    """Save a mosaic of axial slices (plus an optional sagittal row) as SVG.

    :param img: NIfTI path or an array (arrays always save to 'mosaic.svg').
    :param ncols: subplot columns per row.
    :param overlay_mask: NIfTI mask rendered semi-transparently in red.
    :param bbox_mask_file: mask used to crop the image to its bounding box.
    :param zmax: maximum number of axial slices shown; excess is decimated.

    NOTE(review): this function returns None; callers presumably rely on
    the side effect of the saved file -- confirm.
    """
    if isinstance(img, (str, bytes)):
        nii = nb.as_closest_canonical(nb.load(img))
        img_data = nii.get_data()
        zooms = nii.header.get_zooms()
    else:
        img_data = img
        zooms = [1.0, 1.0, 1.0]
        out_file = 'mosaic.svg'

    # Remove extra dimensions
    img_data = np.squeeze(img_data)

    # Auto-crop to a foreground bounding box when many slices and no mask.
    if img_data.shape[2] > zmax and bbox_mask_file is None:
        lowthres = np.percentile(img_data, 5)
        mask_file = np.ones_like(img_data)
        mask_file[img_data <= lowthres] = 0
        img_data = _bbox(img_data, mask_file)

    if bbox_mask_file is not None:
        bbox_data = nb.as_closest_canonical(
            nb.load(bbox_mask_file)).get_data()
        img_data = _bbox(img_data, bbox_data)

    z_vals = np.array(list(range(0, img_data.shape[2])))

    # Reduce the number of slices shown
    if len(z_vals) > zmax:
        rem = 15
        # Crop inferior and posterior
        if not bbox_mask_file:
            # img_data = img_data[..., rem:-rem]
            z_vals = z_vals[rem:-rem]
        else:
            # img_data = img_data[..., 2 * rem:]
            z_vals = z_vals[2 * rem:]

    while len(z_vals) > zmax:
        # Discard one every two slices
        # img_data = img_data[..., ::2]
        z_vals = z_vals[::2]

    n_images = len(z_vals)
    nrows = math.ceil(n_images / ncols)
    if plot_sagittal:
        # Reserve one extra row for the sagittal strip.
        nrows += 1

    if overlay_mask:
        overlay_data = nb.as_closest_canonical(
            nb.load(overlay_mask)).get_data()

    # create figures
    if fig is None:
        fig = plt.figure(figsize=(22, nrows * 3))

    # Falsy vmin/vmax are replaced by limits estimated from the data.
    est_vmin, est_vmax = _get_limits(img_data,
                                     only_plot_noise=only_plot_noise)
    if not vmin:
        vmin = est_vmin
    if not vmax:
        vmax = est_vmax

    naxis = 1
    for z_val in z_vals:
        ax = fig.add_subplot(nrows, ncols, naxis)

        if overlay_mask:
            ax.set_rasterized(True)
        plot_slice(img_data[:, :, z_val], vmin=vmin, vmax=vmax,
                   cmap=cmap, ax=ax, spacing=zooms[:2],
                   label='%d' % z_val, annotate=annotate)

        if overlay_mask:
            from matplotlib import cm
            msk_cmap = cm.Reds  # @UndefinedVariable
            # HACK: mutate the (module-global) Reds colormap to make it
            # progressively transparent for the overlay.
            msk_cmap._init()
            alphas = np.linspace(0, 0.75, msk_cmap.N + 3)
            msk_cmap._lut[:, -1] = alphas
            plot_slice(overlay_data[:, :, z_val], vmin=0, vmax=1,
                       cmap=msk_cmap, ax=ax, spacing=zooms[:2])
        naxis += 1

    if plot_sagittal:
        # Fill the last row with evenly spaced sagittal cuts.
        naxis = ncols * (nrows - 1) + 1

        step = int(img_data.shape[0] / (ncols + 1))
        start = step
        stop = img_data.shape[0] - step

        if step == 0:
            step = 1

        for x_val in list(range(start, stop, step))[:ncols]:
            ax = fig.add_subplot(nrows, ncols, naxis)

            plot_slice(img_data[x_val, ...], vmin=vmin, vmax=vmax,
                       cmap=cmap, ax=ax, label='%d' % x_val,
                       spacing=[zooms[0], zooms[2]])
            naxis += 1

    fig.subplots_adjust(
        left=0.05, right=0.95, bottom=0.05, top=0.95, wspace=0.05,
        hspace=0.05)

    if title:
        fig.suptitle(title, fontsize='10')
    fig.subplots_adjust(wspace=0.002, hspace=0.002)

    if out_file is None:
        fname, ext = op.splitext(op.basename(img))
        if ext == ".gz":
            fname, _ = op.splitext(fname)
        out_file = op.abspath(fname + '_mosaic.svg')

    fig.savefig(out_file, format='svg', dpi=300, bbox_inches='tight')
def plot_fd(fd_file, fd_radius, mean_fd_dist=None, figsize=DINA4_LANDSCAPE):
    """Plot framewise displacement (FD) over time plus its distribution.

    :param fd_file: text file of motion parameters, one row per timepoint
        (three translations followed by three rotations -- see ``_calc_fd``).
    :param fd_radius: head radius [mm] used to convert rotations into
        displacements inside ``_calc_fd``.
    :param mean_fd_dist: optional sequence of mean-FD values (e.g. one per
        subject); when given, an extra panel with that distribution is added.
    :param figsize: figure size in inches.
    :return: the assembled matplotlib figure.
    """
    fd_power = _calc_fd(fd_file, fd_radius)
    fig = plt.Figure(figsize=figsize)
    FigureCanvas(fig)  # attach a canvas so the figure can be rendered/saved
    if mean_fd_dist:
        grid = GridSpec(2, 4)
    else:
        grid = GridSpec(1, 2, width_ratios=[3, 1])
    grid.update(hspace=1.0, right=0.95, left=0.1, bottom=0.2)
    # Timeseries panel: FD per frame
    ax = fig.add_subplot(grid[0, :-1])
    ax.plot(fd_power)
    ax.set_xlim((0, len(fd_power)))
    ax.set_ylabel("Frame Displacement [mm]")
    ax.set_xlabel("Frame number")
    ylim = ax.get_ylim()
    # Marginal distribution panel, sharing the y-range of the timeseries
    ax = fig.add_subplot(grid[0, -1])
    sns.distplot(fd_power, vertical=True, ax=ax)
    ax.set_ylim(ylim)
    if mean_fd_dist:
        # Across-subjects distribution with this run's mean FD marked
        ax = fig.add_subplot(grid[1, :])
        sns.distplot(mean_fd_dist, ax=ax)
        ax.set_xlabel("Mean Frame Displacement (over all subjects) [mm]")
        mean_fd = fd_power.mean()
        label = r'$\overline{{\text{{FD}}}}$ = {0:g}'.format(mean_fd)
        plot_vline(mean_fd, label, ax=ax)
    return fig
def plot_dist(
        main_file, mask_file, xlabel, distribution=None, xlabel2=None,
        figsize=DINA4_LANDSCAPE):
    """Plot the histogram of the in-mask values of an image and, optionally,
    a reference distribution underneath it.

    :param main_file: image whose in-mask values are histogrammed.
    :param mask_file: mask image; voxels with value > 0 are selected.
    :param xlabel: x-axis label for the top (subject) histogram.
    :param distribution: optional sequence of values for the bottom panel
        (e.g. the same metric over a group of subjects).
    :param xlabel2: x-axis label for the bottom panel.
    :param figsize: figure size in inches.
    :return: the assembled matplotlib figure.
    """
    data = _get_values_inside_a_mask(main_file, mask_file)
    fig = plt.Figure(figsize=figsize)
    FigureCanvas(fig)  # attach a canvas so the figure can be rendered/saved
    gsp = GridSpec(2, 1)
    ax = fig.add_subplot(gsp[0, 0])
    sns.distplot(data.astype(np.double), kde=False, bins=100, ax=ax)
    ax.set_xlabel(xlabel)
    # BUGFIX: the second panel used to be drawn unconditionally, which
    # crashed with the default distribution=None; it is now optional.
    if distribution is not None:
        ax = fig.add_subplot(gsp[1, 0])
        sns.distplot(np.array(distribution).astype(np.double), ax=ax)
        cur_val = np.median(data)
        # BUGFIX: "{0!g}" is not a valid conversion ("!": only s/r/a are
        # allowed) and raised ValueError; ":g" is the intended float format.
        label = "{0:g}".format(cur_val)
        plot_vline(cur_val, label, ax=ax)
        ax.set_xlabel(xlabel2)
    return fig
def plot_vline(cur_val, label, ax):
    """Draw a vertical marker at *cur_val* on *ax* with a rotated label."""
    ax.axvline(cur_val)
    y_lo, y_hi = ax.get_ylim()
    x_lo, x_hi = ax.get_xlim()
    y_mid = (y_lo + y_hi) / 2.0
    x_pad = (x_lo + x_hi) / 100.0
    # Place the label just left of the line, rotated to read vertically.
    ax.text(cur_val - x_pad, y_mid, label, color="blue", rotation=90,
            verticalalignment='center', horizontalalignment='right')
def _calc_rows_columns(ratio, n_images):
rows = 2
for _ in range(100):
columns = math.floor(ratio * rows)
total = (rows - 1) * columns
if total > n_images:
rows = np.ceil(n_images / columns) + 1
break
rows += 1
return int(rows), int(columns)
def _calc_fd(fd_file, fd_radius):
from math import pi
lines = open(fd_file, 'r').readlines()
rows = [[float(x) for x in line.split()] for line in lines]
cols = np.array([list(col) for col in zip(*rows)])
translations = np.transpose(np.abs(np.diff(cols[0:3, :])))
rotations = np.transpose(np.abs(np.diff(cols[3:6, :])))
fd_power = np.sum(translations, axis=1) + \
(fd_radius * pi / 180) * np.sum(rotations, axis=1)
# FD is zero for the first time point
fd_power = np.insert(fd_power, 0, 0)
return fd_power
def _get_mean_fd_distribution(fd_files, fd_radius):
    """Collect per-file mean and maximum framewise displacement.

    Returns two parallel lists: (mean FD per file, max FD per file).
    """
    mean_fds, max_fds = [], []
    for fname in fd_files:
        fd_series = _calc_fd(fname, fd_radius)
        mean_fds.append(fd_series.mean())
        max_fds.append(fd_series.max())
    return mean_fds, max_fds
def _get_values_inside_a_mask(main_file, mask_file):
    """Return the non-NaN voxel values of *main_file* where *mask_file* > 0."""
    main_data = nb.load(main_file).get_data()
    not_nan = np.logical_not(np.isnan(main_data))
    in_mask = nb.load(mask_file).get_data() > 0
    return main_data[np.logical_and(not_nan, in_mask)]
def plot_segmentation(anat_file, segmentation, out_file, **kwargs):
    """Render *anat_file* with the contours of *segmentation* overlaid and
    save the figure to *out_file*.

    Recognized kwargs: ``vmin``/``vmax`` (display limits), ``saturate``
    (clip brights at the 70th percentile), ``display_mode``, ``cut_coords``,
    ``title``, ``levels``, ``colors``.

    :return: the path the figure was written to (*out_file*).
    """
    from nilearn.plotting import plot_anat
    vmax = kwargs.get('vmax')
    vmin = kwargs.get('vmin')
    if kwargs.get('saturate', False):
        vmax = np.percentile(nb.load(anat_file).get_data().reshape(-1), 70)
    if vmax is None and vmin is None:
        # No explicit limits: derive a robust window from the intensities.
        intensities = nb.load(anat_file).get_data().reshape(-1)
        vmin = np.percentile(intensities, 10)
        vmax = np.percentile(intensities, 99)
    display = plot_anat(
        anat_file,
        display_mode=kwargs.get('display_mode', 'ortho'),
        cut_coords=kwargs.get('cut_coords', 8),
        title=kwargs.get('title'),
        vmax=vmax, vmin=vmin)
    display.add_contours(
        segmentation,
        levels=kwargs.get('levels', [1]),
        colors=kwargs.get('colors', 'r'))
    display.savefig(out_file)
    display.close()
    display = None
    return out_file
def _get_limits(nifti_file, only_plot_noise=False):
if isinstance(nifti_file, str):
nii = nb.as_closest_canonical(nb.load(nifti_file))
data = nii.get_data()
else:
data = nifti_file
data_mask = np.logical_not(np.isnan(data))
if only_plot_noise:
data_mask = np.logical_and(data_mask, data != 0)
vmin = np.percentile(data[data_mask], 0)
vmax = np.percentile(data[data_mask], 61)
else:
vmin = np.percentile(data[data_mask], 0.5)
vmax = np.percentile(data[data_mask], 99.5)
return vmin, vmax
def _bbox(img_data, bbox_data):
B = np.argwhere(bbox_data)
(ystart, xstart, zstart), (ystop, xstop, zstop) = B.min(0), B.max(0) + 1
return img_data[ystart:ystop, xstart:xstop, zstart:zstop]
def get_parula():
    """Build the 'parula' colormap as a matplotlib colormap.

    The 64 RGB anchor points below are linearly interpolated by
    ``LinearSegmentedColormap`` (presumably MATLAB's parula control
    points, given the name -- verify against the MATLAB reference).
    """
    from matplotlib.colors import LinearSegmentedColormap
    # 64 evenly spaced RGB control points, dark blue -> teal -> yellow.
    cm_data = [
        [0.2081, 0.1663, 0.5292],
        [0.2116238095, 0.1897809524, 0.5776761905],
        [0.212252381, 0.2137714286, 0.6269714286],
        [0.2081, 0.2386, 0.6770857143],
        [0.1959047619, 0.2644571429, 0.7279],
        [0.1707285714, 0.2919380952, 0.779247619],
        [0.1252714286, 0.3242428571, 0.8302714286],
        [0.0591333333, 0.3598333333, 0.8683333333],
        [0.0116952381, 0.3875095238, 0.8819571429],
        [0.0059571429, 0.4086142857, 0.8828428571],
        [0.0165142857, 0.4266, 0.8786333333],
        [0.032852381, 0.4430428571, 0.8719571429],
        [0.0498142857, 0.4585714286, 0.8640571429],
        [0.0629333333, 0.4736904762, 0.8554380952],
        [0.0722666667, 0.4886666667, 0.8467],
        [0.0779428571, 0.5039857143, 0.8383714286],
        [0.079347619, 0.5200238095, 0.8311809524],
        [0.0749428571, 0.5375428571, 0.8262714286],
        [0.0640571429, 0.5569857143, 0.8239571429],
        [0.0487714286, 0.5772238095, 0.8228285714],
        [0.0343428571, 0.5965809524, 0.819852381],
        [0.0265, 0.6137, 0.8135],
        [0.0238904762, 0.6286619048, 0.8037619048],
        [0.0230904762, 0.6417857143, 0.7912666667],
        [0.0227714286, 0.6534857143, 0.7767571429],
        [0.0266619048, 0.6641952381, 0.7607190476],
        [0.0383714286, 0.6742714286, 0.743552381],
        [0.0589714286, 0.6837571429, 0.7253857143],
        [0.0843, 0.6928333333, 0.7061666667],
        [0.1132952381, 0.7015, 0.6858571429],
        [0.1452714286, 0.7097571429, 0.6646285714],
        [0.1801333333, 0.7176571429, 0.6424333333],
        [0.2178285714, 0.7250428571, 0.6192619048],
        [0.2586428571, 0.7317142857, 0.5954285714],
        [0.3021714286, 0.7376047619, 0.5711857143],
        [0.3481666667, 0.7424333333, 0.5472666667],
        [0.3952571429, 0.7459, 0.5244428571],
        [0.4420095238, 0.7480809524, 0.5033142857],
        [0.4871238095, 0.7490619048, 0.4839761905],
        [0.5300285714, 0.7491142857, 0.4661142857],
        [0.5708571429, 0.7485190476, 0.4493904762],
        [0.609852381, 0.7473142857, 0.4336857143],
        [0.6473, 0.7456, 0.4188],
        [0.6834190476, 0.7434761905, 0.4044333333],
        [0.7184095238, 0.7411333333, 0.3904761905],
        [0.7524857143, 0.7384, 0.3768142857],
        [0.7858428571, 0.7355666667, 0.3632714286],
        [0.8185047619, 0.7327333333, 0.3497904762],
        [0.8506571429, 0.7299, 0.3360285714],
        [0.8824333333, 0.7274333333, 0.3217],
        [0.9139333333, 0.7257857143, 0.3062761905],
        [0.9449571429, 0.7261142857, 0.2886428571],
        [0.9738952381, 0.7313952381, 0.266647619],
        [0.9937714286, 0.7454571429, 0.240347619],
        [0.9990428571, 0.7653142857, 0.2164142857],
        [0.9955333333, 0.7860571429, 0.196652381],
        [0.988, 0.8066, 0.1793666667],
        [0.9788571429, 0.8271428571, 0.1633142857],
        [0.9697, 0.8481380952, 0.147452381],
        [0.9625857143, 0.8705142857, 0.1309],
        [0.9588714286, 0.8949, 0.1132428571],
        [0.9598238095, 0.9218333333, 0.0948380952],
        [0.9661, 0.9514428571, 0.0755333333],
        [0.9763, 0.9831, 0.0538]]
    return LinearSegmentedColormap.from_list('parula', cm_data)
| 32.105839
| 84
| 0.59742
|
4a03b871514beda83a37e8345adf4331a3f93073
| 1,032
|
py
|
Python
|
task2/gen_cap.py
|
Save404/captcha
|
70bf55285f2cf6102bac7995c9f782b640bd9f53
|
[
"MIT"
] | 3
|
2018-01-16T13:24:05.000Z
|
2018-06-02T07:39:32.000Z
|
task2/gen_cap.py
|
save404/captcha
|
70bf55285f2cf6102bac7995c9f782b640bd9f53
|
[
"MIT"
] | null | null | null |
task2/gen_cap.py
|
save404/captcha
|
70bf55285f2cf6102bac7995c9f782b640bd9f53
|
[
"MIT"
] | 1
|
2018-03-28T10:52:21.000Z
|
2018-03-28T10:52:21.000Z
|
#-*- coding: utf-8 -*-
import random
import string
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
from image import ImageCaptcha
# Character pool: digits plus lower- and upper-case ASCII letters.
chars = string.digits + string.ascii_lowercase + string.ascii_uppercase
# Generate a random captcha text.
def random_captcha_text(char_set=chars, captcha_size=5):
    """Return a random string of *captcha_size* characters from *char_set*."""
    return ''.join(random.choice(char_set) for _ in range(captcha_size))
# Turn a captcha into numeric (array) form.
def gen_captcha_text_and_image():
    """Create a random captcha; return (text, rendered image as ndarray)."""
    text = random_captcha_text()
    rendered = ImageCaptcha().generate(text)
    # ImageCaptcha().write(text, text + '.png')  # would also save it to disk
    image = np.array(Image.open(rendered))
    return text, image
if __name__ == '__main__':
    # Endless demo loop: generate a captcha, print its text and array shape,
    # and display it until the user closes the window / interrupts.
    while True:
        text, image = gen_captcha_text_and_image()
        print(text)
        print(image.shape)
        # Show the original text in the top-left corner of the figure
        f = plt.figure()
        ax = f.add_subplot(111)
        ax.text(0.1, 0.9, text, ha='center', va='center', transform=ax.transAxes)
        plt.imshow(image)
        plt.show()
| 21.957447
| 75
| 0.745155
|
4a03b9aed73aa4c385bd813a476915476ff90a3f
| 240
|
py
|
Python
|
HackyEaster/he2022/level8/ch35/payload-unpickle.py
|
tbrup/ctf-writeups
|
dfac11abb3051af657ed3384c3c389c14a40c10e
|
[
"MIT"
] | null | null | null |
HackyEaster/he2022/level8/ch35/payload-unpickle.py
|
tbrup/ctf-writeups
|
dfac11abb3051af657ed3384c3c389c14a40c10e
|
[
"MIT"
] | null | null | null |
HackyEaster/he2022/level8/ch35/payload-unpickle.py
|
tbrup/ctf-writeups
|
dfac11abb3051af657ed3384c3c389c14a40c10e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#
# Pickle deserialization RCE payload (CTF exercise).
# Unpickling 'rce.png' executes whatever code its crafted pickle stream
# contains.
# WARNING: pickle.load runs arbitrary code from the input; never do this
# with untrusted data outside a deliberate exercise like this one.
#
import pickle
with open('rce.png', 'rb') as inF:
    pickle.load(inF)
| 20
| 64
| 0.7125
|
4a03bb80aa92dac9b41b72f5f5979100473424fe
| 1,056
|
py
|
Python
|
qiskit/aqua/components/uncertainty_problems/__init__.py
|
pistoia/qiskit-aqua
|
c7900ffdabc1499145739bfab29a392709bee1a0
|
[
"Apache-2.0"
] | 1
|
2019-01-03T20:06:19.000Z
|
2019-01-03T20:06:19.000Z
|
qiskit_aqua/components/uncertainty_problems/__init__.py
|
jodyburksphd/qiskit-aqua
|
d1050e3362276894b0e3442717f0f2a774a177b0
|
[
"Apache-2.0"
] | null | null | null |
qiskit_aqua/components/uncertainty_problems/__init__.py
|
jodyburksphd/qiskit-aqua
|
d1050e3362276894b0e3442717f0f2a774a177b0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2018 IBM.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
from .uncertainty_problem import UncertaintyProblem
from .european_call_delta import EuropeanCallDelta
from .european_call_expected_value import EuropeanCallExpectedValue
from .fixed_income_expected_value import FixedIncomeExpectedValue
__all__ = ['UncertaintyProblem',
'EuropeanCallDelta',
'EuropeanCallExpectedValue',
'FixedIncomeExpectedValue']
| 39.111111
| 79
| 0.710227
|
4a03bcb3bd8c6e28dbe929f292e23f8ce9517be0
| 3,085
|
py
|
Python
|
tests/conftest.py
|
leoil/nequip
|
83b888797025c94b9963a508bc213a7c98da5bcb
|
[
"MIT"
] | 1
|
2022-03-13T10:17:53.000Z
|
2022-03-13T10:17:53.000Z
|
tests/conftest.py
|
leoil/nequip
|
83b888797025c94b9963a508bc213a7c98da5bcb
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
leoil/nequip
|
83b888797025c94b9963a508bc213a7c98da5bcb
|
[
"MIT"
] | null | null | null |
from typing import List
import numpy as np
import pathlib
import pytest
import tempfile
from ase.atoms import Atoms
from ase.build import molecule
from ase.calculators.singlepoint import SinglePointCalculator
from ase.io import write
import torch
from torch_geometric.data import Batch
from nequip.utils.test import set_irreps_debug
from nequip.data import AtomicData, ASEDataset
# For good practice, we *should* do this:
# See https://docs.pytest.org/en/stable/fixture.html#using-fixtures-from-other-projects
# pytest_plugins = ['e3nn.util.test']
# But doing so exposes float_tolerance to doctests, which don't support parametrized, autouse fixtures.
# Importing directly somehow only brings in the fixture later, preventing the issue.
from e3nn.util.test import float_tolerance
# Suppress linter errors
float_tolerance = float_tolerance
# - Ampere and TF32 -
# Many of the tests for NequIP involve numerically checking
# algebraic properties— normalization, equivariance,
# continuity, etc.
# With the added numerical noise of TF32, some of those tests fail
# with the current (and usually generous) thresholds.
#
# Thus we go on the assumption that PyTorch + NVIDIA got everything
# right, that this setting DOES NOT AFFECT the model outputs except
# for increased numerical noise, and only test without it.
#
# TODO: consider running tests with and without
# TODO: check how much thresholds have to be changed to accomidate TF32
torch.backends.cuda.matmul.allow_tf32 = False
torch.backends.cudnn.allow_tf32 = False
@pytest.fixture(scope="session")
def BENCHMARK_ROOT():
    """Path to the shared benchmark data directory next to the test tree."""
    return pathlib.Path(__file__).parent / "../benchmark_data/"
@pytest.fixture(scope="session")
def temp_data(float_tolerance):
    """Session-scoped scratch directory, removed automatically at teardown."""
    with tempfile.TemporaryDirectory() as tmpdirname:
        yield tmpdirname
@pytest.fixture(scope="session")
def CH3CHO(float_tolerance) -> AtomicData:
    """Acetaldehyde as an (ase.Atoms, AtomicData) pair built with r_max=2.0."""
    atoms = molecule("CH3CHO")
    data = AtomicData.from_ase(atoms, r_max=2.0)
    return atoms, data
@pytest.fixture(scope="session")
def molecules() -> List[Atoms]:
    """Eight rattled molecules (alternating CH3CHO / H2) with random
    single-point energies and forces attached."""
    atoms_list = []
    for i in range(8):
        atoms = molecule("CH3CHO" if i % 2 == 0 else "H2")
        atoms.rattle()  # perturb positions so the geometries differ
        atoms.calc = SinglePointCalculator(
            energy=np.random.random(),
            forces=np.random.random((len(atoms), 3)),
            stress=None,
            magmoms=None,
            atoms=atoms,
        )
        atoms_list.append(atoms)
    return atoms_list
@pytest.fixture(scope="session")
def nequip_dataset(molecules, temp_data, float_tolerance):
    """ASEDataset built from the `molecules` fixture via a temporary extxyz
    file, with a fixed cutoff r_max=3.0."""
    with tempfile.NamedTemporaryFile(suffix=".xyz") as fp:
        for atoms in molecules:
            write(fp.name, atoms, format="extxyz", append=True)
        a = ASEDataset(
            file_name=fp.name,
            root=temp_data,
            extra_fixed_fields={"r_max": 3.0},
            ase_args=dict(format="extxyz"),
        )
        yield a
@pytest.fixture(scope="session")
def atomic_batch(nequip_dataset):
    """Batch containing the first two configurations of `nequip_dataset`."""
    return Batch.from_data_list([nequip_dataset.data[0], nequip_dataset.data[1]])
# Use debug mode
set_irreps_debug(True)
| 30.544554
| 103
| 0.719611
|
4a03bd498577b45bb9f1da681116cbf102a4510e
| 836
|
py
|
Python
|
tcex/threat_intelligence/mappings/group/group_types/email.py
|
kdeltared/tcex
|
818c0d09256764f871e42d9ca5916f92d941d882
|
[
"Apache-2.0"
] | null | null | null |
tcex/threat_intelligence/mappings/group/group_types/email.py
|
kdeltared/tcex
|
818c0d09256764f871e42d9ca5916f92d941d882
|
[
"Apache-2.0"
] | null | null | null |
tcex/threat_intelligence/mappings/group/group_types/email.py
|
kdeltared/tcex
|
818c0d09256764f871e42d9ca5916f92d941d882
|
[
"Apache-2.0"
] | null | null | null |
"""ThreatConnect TI Email"""
from ..group import Group
class Email(Group):
    """ThreatConnect TI Email group type.

    Unique API calls for Email API Endpoints.

    Args:
        tcex (TcEx): An instantiated instance of TcEx object.
        body (str, kwargs): The body for this Email.
        from_addr (str, kwargs): The **from** address for this Email.
        header (str, kwargs): The header for this Email.
        name (str, kwargs): [Required for Create] The name for this Group.
        owner (str, kwargs): The name for this Group. Default to default Org when not provided
        subject (str, kwargs): The subject for this Email.
        to (str, kwargs): The **to** address for this Email.
    """
    def __init__(self, tcex, **kwargs):
        """Initialize Class properties.

        Delegates to the base ``Group`` with the Email-specific sub type
        and API entity/branch names.
        """
        super().__init__(tcex, sub_type='Email', api_entity='email', api_branch='emails', **kwargs)
| 38
| 99
| 0.639952
|
4a03bd4dad851d2371c9be55bde0f89ae453adf6
| 2,683
|
py
|
Python
|
show-versions.py
|
mqelibari/Document_searcher
|
1d4f0800471a76e7fb4b0f1d17e84b3df8e8d70a
|
[
"MIT"
] | null | null | null |
show-versions.py
|
mqelibari/Document_searcher
|
1d4f0800471a76e7fb4b0f1d17e84b3df8e8d70a
|
[
"MIT"
] | null | null | null |
show-versions.py
|
mqelibari/Document_searcher
|
1d4f0800471a76e7fb4b0f1d17e84b3df8e8d70a
|
[
"MIT"
] | null | null | null |
#!/usr/local/bin/python3
import sys
from pathlib import Path
import glob
import os
global ver
def change_working_dir():
    """Change the CWD to ``~/commands/`` and return that path.

    The returned string keeps a trailing separator because callers append
    glob patterns to it directly (see ``get_py_files``).
    """
    # Use os.path.join instead of string '+' so the path is built portably;
    # joining with "" preserves the trailing separator callers rely on.
    home = os.path.join(str(Path.home()), "commands", "")
    os.chdir(home)
    return home
def get_py_files(path):
    """Return the basenames of the files matched by ``path + '*.py'``."""
    matched = glob.glob(pathname=(path + "*.py"))
    return [full_path.rsplit("/", 1)[1] for full_path in matched]
def get_func_return_value(file):
    """Import ``version`` from *file* in a python3 subprocess, call it, and
    return whatever it wrote to stdout as a string.

    NOTE(review): the shell command is built from the filename without any
    quoting (``os.popen``), so a filename containing shell metacharacters
    would be executed -- confirm inputs are trusted, or switch to
    ``subprocess.run`` with a list argument.
    """
    vn = os.popen(f"python3 -c 'from {file[:-3]} import version; import sys; sys.stdout.write(version())'").read()
    return vn
def get_var_value(line, file, var_and_val):
    """Parse a ``version = ...`` assignment out of *line*.

    Splits the line at '=' and ',', and for each position where the name
    'version' appears on the left-hand side, appends [file, value-at-the-
    same-position-on-the-right] to *var_and_val* (mutated in place).
    """
    pieces = line.split("=")
    cleaned = [p.replace(" ", "").replace("\n", "").split(",") for p in pieces]
    for pos, tokens in enumerate(cleaned):
        if "version" in tokens:
            var_and_val.append([file, cleaned[1][pos]])
def search_for_var_or_func_in_file(files):
    """Scan each file for a ``version`` variable or ``version()`` function.

    Returns a list of [filename, version-string] pairs. Function-style
    versions are evaluated via ``get_func_return_value`` (a subprocess);
    variable-style ones are parsed textually via ``get_var_value``.
    """
    var_and_val = []
    for file in files:
        with open(file, "r") as f:
            a = f.readlines()
        definition = False
        for line in a:
            # `definition` means the previous line declared `def version`;
            # the first following line mentioning "version(" triggers
            # executing the function to obtain its value.
            if definition and "version(" in line:
                definition = False
                vn = get_func_return_value(file)
                var_and_val.append([file, vn])
            # NOTE(review): since `and` binds tighter than `or`, the trailing
            # `and "version():" not in line` only guards the last
            # `"version="` test -- it may have been meant to guard all four;
            # confirm intent before changing.
            if "version =" in line or "version," in line or " version" in line or "version=" in line and "version():" not in line:
                get_var_value(line, file, var_and_val)
            if "def version" in line:
                definition = True
    return var_and_val
def get_var_val(files, version_list):
    """Append [module, version] pairs for modules exposing a string ``version``.

    *files* are imported module objects; *version_list* is mutated in place.
    """
    for module in files:
        attrs = module.__dict__
        if "version" in attrs and isinstance(attrs["version"], str):
            version_list.append([module, attrs["version"]])
def get_version(files):
    """Import each ``*.py`` file and extract its ``version`` attribute.

    ``version`` may be a plain attribute or a zero-argument callable; the
    result is a list of [filename, version-value] pairs.

    NOTE(review): uses ``exec`` plus the module-level ``global ver`` as a
    side channel to get values out of the executed statements -- fragile,
    and importing executes arbitrary module code; ``importlib`` with
    ``getattr`` would be safer.
    """
    ver_func = []
    for file in files:
        try:
            exec(f"import {file[:-3]}")
            exec(f"global ver; ver = {file[:-3]}.version")
            if callable(ver):
                # version is a function: call it to obtain the actual value
                exec(f"global ver; ver = {file[:-3]}.version()")
                ver_func.append([file, ver])
            else:
                exec(f"global ver; ver = {file[:-3]}.__dict__['version']")
                ver_func.append([file, ver])
        except ImportError:
            print("Could not Import File!")
    return ver_func
if __name__ == "__main__":
    # NOTE(review): hard-coded user path; presumably should be derived from
    # Path.home() the way change_working_dir() does -- confirm.
    sys.path.insert(0, "/Users/fq/commands")
    os.chdir("/Users/fq/commands")
    new_work_dir = change_working_dir()
    py_files = get_py_files(new_work_dir)
    # Textual scan kept for reference; the import-based extraction is used.
    #var_and_val = search_for_var_or_func_in_file(py_files)
    var_and_val = get_version(py_files)
    print(var_and_val)
| 30.146067
| 130
| 0.595975
|
4a03bd7fcee05d9fc2e55d15ab7511b1e71605fa
| 92
|
py
|
Python
|
examples/setuptools-test_loader/a_package/some_tests.py
|
andreztz/SomePackage
|
eb12009e32d8da6e8699b539c1db89e93a515887
|
[
"BSD-2-Clause"
] | 1
|
2019-03-13T15:07:31.000Z
|
2019-03-13T15:07:31.000Z
|
examples/setuptools-test_loader/a_package/some_tests.py
|
andreztz/SomePackage
|
eb12009e32d8da6e8699b539c1db89e93a515887
|
[
"BSD-2-Clause"
] | null | null | null |
examples/setuptools-test_loader/a_package/some_tests.py
|
andreztz/SomePackage
|
eb12009e32d8da6e8699b539c1db89e93a515887
|
[
"BSD-2-Clause"
] | null | null | null |
import unittest
class TheTestCase(unittest.TestCase):
    """Minimal test case; exists so the setuptools test loader finds a test."""
    def test_foo(self):
        # Intentionally a no-op: only test discovery/collection is exercised.
        pass
| 15.333333
| 37
| 0.706522
|
4a03bf19ddfe7b027204b03a6c498f19f863d9d9
| 4,965
|
py
|
Python
|
nova/tests/unit/virt/xenapi/image/test_bittorrent.py
|
cloud-zuiwanyuan/nova
|
0b59a2d9dc22e4fb172810019dba5ece09bb4526
|
[
"Apache-2.0"
] | 1
|
2019-07-29T10:30:24.000Z
|
2019-07-29T10:30:24.000Z
|
nova/tests/unit/virt/xenapi/image/test_bittorrent.py
|
cloud-zuiwanyuan/nova
|
0b59a2d9dc22e4fb172810019dba5ece09bb4526
|
[
"Apache-2.0"
] | 1
|
2016-04-04T18:41:59.000Z
|
2016-04-04T18:41:59.000Z
|
nova/tests/unit/virt/xenapi/image/test_bittorrent.py
|
cloud-zuiwanyuan/nova
|
0b59a2d9dc22e4fb172810019dba5ece09bb4526
|
[
"Apache-2.0"
] | 2
|
2015-12-04T23:51:46.000Z
|
2016-06-07T20:01:59.000Z
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mox3 import mox
import six
from nova import context
from nova import test
from nova.tests.unit.virt.xenapi import stubs
from nova.virt.xenapi import driver as xenapi_conn
from nova.virt.xenapi import fake
from nova.virt.xenapi.image import bittorrent
from nova.virt.xenapi import vm_utils
class TestBittorrentStore(stubs.XenAPITestBaseNoDB):
    """Unit tests for the XenAPI bittorrent-backed image store."""
    def setUp(self):
        """Wire up a fake XenAPI session, config flags and mox."""
        super(TestBittorrentStore, self).setUp()
        self.store = bittorrent.BittorrentStore()
        self.mox = mox.Mox()
        self.flags(torrent_base_url='http://foo',
                   connection_url='test_url',
                   connection_password='test_pass',
                   group='xenserver')
        self.context = context.RequestContext(
            'user', 'project', auth_token='foobar')
        fake.reset()
        stubs.stubout_session(self.stubs, fake.SessionBase)
        driver = xenapi_conn.XenAPIDriver(False)
        self.session = driver._session
        self.stubs.Set(
            vm_utils, 'get_sr_path', lambda *a, **kw: '/fake/sr/path')
    def test_download_image(self):
        """download_image must call the bittorrent plugin with the expected
        serialized parameters (defaults plus the derived torrent URL)."""
        instance = {'uuid': '00000000-0000-0000-0000-000000007357'}
        params = {'image_id': 'fake_image_uuid',
                  'sr_path': '/fake/sr/path',
                  'torrent_download_stall_cutoff': 600,
                  'torrent_listen_port_end': 6891,
                  'torrent_listen_port_start': 6881,
                  'torrent_max_last_accessed': 86400,
                  'torrent_max_seeder_processes_per_host': 1,
                  'torrent_seed_chance': 1.0,
                  'torrent_seed_duration': 3600,
                  'torrent_url': 'http://foo/fake_image_uuid.torrent',
                  'uuid_stack': ['uuid1']}
        self.stubs.Set(vm_utils, '_make_uuid_stack',
                       lambda *a, **kw: ['uuid1'])
        self.mox.StubOutWithMock(self.session, 'call_plugin_serialized')
        self.session.call_plugin_serialized(
            'bittorrent.py', 'download_vhd', **params)
        self.mox.ReplayAll()
        self.store.download_image(self.context, self.session,
                                  instance, 'fake_image_uuid')
        self.mox.VerifyAll()
    def test_upload_image(self):
        """Uploading through the bittorrent store is unsupported."""
        self.assertRaises(NotImplementedError, self.store.upload_image,
                self.context, self.session, mox.IgnoreArg, 'fake_image_uuid',
                ['fake_vdi_uuid'])
class LookupTorrentURLTestCase(test.NoDBTestCase):
    """Tests for BittorrentStore._lookup_torrent_url_fn URL resolution."""
    def setUp(self):
        super(LookupTorrentURLTestCase, self).setUp()
        self.store = bittorrent.BittorrentStore()
        self.image_id = 'fakeimageid'
    def test_default_fetch_url_no_base_url_set(self):
        """Without torrent_base_url the lookup factory must fail loudly."""
        self.flags(torrent_base_url=None,
                   group='xenserver')
        exc = self.assertRaises(
            RuntimeError, self.store._lookup_torrent_url_fn)
        self.assertEqual('Cannot create default bittorrent URL without'
                         ' xenserver.torrent_base_url configuration option'
                         ' set.',
                         six.text_type(exc))
    def test_default_fetch_url_base_url_is_set(self):
        """With a base URL, lookup yields <base_url>/<image_id>.torrent."""
        self.flags(torrent_base_url='http://foo',
                   group='xenserver')
        lookup_fn = self.store._lookup_torrent_url_fn()
        self.assertEqual('http://foo/fakeimageid.torrent',
                         lookup_fn(self.image_id))
    def test_invalid_base_url_warning_logged(self):
        """A base URL without any slash must log a warning."""
        self.flags(torrent_base_url='www.foo.com',
                   group='xenserver')
        # Make sure a warning is logged when an invalid base URL is set,
        # where invalid means it does not contain any slash characters
        warnings = []
        def fake_warn(msg):
            warnings.append(msg)
        self.stubs.Set(bittorrent.LOG, 'warning', fake_warn)
        lookup_fn = self.store._lookup_torrent_url_fn()
        self.assertEqual('fakeimageid.torrent',
                         lookup_fn(self.image_id))
        self.assertTrue(any('does not contain a slash character' in msg for
                            msg in warnings),
                        '_lookup_torrent_url_fn() did not log a warning '
                        'message when the torrent_base_url did not contain a '
                        'slash character.')
| 37.613636
| 78
| 0.622155
|
4a03bf968a41c0f5f2cd8de84f0b967ccd85da06
| 1,021
|
py
|
Python
|
accounts/migrations/0004_auto_20200404_1315.py
|
BVPIEEE/hackathon_portal
|
0a67704ae42d5680c6f4d05ca9c63a1f06f556c0
|
[
"Apache-2.0"
] | 3
|
2020-04-07T12:25:59.000Z
|
2020-04-16T18:21:27.000Z
|
accounts/migrations/0004_auto_20200404_1315.py
|
BVPIEEE/hackathon_portal
|
0a67704ae42d5680c6f4d05ca9c63a1f06f556c0
|
[
"Apache-2.0"
] | 3
|
2021-03-30T12:57:08.000Z
|
2021-09-22T18:48:35.000Z
|
accounts/migrations/0004_auto_20200404_1315.py
|
BVPIEEE/hackathon_portal
|
0a67704ae42d5680c6f4d05ca9c63a1f06f556c0
|
[
"Apache-2.0"
] | 1
|
2020-04-16T18:21:38.000Z
|
2020-04-16T18:21:38.000Z
|
# Generated by Django 3.0.4 on 2020-04-04 07:45
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django. Replaces the per-section/final boolean
    # fields on PhaseSelectionModel with a single integer `round` counter.
    dependencies = [
        ('accounts', '0003_auto_20200402_1849'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='phaseselectionmodel',
            name='final',
        ),
        migrations.RemoveField(
            model_name='phaseselectionmodel',
            name='section1',
        ),
        migrations.RemoveField(
            model_name='phaseselectionmodel',
            name='section2',
        ),
        migrations.RemoveField(
            model_name='phaseselectionmodel',
            name='section3',
        ),
        migrations.RemoveField(
            model_name='phaseselectionmodel',
            name='section4',
        ),
        migrations.AddField(
            model_name='phaseselectionmodel',
            name='round',
            field=models.IntegerField(default=1),
        ),
    ]
| 26.179487
| 50
| 0.536729
|
4a03c008d7bd6fe1b25466b8281d9ae2e9c1adf4
| 28
|
py
|
Python
|
hover/hover/test/test_core.py
|
DouglasOrr/Snippets
|
026e15a422b518ee7d9ce4849f971c4403ad9fe8
|
[
"MIT"
] | null | null | null |
hover/hover/test/test_core.py
|
DouglasOrr/Snippets
|
026e15a422b518ee7d9ce4849f971c4403ad9fe8
|
[
"MIT"
] | 1
|
2020-04-11T18:07:19.000Z
|
2020-04-11T18:07:19.000Z
|
hover/hover/test/test_core.py
|
DouglasOrr/Snippets
|
026e15a422b518ee7d9ce4849f971c4403ad9fe8
|
[
"MIT"
] | null | null | null |
from .. import core # noqa
| 14
| 27
| 0.642857
|
4a03c0687f3876d35fa4901d77cfef13dbdf35a4
| 2,571
|
py
|
Python
|
utils/model_helper.py
|
Ian-Liao/SegCaps
|
1b67516e5a19ba30286c9fb5fec12c639ef140bc
|
[
"Apache-2.0"
] | null | null | null |
utils/model_helper.py
|
Ian-Liao/SegCaps
|
1b67516e5a19ba30286c9fb5fec12c639ef140bc
|
[
"Apache-2.0"
] | 11
|
2020-09-26T01:22:00.000Z
|
2022-03-12T00:42:16.000Z
|
utils/model_helper.py
|
Ian-Liao/SegCaps
|
1b67516e5a19ba30286c9fb5fec12c639ef140bc
|
[
"Apache-2.0"
] | 1
|
2020-07-24T17:28:08.000Z
|
2020-07-24T17:28:08.000Z
|
'''
Capsules for Object Segmentation (SegCaps)
Original Paper by Rodney LaLonde and Ulas Bagci (https://arxiv.org/abs/1804.04241)
Code written by: Rodney LaLonde
If you use significant portions of this code or the ideas from our paper, please cite it :)
If you have any questions, please email me at lalonde@knights.ucf.edu.
This is a helper file for choosing which model to create.
'''
import tensorflow as tf
def create_model(args, input_shape, enable_decoder=True):
    """Instantiate the network architecture selected by ``args.net``.

    Args:
        args: parsed CLI arguments; uses ``args.net`` (architecture name),
            ``args.gpus`` (GPU count) and, for segcapsr3, ``args.num_class``.
        input_shape: shape of the model input.
        enable_decoder: whether to build the reconstruction decoder for
            the ``segcapsr3`` network.

    Returns:
        A list of models (capsule networks return several variants; the
        plain networks are wrapped in a single-element list).

    Raises:
        Exception: if ``args.net`` is not a recognized network type.
    """
    def _build():
        # Single dispatch point for the architecture selection; this body
        # was previously duplicated verbatim in the CPU and multi-GPU
        # branches.
        if args.net == 'unet':
            from models.unet import UNet
            return [UNet(input_shape)]
        elif args.net == 'tiramisu':
            from models.densenets import DenseNetFCN
            return [DenseNetFCN(input_shape)]
        elif args.net == 'segcapsr1':
            from segcapsnet.capsnet import CapsNetR1
            return CapsNetR1(input_shape)
        elif args.net == 'segcapsr3':
            from segcapsnet.capsnet import CapsNetR3
            return CapsNetR3(input_shape, args.num_class, enable_decoder)
        elif args.net == 'capsbasic':
            from segcapsnet.capsnet import CapsNetBasic
            return CapsNetBasic(input_shape)
        raise Exception('Unknown network type specified: {}'.format(args.net))

    # If using CPU or single GPU
    if args.gpus <= 1:
        return _build()
    # If using multiple GPUs: keep the master weights on the CPU so the
    # per-GPU replicas can synchronize against them.
    with tf.device("/cpu:0"):
        return _build()
| 40.809524
| 91
| 0.604823
|
4a03c0ca7500c2ccc23a17aa968a8d5d2b6368ab
| 219
|
py
|
Python
|
src/tests/test_base_common.py
|
charlestian/NetEaseMusic
|
5d8fa4747fcecabb5a09174ff6838718d62c2b31
|
[
"MIT"
] | 19
|
2015-04-18T15:16:58.000Z
|
2021-04-12T06:19:48.000Z
|
src/tests/test_base_common.py
|
charlestian/NetEaseMusic
|
5d8fa4747fcecabb5a09174ff6838718d62c2b31
|
[
"MIT"
] | null | null | null |
src/tests/test_base_common.py
|
charlestian/NetEaseMusic
|
5d8fa4747fcecabb5a09174ff6838718d62c2b31
|
[
"MIT"
] | 12
|
2015-04-18T15:16:59.000Z
|
2016-04-20T17:22:58.000Z
|
# -*- coding: utf8 -*-
import pytest
from base.common import *
def test_singleton():
    """Two instantiations of a @singleton class must be the same object."""
    @singleton
    class A():
        def __init__(self):
            pass

    first = A()
    second = A()
    assert first is second
| 12.882353
| 27
| 0.515982
|
4a03c20b289ca5d0f14b8a113d227b58c06bd8cb
| 727
|
py
|
Python
|
accounts/migrations/0004_auto_20200823_0225.py
|
AnjalBam/iw-capstone-project
|
38065885ada3d236823689c8a9e43f581d6f5a97
|
[
"MIT"
] | null | null | null |
accounts/migrations/0004_auto_20200823_0225.py
|
AnjalBam/iw-capstone-project
|
38065885ada3d236823689c8a9e43f581d6f5a97
|
[
"MIT"
] | 9
|
2021-04-08T20:15:32.000Z
|
2022-03-12T00:50:21.000Z
|
accounts/migrations/0004_auto_20200823_0225.py
|
AnjalBam/iw-capstone-project
|
38065885ada3d236823689c8a9e43f581d6f5a97
|
[
"MIT"
] | 1
|
2020-09-12T05:25:23.000Z
|
2020-09-12T05:25:23.000Z
|
# Generated by Django 3.1 on 2020-08-23 02:25
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django. Adjusts Education fields: shorter college
    # CharField and required IntegerFields for semester and year.
    dependencies = [
        ('accounts', '0003_merge_20200818_0143'),
    ]
    operations = [
        migrations.AlterField(
            model_name='education',
            name='college',
            field=models.CharField(max_length=30),
        ),
        migrations.AlterField(
            model_name='education',
            name='semester',
            field=models.IntegerField(default=3),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='education',
            name='year',
            field=models.IntegerField(),
        ),
    ]
| 24.233333
| 50
| 0.562586
|
4a03c3831b54147adc94ab3d8a911a6f40537f4c
| 4,668
|
py
|
Python
|
get_aws_range.py
|
hoxhaeris/aws_helper
|
fe52754c83620284acc688abc6cf87809ca4268b
|
[
"MIT"
] | 1
|
2021-02-24T16:20:52.000Z
|
2021-02-24T16:20:52.000Z
|
get_aws_range.py
|
hoxhaeris/aws_helper
|
fe52754c83620284acc688abc6cf87809ca4268b
|
[
"MIT"
] | null | null | null |
get_aws_range.py
|
hoxhaeris/aws_helper
|
fe52754c83620284acc688abc6cf87809ca4268b
|
[
"MIT"
] | null | null | null |
import argparse
import json
import requests
def parse_filters():
    """Return an argparse Action class that parses 'k1=v1,k2=v2' strings
    into a dict stored on the destination attribute."""
    class ParseFilterString(argparse.Action):
        def __call__(self, parser, args, values, option_string=None):
            parsed = {}
            for pair in values.split(','):
                tokens = pair.split('=')
                parsed[tokens[0]] = tokens[1]
            setattr(args, self.dest, parsed)
    return ParseFilterString
def cli_parser():
    """Build the CLI and parse ``sys.argv``.

    Returns the parsed argparse Namespace; ``fromfile_prefix_chars='@'``
    lets callers pass an @file containing arguments.
    """
    my_parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
    # --filters uses the custom Action from parse_filters() to turn the
    # comma-separated key=value string into a dict.
    my_parser.add_argument('-f', '--filters',
                           type=str,
                           help='Define Custom filters. To get the list of possible keys, use the '
                                '--get_available_filters option. Use this format for the filter: --filters '
                                'service=S3,region=eu-central-1. Mind the spaces in the input string, use quotes when '
                                'applicable',
                           action=parse_filters(),
                           default={})
    my_parser.add_argument('--get_available_keys_for_filter',
                           help='List all available AWS services from which you can filter',
                           default=None)
    my_parser.add_argument('--get_available_filters',
                           help='List all available AWS possible keys per filter, that you can choose from',
                           action="store_true")
    my_parser.add_argument('-p', '--print_selected_range',
                           help='Print the selected IP range in stdout, instead of writing it to a file(default)',
                           action="store_true")
    my_parser.add_argument('--file',
                           type=str,
                           help='The file you want the result to be writen to. By default, it will use the current '
                                'directory',
                           default=None)
    args = my_parser.parse_args()
    return args
class AWSRanges:
def __init__(self,
cli_filters: dict = None,
url: str = "https://ip-ranges.amazonaws.com/ip-ranges.json"
):
self.url = url
self.cli_filters = cli_filters
def get_range(self):
data = requests.get(url=self.url)
return data.json()['prefixes']
def get_available_keys_per_filter(self, filter_key):
service_set = set()
for key in self.get_range():
service_set.add(key[filter_key])
return list(service_set)
def get_available_filters(self):
filters_set = []
for prefix in self.get_range():
for key in prefix:
filters_set.append(key)
break
return filters_set
def filter_range(self):
for aws_range in self.get_range():
include_range = True
for filter_key, value in self.cli_filters.items():
if not aws_range[filter_key] == value:
include_range = False
if include_range is True:
yield aws_range
def range_to_ip_prefix(self):
return (key['ip_prefix'] for key in self.filter_range())
def write_to_file(self, object_to_write=None, file: str = None):
if object_to_write is None:
object_to_write = self.range_to_ip_prefix()
if file is None:
name_string = ""
for key, value in self.cli_filters.items():
name_string = f"{name_string}_{key}_{value}"
file = f"aws_range{name_string}"
with open(file, 'w') as f:
for entry in object_to_write:
f.write(entry)
f.write('\n')
if __name__ == "__main__":
cli_args = cli_parser()
aws_range_to_route = AWSRanges(cli_filters=cli_args.filters)
if cli_args.get_available_keys_for_filter is not None:
print(json.dumps(
aws_range_to_route.get_available_keys_per_filter(filter_key=cli_args.get_available_keys_for_filter),
indent=4))
exit(0)
if cli_args.print_selected_range is True:
for i in aws_range_to_route.range_to_ip_prefix():
print(i)
exit(0)
if cli_args.get_available_filters is True:
print(json.dumps(aws_range_to_route.get_available_filters(), indent=4))
aws_range_to_route.write_to_file(file=cli_args.file)
| 40.591304
| 120
| 0.565338
|
4a03c3d1c01560ac2599543179ec71e4b77c3f80
| 4,304
|
py
|
Python
|
tools/c7n_azure/c7n_azure/constants.py
|
chris-angeli-rft/cloud-custodian
|
5ff331b114a591dbaf6d672e30ceefb7ae64a5dd
|
[
"Apache-2.0"
] | 2
|
2020-10-20T11:05:54.000Z
|
2021-05-09T15:24:01.000Z
|
tools/c7n_azure/c7n_azure/constants.py
|
chris-angeli-rft/cloud-custodian
|
5ff331b114a591dbaf6d672e30ceefb7ae64a5dd
|
[
"Apache-2.0"
] | 1
|
2021-04-30T21:13:50.000Z
|
2021-04-30T21:13:50.000Z
|
tools/c7n_azure/c7n_azure/constants.py
|
chris-angeli-rft/cloud-custodian
|
5ff331b114a591dbaf6d672e30ceefb7ae64a5dd
|
[
"Apache-2.0"
] | 1
|
2020-12-28T23:21:30.000Z
|
2020-12-28T23:21:30.000Z
|
# Copyright 2019 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Azure Functions
"""
# Docker version from https://hub.docker.com/r/microsoft/azure-functions/
FUNCTION_DOCKER_VERSION = 'DOCKER|mcr.microsoft.com/azure-functions/python:2.0-python3.6-appservice'
FUNCTION_EXT_VERSION = '~2'
FUNCTION_EVENT_TRIGGER_MODE = 'azure-event-grid'
FUNCTION_TIME_TRIGGER_MODE = 'azure-periodic'
FUNCTION_KEY_URL = 'hostruntime/admin/host/systemkeys/_master?api-version=2018-02-01'
FUNCTION_AUTOSCALE_NAME = 'cloud_custodian_default'
"""
Azure Container Host
"""
CONTAINER_EVENT_TRIGGER_MODE = 'container-event'
CONTAINER_TIME_TRIGGER_MODE = 'container-periodic'
ENV_CONTAINER_STORAGE_RESOURCE_ID = 'AZURE_CONTAINER_STORAGE_RESOURCE_ID'
ENV_CONTAINER_QUEUE_NAME = 'AZURE_CONTAINER_QUEUE_NAME'
ENV_CONTAINER_POLICY_URI = 'AZURE_CONTAINER_POLICY_URI'
ENV_CONTAINER_OPTION_LOG_GROUP = 'AZURE_CONTAINER_LOG_GROUP'
ENV_CONTAINER_OPTION_METRICS = 'AZURE_CONTAINER_METRICS'
ENV_CONTAINER_OPTION_OUTPUT_DIR = 'AZURE_CONTAINER_OUTPUT_DIR'
"""
Event Grid Mode
"""
EVENT_GRID_UPN_CLAIM_JMES_PATH = \
'data.claims."http://schemas.xmlsoap.org/ws/2005/05/identity/claims/upn"'
EVENT_GRID_NAME_CLAIM_JMES_PATH = \
'data.claims."http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name"'
EVENT_GRID_SP_NAME_JMES_PATH = 'data.claims.appid'
EVENT_GRID_SERVICE_ADMIN_JMES_PATH = \
'data.claims."http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress"'
EVENT_GRID_PRINCIPAL_TYPE_JMES_PATH = 'data.authorization.evidence.principalType'
EVENT_GRID_PRINCIPAL_ROLE_JMES_PATH = 'data.authorization.evidence.role'
EVENT_GRID_EVENT_TIME_PATH = 'eventTime'
"""
Environment Variables
"""
ENV_TENANT_ID = 'AZURE_TENANT_ID'
ENV_CLIENT_ID = 'AZURE_CLIENT_ID'
ENV_SUB_ID = 'AZURE_SUBSCRIPTION_ID'
ENV_CLIENT_SECRET = 'AZURE_CLIENT_SECRET'
ENV_KEYVAULT_CLIENT_ID = 'AZURE_KEYVAULT_CLIENT_ID'
ENV_KEYVAULT_SECRET_ID = 'AZURE_KEYVAULT_SECRET'
ENV_ACCESS_TOKEN = 'AZURE_ACCESS_TOKEN'
ENV_USE_MSI = 'AZURE_USE_MSI'
ENV_FUNCTION_TENANT_ID = 'AZURE_FUNCTION_TENANT_ID'
ENV_FUNCTION_CLIENT_ID = 'AZURE_FUNCTION_CLIENT_ID'
ENV_FUNCTION_CLIENT_SECRET = 'AZURE_FUNCTION_CLIENT_SECRET'
ENV_FUNCTION_SUB_ID = 'AZURE_FUNCTION_SUBSCRIPTION_ID'
ENV_FUNCTION_MANAGEMENT_GROUP_NAME = 'AZURE_FUNCTION_MANAGEMENT_GROUP_NAME'
# Allow disabling SSL cert validation (ex: custom domain for ASE functions)
ENV_CUSTODIAN_DISABLE_SSL_CERT_VERIFICATION = 'CUSTODIAN_DISABLE_SSL_CERT_VERIFICATION'
"""
Authentication Resource
"""
RESOURCE_ACTIVE_DIRECTORY = 'https://management.core.windows.net/'
RESOURCE_STORAGE = 'https://storage.azure.com/'
RESOURCE_VAULT = 'https://vault.azure.net'
"""
Threading Variable
"""
DEFAULT_MAX_THREAD_WORKERS = 3
DEFAULT_CHUNK_SIZE = 20
"""
Custom Retry Code Variables
"""
DEFAULT_MAX_RETRY_AFTER = 30
"""
KeyVault url templates
"""
TEMPLATE_KEYVAULT_URL = 'https://{0}.vault.azure.net'
"""
Azure Functions Host Configuration
"""
FUNCTION_HOST_CONFIG = {
"version": "2.0",
"healthMonitor": {
"enabled": True,
"healthCheckInterval": "00:00:10",
"healthCheckWindow": "00:02:00",
"healthCheckThreshold": 6,
"counterThreshold": 0.80
},
"functionTimeout": "00:10:00",
"logging": {
"fileLoggingMode": "always",
"logLevel": {
"default": "Debug"
}
},
"extensions": {
"http": {
"routePrefix": "api",
"maxConcurrentRequests": 5,
"maxOutstandingRequests": 30
}
}
}
FUNCTION_EXTENSION_BUNDLE_CONFIG = {
"id": "Microsoft.Azure.Functions.ExtensionBundle",
"version": "[1.*, 2.0.0)"
}
"""
Azure Storage
"""
BLOB_TYPE = 'blob'
QUEUE_TYPE = 'queue'
TABLE_TYPE = 'table'
FILE_TYPE = 'file'
RESOURCE_GROUPS_TYPE = 'resourceGroups'
| 29.888889
| 100
| 0.760455
|
4a03c3dcab5b19ff1eb9fc2a8cddd7a44955c52c
| 18,787
|
py
|
Python
|
kms/google/cloud/kms_v1/gapic/transports/key_management_service_grpc_transport.py
|
q-logic/google-cloud-python
|
a65065c89c059bc564bbdd79288a48970907c399
|
[
"Apache-2.0"
] | null | null | null |
kms/google/cloud/kms_v1/gapic/transports/key_management_service_grpc_transport.py
|
q-logic/google-cloud-python
|
a65065c89c059bc564bbdd79288a48970907c399
|
[
"Apache-2.0"
] | 40
|
2019-07-16T10:04:48.000Z
|
2020-01-20T09:04:59.000Z
|
kms/google/cloud/kms_v1/gapic/transports/key_management_service_grpc_transport.py
|
q-logic/google-cloud-python
|
a65065c89c059bc564bbdd79288a48970907c399
|
[
"Apache-2.0"
] | 2
|
2019-07-18T00:05:31.000Z
|
2019-11-27T14:17:22.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
from google.cloud.kms_v1.proto import service_pb2_grpc
from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2_grpc
class KeyManagementServiceGrpcTransport(object):
"""gRPC transport class providing stubs for
google.cloud.kms.v1 KeyManagementService API.
The transport provides access to the raw gRPC stubs,
which can be used to take advantage of advanced
features of gRPC.
"""
# The scopes needed to make gRPC calls to all of the methods defined
# in this service.
_OAUTH_SCOPES = (
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloudkms",
)
def __init__(
self, channel=None, credentials=None, address="cloudkms.googleapis.com:443"
):
"""Instantiate the transport class.
Args:
channel (grpc.Channel): A ``Channel`` instance through
which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
address (str): The address where the service is hosted.
"""
# If both `channel` and `credentials` are specified, raise an
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
"The `channel` and `credentials` arguments are mutually " "exclusive."
)
# Create the channel.
if channel is None:
channel = self.create_channel(
address=address,
credentials=credentials,
options={
"grpc.max_send_message_length": -1,
"grpc.max_receive_message_length": -1,
}.items(),
)
self._channel = channel
# gRPC uses objects called "stubs" that are bound to the
# channel and provide a basic method for each RPC.
self._stubs = {
"key_management_service_stub": service_pb2_grpc.KeyManagementServiceStub(
channel
),
"iam_policy_stub": iam_policy_pb2_grpc.IAMPolicyStub(channel),
}
@classmethod
def create_channel(
cls, address="cloudkms.googleapis.com:443", credentials=None, **kwargs
):
"""Create and return a gRPC channel object.
Args:
address (str): The host for the channel to use.
credentials (~.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
kwargs (dict): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return google.api_core.grpc_helpers.create_channel(
address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
)
@property
def channel(self):
"""The gRPC channel used by the transport.
Returns:
grpc.Channel: A gRPC channel object.
"""
return self._channel
@property
def list_key_rings(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.list_key_rings`.
Lists ``KeyRings``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].ListKeyRings
@property
def list_import_jobs(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.list_import_jobs`.
Lists ``ImportJobs``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].ListImportJobs
@property
def list_crypto_keys(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.list_crypto_keys`.
Lists ``CryptoKeys``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].ListCryptoKeys
@property
def list_crypto_key_versions(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.list_crypto_key_versions`.
Lists ``CryptoKeyVersions``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].ListCryptoKeyVersions
@property
def get_key_ring(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.get_key_ring`.
Returns metadata for a given ``KeyRing``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].GetKeyRing
@property
def get_import_job(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.get_import_job`.
Returns metadata for a given ``ImportJob``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].GetImportJob
@property
def get_crypto_key(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.get_crypto_key`.
Returns metadata for a given ``CryptoKey``, as well as its ``primary``
``CryptoKeyVersion``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].GetCryptoKey
@property
def get_crypto_key_version(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.get_crypto_key_version`.
Returns metadata for a given ``CryptoKeyVersion``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].GetCryptoKeyVersion
@property
def create_key_ring(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.create_key_ring`.
Create a new ``KeyRing`` in a given Project and Location.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].CreateKeyRing
@property
def create_import_job(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.create_import_job`.
Create a new ``ImportJob`` within a ``KeyRing``.
``ImportJob.import_method`` is required.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].CreateImportJob
@property
def create_crypto_key(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.create_crypto_key`.
Create a new ``CryptoKey`` within a ``KeyRing``.
``CryptoKey.purpose`` and ``CryptoKey.version_template.algorithm`` are
required.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].CreateCryptoKey
@property
def create_crypto_key_version(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.create_crypto_key_version`.
Create a new ``CryptoKeyVersion`` in a ``CryptoKey``.
The server will assign the next sequential id. If unset, ``state`` will
be set to ``ENABLED``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].CreateCryptoKeyVersion
@property
def import_crypto_key_version(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.import_crypto_key_version`.
Imports a new ``CryptoKeyVersion`` into an existing ``CryptoKey`` using
the wrapped key material provided in the request.
The version ID will be assigned the next sequential id within the
``CryptoKey``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].ImportCryptoKeyVersion
@property
def update_crypto_key(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.update_crypto_key`.
Update a ``CryptoKey``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].UpdateCryptoKey
@property
def update_crypto_key_version(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.update_crypto_key_version`.
Update a ``CryptoKeyVersion``'s metadata.
``state`` may be changed between ``ENABLED`` and ``DISABLED`` using this
method. See ``DestroyCryptoKeyVersion`` and ``RestoreCryptoKeyVersion``
to move between other states.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].UpdateCryptoKeyVersion
@property
def encrypt(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.encrypt`.
Encrypts data, so that it can only be recovered by a call to
``Decrypt``. The ``CryptoKey.purpose`` must be ``ENCRYPT_DECRYPT``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].Encrypt
@property
def decrypt(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.decrypt`.
Decrypts data that was protected by ``Encrypt``. The
``CryptoKey.purpose`` must be ``ENCRYPT_DECRYPT``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].Decrypt
@property
def update_crypto_key_primary_version(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.update_crypto_key_primary_version`.
Update the version of a ``CryptoKey`` that will be used in ``Encrypt``.
Returns an error if called on an asymmetric key.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].UpdateCryptoKeyPrimaryVersion
@property
def destroy_crypto_key_version(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.destroy_crypto_key_version`.
Schedule a ``CryptoKeyVersion`` for destruction.
Upon calling this method, ``CryptoKeyVersion.state`` will be set to
``DESTROY_SCHEDULED`` and ``destroy_time`` will be set to a time 24
hours in the future, at which point the ``state`` will be changed to
``DESTROYED``, and the key material will be irrevocably destroyed.
Before the ``destroy_time`` is reached, ``RestoreCryptoKeyVersion`` may
be called to reverse the process.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].DestroyCryptoKeyVersion
@property
def restore_crypto_key_version(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.restore_crypto_key_version`.
Restore a ``CryptoKeyVersion`` in the ``DESTROY_SCHEDULED`` state.
Upon restoration of the CryptoKeyVersion, ``state`` will be set to
``DISABLED``, and ``destroy_time`` will be cleared.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].RestoreCryptoKeyVersion
@property
def get_public_key(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.get_public_key`.
Returns the public key for the given ``CryptoKeyVersion``. The
``CryptoKey.purpose`` must be ``ASYMMETRIC_SIGN`` or
``ASYMMETRIC_DECRYPT``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].GetPublicKey
@property
def asymmetric_decrypt(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.asymmetric_decrypt`.
Decrypts data that was encrypted with a public key retrieved from
``GetPublicKey`` corresponding to a ``CryptoKeyVersion`` with
``CryptoKey.purpose`` ASYMMETRIC\_DECRYPT.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].AsymmetricDecrypt
@property
def asymmetric_sign(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.asymmetric_sign`.
Signs data using a ``CryptoKeyVersion`` with ``CryptoKey.purpose``
ASYMMETRIC\_SIGN, producing a signature that can be verified with the
public key retrieved from ``GetPublicKey``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["key_management_service_stub"].AsymmetricSign
@property
def set_iam_policy(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.set_iam_policy`.
Sets the access control policy on the specified resource. Replaces
any existing policy.
Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and
PERMISSION_DENIED
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["iam_policy_stub"].SetIamPolicy
@property
def get_iam_policy(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.get_iam_policy`.
Gets the access control policy for a resource. Returns an empty policy
if the resource exists and does not have a policy set.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["iam_policy_stub"].GetIamPolicy
@property
def test_iam_permissions(self):
"""Return the gRPC stub for :meth:`KeyManagementServiceClient.test_iam_permissions`.
Returns permissions that a caller has on the specified resource. If the
resource does not exist, this will return an empty set of
permissions, not a NOT_FOUND error.
Note: This operation is designed to be used for building
permission-aware UIs and command-line tools, not for authorization
checking. This operation may "fail open" without warning.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["iam_policy_stub"].TestIamPermissions
| 37.349901
| 105
| 0.65359
|
4a03c7ab4f434e25a78237ef8ae01d111572dcf1
| 1,405
|
py
|
Python
|
output/models/ms_data/model_groups/mg_f017_xsd/mg_f017.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 1
|
2021-08-14T17:59:21.000Z
|
2021-08-14T17:59:21.000Z
|
output/models/ms_data/model_groups/mg_f017_xsd/mg_f017.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | 4
|
2020-02-12T21:30:44.000Z
|
2020-04-15T20:06:46.000Z
|
output/models/ms_data/model_groups/mg_i017_xsd/mg_i017.py
|
tefra/xsdata-w3c-tests
|
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
|
[
"MIT"
] | null | null | null |
from dataclasses import dataclass, field
from typing import Optional
@dataclass
class Foo:
class Meta:
name = "foo"
c1: Optional[object] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
c2: Optional[object] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
c3: Optional[object] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
c4: Optional[object] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
s1: Optional[object] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
s2: Optional[object] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
s3: Optional[object] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
s4: Optional[object] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
}
)
@dataclass
class Doc(Foo):
class Meta:
name = "doc"
| 19.513889
| 40
| 0.43274
|
4a03c7afe7bc80d82d4d1a038bf6475666c283f9
| 724
|
py
|
Python
|
venv/Lib/site-packages/commands/mixins.py
|
The-Fragment/FragmentFembot
|
bca0027b423753eb162590e8fd440a2c1e65d133
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/commands/mixins.py
|
The-Fragment/FragmentFembot
|
bca0027b423753eb162590e8fd440a2c1e65d133
|
[
"MIT"
] | 3
|
2021-03-23T04:58:47.000Z
|
2021-04-02T02:40:54.000Z
|
venv/Lib/site-packages/commands/mixins.py
|
The-Fragment/FragmentFembot
|
bca0027b423753eb162590e8fd440a2c1e65d133
|
[
"MIT"
] | null | null | null |
from django.http import JsonResponse
class AjaxMixin(object):
@staticmethod
def success(results, meta=None, status=200):
if isinstance(results, list) or isinstance(results, tuple):
content = {'results': results}
elif results is not None:
content = {'result': results}
else:
content = {}
if meta: content.update(meta)
return JsonResponse(content, status=status)
@staticmethod
def error(message, meta=None, status=400):
content = {'error': message}
if meta: content.update(meta)
return JsonResponse(content, status=status)
@staticmethod
def errors(fields, meta=None, status=400):
content = {'errors': fields}
if meta: content.update(meta)
return JsonResponse(content, status=status)
| 24.965517
| 61
| 0.722376
|
4a03c80a1d73d6f8a7a8ce51b0ccb14e037671e0
| 411
|
py
|
Python
|
src/update_handlers/messages_in_channels.py
|
Arseny-Tokmancev/channels-watchbot
|
102edc07c9d8c306f47b6a5b8318fa0ba56534f0
|
[
"MIT"
] | 1
|
2020-11-10T22:50:14.000Z
|
2020-11-10T22:50:14.000Z
|
src/update_handlers/messages_in_channels.py
|
Arseny-Tokmancev/channels-watchbot
|
102edc07c9d8c306f47b6a5b8318fa0ba56534f0
|
[
"MIT"
] | null | null | null |
src/update_handlers/messages_in_channels.py
|
Arseny-Tokmancev/channels-watchbot
|
102edc07c9d8c306f47b6a5b8318fa0ba56534f0
|
[
"MIT"
] | 1
|
2022-01-31T19:23:03.000Z
|
2022-01-31T19:23:03.000Z
|
from pyrogram import filters
from django.utils import timezone
from data.models import Channel
def register(app):
@app.on_message(filters.channel)
def update_time(client, message):
for channel in Channel.objects.filter(channel_id=message.chat.id):
channel.last_message_time = timezone.now()
channel.alerts_left = channel.alert_times + 1
channel.save()
| 34.25
| 74
| 0.698297
|
4a03c845099c38b919de262ba7245934300ac4df
| 1,029
|
py
|
Python
|
platalea/fix_json.py
|
egpbos/platalea
|
6ed7139095043135805c446f1fca089cb5eb9ba9
|
[
"Apache-2.0"
] | 2
|
2021-03-13T12:19:36.000Z
|
2021-11-13T06:33:21.000Z
|
platalea/fix_json.py
|
egpbos/platalea
|
6ed7139095043135805c446f1fca089cb5eb9ba9
|
[
"Apache-2.0"
] | 97
|
2020-03-13T16:12:01.000Z
|
2021-11-23T09:00:47.000Z
|
platalea/fix_json.py
|
egpbos/platalea
|
6ed7139095043135805c446f1fca089cb5eb9ba9
|
[
"Apache-2.0"
] | 2
|
2021-02-02T18:06:50.000Z
|
2021-02-22T12:08:11.000Z
|
import json
import glob
from shutil import copyfile
import pandas as pd
import logging
import io
logging.basicConfig(level=logging.INFO)
def fix():
paths = glob.glob("experiments/*/result.json")
for path in paths:
logging.info("Fixing {}".format(path))
copyfile(path, path + ".orig")
with open(path, 'w') as out:
data = [eval(line) for line in open(path + ".orig")]
for datum in data:
print(json.dumps(datum), file=out)
def load_results():
tables = []
for file in glob.glob("experiments/vq*/result.json"):
data = [flat(json.loads(line)) for line in open(file)]
table = pd.read_json(io.StringIO(json.dumps(data)), orient='records')
table['path'] = file
tables.append(table)
return tables
def flat(rec):
return dict(epoch=rec['epoch'],
medr=rec['medr'],
recall1=rec['recall']['1'],
recall5=rec['recall']['5'],
recall10=rec['recall']['10'])
| 27.078947
| 77
| 0.580175
|
4a03c8fe8c529bef47b6a7dab98b62aa67a49131
| 3,751
|
py
|
Python
|
test/functional/rpc_blockchain.py
|
8899kkA/REBELCoin
|
42e21696292c98ffdaf959d6c0c060460aa188a7
|
[
"MIT"
] | null | null | null |
test/functional/rpc_blockchain.py
|
8899kkA/REBELCoin
|
42e21696292c98ffdaf959d6c0c060460aa188a7
|
[
"MIT"
] | null | null | null |
test/functional/rpc_blockchain.py
|
8899kkA/REBELCoin
|
42e21696292c98ffdaf959d6c0c060460aa188a7
|
[
"MIT"
] | 1
|
2022-01-30T23:45:34.000Z
|
2022-01-30T23:45:34.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPCs related to blockchainstate.
Test the following RPCs:
- getblockchaininfo
- gettxoutsetinfo
- getdifficulty
- getbestblockhash
- getblockhash
- getblockheader
- getchaintxstats
- getnetworkhashps
- verifychain
Tests correspond to code in rpc/blockchain.cpp.
"""
from decimal import Decimal
import http.client
import subprocess
from test_framework.test_framework import rebelcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
assert_raises,
assert_raises_rpc_error,
assert_is_hex_string,
assert_is_hash_string,
)
class BlockchainTest(rebelcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
#self._test_getblockchaininfo()
self._test_gettxoutsetinfo()
self._test_getblockheader()
#self._test_getdifficulty()
self.nodes[0].verifychain(0)
def _test_getblockchaininfo(self):
self.log.info("Test getblockchaininfo")
keys = [
'bestblockhash',
'blocks',
'chain',
'chainwork',
'difficulty',
'headers',
'verificationprogress',
'warnings',
]
res = self.nodes[0].getblockchaininfo()
# result should have these additional pruning keys if manual pruning is enabled
assert_equal(sorted(res.keys()), sorted(keys))
def _test_gettxoutsetinfo(self):
node = self.nodes[0]
res = node.gettxoutsetinfo()
assert_equal(res['total_amount'], Decimal('50000.00000000'))
assert_equal(res['transactions'], 200)
assert_equal(res['height'], 200)
assert_equal(res['txouts'], 200)
assert_equal(res['bestblock'], node.getblockhash(200))
size = res['disk_size']
assert_greater_than_or_equal(size, 6400)
assert_greater_than_or_equal(64000, size)
assert_equal(len(res['bestblock']), 64)
assert_equal(len(res['hash_serialized_2']), 64)
def _test_getblockheader(self):
node = self.nodes[0]
assert_raises_rpc_error(-5, "Block not found",
node.getblockheader, "nonsense")
besthash = node.getbestblockhash()
secondbesthash = node.getblockhash(199)
header = node.getblockheader(besthash)
assert_equal(header['hash'], besthash)
assert_equal(header['height'], 200)
assert_equal(header['confirmations'], 1)
assert_equal(header['previousblockhash'], secondbesthash)
assert_is_hex_string(header['chainwork'])
assert_is_hash_string(header['hash'])
assert_is_hash_string(header['previousblockhash'])
assert_is_hash_string(header['merkleroot'])
assert_is_hash_string(header['bits'], length=None)
assert isinstance(header['time'], int)
#assert isinstance(header['mediantime'], int)
assert isinstance(header['nonce'], int)
assert isinstance(header['version'], int)
#assert isinstance(int(header['versionHex'], 16), int)
assert isinstance(header['difficulty'], Decimal)
def _test_getdifficulty(self):
difficulty = self.nodes[0].getdifficulty()
# 1 hash in 2 should be valid, so difficulty should be 1/2**31
# binary => decimal => binary math is why we do this check
assert abs(difficulty * 2**31 - 1) < 0.0001
if __name__ == '__main__':
BlockchainTest().main()
| 33.19469
| 87
| 0.66089
|
4a03c9028fd33b4348fa433998f02515543f422c
| 1,915
|
py
|
Python
|
notify.py
|
Na0ki/ipnotify
|
4104c15b81a69e9def604ddc50c14d193965b3b6
|
[
"MIT"
] | null | null | null |
notify.py
|
Na0ki/ipnotify
|
4104c15b81a69e9def604ddc50c14d193965b3b6
|
[
"MIT"
] | null | null | null |
notify.py
|
Na0ki/ipnotify
|
4104c15b81a69e9def604ddc50c14d193965b3b6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
import re
import urllib.parse
import urllib.request
from configparser import ConfigParser
def load():
    """
    Read the list of IP-notification domains from /etc/ipnotify/config.

    :return: ConfigParser object populated from the config file
    :raises Exception: if the config file cannot be opened
    """
    config = ConfigParser()
    try:
        with open('/etc/ipnotify/config') as f:
            config.read_file(f)
    except IOError as err:
        # Chain the original OSError so the root cause (ENOENT, EACCES, ...)
        # stays visible in the traceback instead of being swallowed.
        raise Exception("failed to open config file") from err
    return config
def request(info):
    """
    Push a dynamic-DNS update to the Google Domains API.

    :param info: dict with keys ``domain``, ``username`` and ``password``
    :return: True when the API answers ``good``/``nochg``, otherwise False
    """
    base_url = 'https://domains.google.com/nic/update'
    manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    manager.add_password(None, base_url, info['username'], info['password'])
    handler = urllib.request.HTTPBasicAuthHandler(manager)
    opener = urllib.request.build_opener(handler)
    urllib.request.install_opener(opener)

    params = urllib.parse.urlencode({"hostname": info["domain"]}).encode('UTF-8')
    # BUG FIX: the original compared `response.status is not 200` -- an
    # identity check on an int, which is only reliable for CPython's cached
    # small integers. Use equality. Also close the response deterministically.
    with urllib.request.urlopen(base_url, params) as response:
        if response.status != 200:
            print("Invalid Status Code", response.status)
            return False
        body = response.read().decode("utf-8")

    matched = re.match(r"^(good|nochg)\s.+$", body)
    if matched is None:
        print("Something went wrong!\n\tREASON: ", body)
        return False

    print("Notification succeeded! ", body)
    return True
def main():
    """
    Entry point: push a DDNS update for every configured section.

    :return:
    """
    config = load()
    for section in config.sections():
        user = config.get(section, "username")
        pw = config.get(section, "password")
        # Both credentials are mandatory for every section.
        if not (user and pw):
            raise Exception("username or password is undefined")
        request({"domain": section, "username": user, "password": pw})
if __name__ == '__main__':
    try:
        main()
    except Exception as error:
        # Top-level guard: report any failure as a one-line message
        # instead of dumping a raw traceback.
        print("An Error Occurred: ", error)
| 26.597222
| 81
| 0.643864
|
4a03c996dcac9e7d1a40a547ffcd8d857ea3688d
| 1,436
|
py
|
Python
|
urls.py
|
GFH24/demo-master-master
|
c5b55bf26fea1333567cfd5b2e4fc1c238e65ded
|
[
"Apache-2.0"
] | null | null | null |
urls.py
|
GFH24/demo-master-master
|
c5b55bf26fea1333567cfd5b2e4fc1c238e65ded
|
[
"Apache-2.0"
] | null | null | null |
urls.py
|
GFH24/demo-master-master
|
c5b55bf26fea1333567cfd5b2e4fc1c238e65ded
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云(BlueKing) available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
urls config
"""
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
# admin.autodiscover()
# Shared URL configuration
urlpatterns = patterns(
    '',
    # Django admin database management
    url(r'^admin/', include(admin.site.urls)),
    # User login / authentication -- do not modify
    url(r'^account/', include('account.urls')),
    # Application feature-switch control -- do not modify
    url(r'^app_control/', include('app_control.urls')),
    # Develop your app's main features in home_application (the root app)
    url(r'^', include('home_application.urls')),
    #url(r'^','home_application.views.index'),
)

# Dotted paths to the custom error-page views.
handler404 = 'error_pages.views.error_404'
handler500 = 'error_pages.views.error_500'
handler403 = 'error_pages.views.error_403'
handler401 = 'error_pages.views.error_401'
| 38.810811
| 115
| 0.746518
|
4a03ca21a6365fb62415b616a91fd6a1d2ce3ad3
| 3,388
|
py
|
Python
|
maelstrom/tests/test_db_utils.py
|
maelstromio/maelstrom-py
|
b88e73496195d59960c2cff43b97aa6329d39f48
|
[
"MIT"
] | null | null | null |
maelstrom/tests/test_db_utils.py
|
maelstromio/maelstrom-py
|
b88e73496195d59960c2cff43b97aa6329d39f48
|
[
"MIT"
] | null | null | null |
maelstrom/tests/test_db_utils.py
|
maelstromio/maelstrom-py
|
b88e73496195d59960c2cff43b97aa6329d39f48
|
[
"MIT"
] | null | null | null |
from uuid import uuid4
from maelstrom.search import Search
from maelstrom.tests.account import Account
from maelstrom.lookup import LookUp
from maelstrom.data import Data
from test import CassandraTestCase
class DBUnitTests(CassandraTestCase):
    """
    The Data class is the generic prototype for the database models. As such,
    correct testing database functionality on it will test database
    transactions, while other model-specific tests should be written
    in a separate testing class.

    NOTE(review): this file is Python 2 (print statements, dict .values()
    returning a sortable list below).
    """

    # Never read in this class -- presumably consumed by CassandraTestCase.
    # TODO confirm against the base class.
    test_setup = False

    def setUp(self):
        # Track every id written during a test so tearDown can delete them.
        self.ids_used = []
        CassandraTestCase.setUp(self, ['127.0.0.1'], 'test')
        #Search.rebuild()
        #LookUp.rebuild()
        #Data.rebuild()
        #c.start(['192.241.181.163', '107.170.88.98'], 'gradfly')
        # try:
        #     c.start(['192.241.181.163', '107.170.88.98'], 'gradfly')
        # except Exception:
        #     from time import sleep
        #     print 'failed'
        #     sleep(50000)
        # finally:
        #     c.start(['192.241.181.163', '107.170.88.98'], 'gradfly')

    def test_get_and_put_by_id(self):
        # A committed row must round-trip through get_by_id unchanged.
        init_id = uuid4()
        self.ids_used.append(init_id)
        data = Data(id=init_id, contents="john ")
        data.commit()
        test_data = Data.get_by_id(init_id)
        # for i,j in zip(user_1.__dict__.values(), test_user_1.__dict__.values()):
        #     print i,j, i == j
        self.assertEqual(data, test_data)

    def test_batch_and_get(self):
        # multi_update_data followed by multi_get_by_id must return every row.
        ids = [uuid4(), uuid4(), uuid4()]
        self.ids_used += list(ids)
        names = ['bob', 'joe', 'sara']
        datum = [Data(id=i, contents=name) for i, name in zip(ids, names)]
        datum = {a.id: a for a in datum}
        Data.multi_update_data(datum)
        datum_test = Data.multi_get_by_id(ids).values()
        # no order is guaranteed by Cassandra, so sort both sides before comparing
        datum_test.sort(key=(lambda a: a.id))
        datum_true = sorted(datum.values(), key=lambda a: a.id)
        if len(ids) != len(datum):
            self.assertTrue(False)
        print datum_true
        print datum_test
        is_equal = True
        for a, a_test in zip(datum_true, datum_test):
            is_equal = is_equal and (a == a_test)
        self.assertTrue(is_equal)

    def test_batch_vs_iter_single(self):
        # A batch fetch must agree with fetching the same ids one at a time.
        ids = [uuid4(), uuid4(), uuid4()]
        self.ids_used += list(ids)
        names = ['bob', 'joe', 'sara']
        datum = [Data(id=i, contents=name) for i, name in zip(ids, names)]
        datum = {a.id: a for a in datum}
        Data.multi_update_data(datum)
        datum_batch = Data.multi_get_by_id(ids).values()
        datum_single = []
        for i in ids:
            datum_single.append(Data.get_by_id(i))
        # Sort both result sets: Cassandra guarantees no ordering.
        datum_batch.sort(key=(lambda a: a.id))
        datum_single.sort(key=(lambda a: a.id))
        print datum_single
        print datum_batch
        self.assertTrue(datum_batch == datum_single)

    def test_delete(self):
        # A deleted row must no longer be retrievable by id.
        init_id = uuid4()
        self.ids_used.append(init_id)
        data = Data(id=init_id, contents="testtesttest")
        data.commit()
        data.delete(init_id)
        if not Data.get_by_id(init_id):
            self.assertTrue(True)
        else:
            self.assertTrue(False)

    def tearDown(self):
        # Remove every row this test created, then let the base class clean up.
        Data.multi_delete(self.ids_used)
        CassandraTestCase.tearDown(self)
        #c.stop()
| 33.215686
| 82
| 0.601535
|
4a03ca80dc09466dbde024d775c7c53c6d0e78bc
| 3,633
|
py
|
Python
|
webexteamssdk/webexteamssdk/models/mixins/webhook.py
|
Steeve135/WebexBot
|
6188ca2cfccd8885c5c2e492f17a6e935dee416e
|
[
"MIT"
] | null | null | null |
webexteamssdk/webexteamssdk/models/mixins/webhook.py
|
Steeve135/WebexBot
|
6188ca2cfccd8885c5c2e492f17a6e935dee416e
|
[
"MIT"
] | 4
|
2020-03-24T16:20:45.000Z
|
2021-06-01T22:56:24.000Z
|
venv/lib/python3.7/site-packages/webexteamssdk/models/mixins/webhook.py
|
miceder/dnacbot
|
fb8fd60674cadfec60691b49c13d890bcf6141d7
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Webex Teams Webhook data model.
Copyright (c) 2016-2018 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
from builtins import *
from webexteamssdk.utils import WebexTeamsDateTime
class WebhookBasicPropertiesMixin(object):
    """Mixin exposing the basic read-only properties of a webhook payload."""

    def _field(self, key):
        """Look up *key* in the wrapped JSON payload (None when absent)."""
        return self._json_data.get(key)

    @property
    def id(self):
        """Webhook ID."""
        return self._field('id')

    @property
    def name(self):
        """A user-friendly name for this webhook."""
        return self._field('name')

    @property
    def targetUrl(self):
        """The URL that receives POST requests for each event."""
        return self._field('targetUrl')

    @property
    def resource(self):
        """The resource type for the webhook."""
        return self._field('resource')

    @property
    def event(self):
        """The event type for the webhook."""
        return self._field('event')

    @property
    def filter(self):
        """The filter that defines the webhook scope."""
        return self._field('filter')

    @property
    def secret(self):
        """Secret used to generate payload signature."""
        return self._field('secret')

    @property
    def orgId(self):
        """The ID of the organization that owns the webhook."""
        return self._field('orgId')

    @property
    def createdBy(self):
        """The ID of the person that added the webhook."""
        return self._field('createdBy')

    @property
    def appId(self):
        """Identifies the application that added the webhook."""
        return self._field('appId')

    @property
    def ownedBy(self):
        """Whether the webhook is owned by the `org` or the `creator`.

        Creator-owned webhooks only receive events visible to the creator;
        org-owned webhooks receive events visible to anyone in the
        organization.
        """
        return self._field('ownedBy')

    @property
    def status(self):
        """Whether the webhook is active.

        A webhook that cannot reach your URL is disabled.
        """
        return self._field('status')

    @property
    def created(self):
        """Creation date and time in ISO8601 format."""
        raw = self._field('created')
        return WebexTeamsDateTime.strptime(raw) if raw else None
| 30.275
| 78
| 0.676851
|
4a03cbb143a75cebc707619084dbc7320ae37388
| 2,177
|
py
|
Python
|
neural_guided_symbolic_regression/utils/timeout.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | 23,901
|
2018-10-04T19:48:53.000Z
|
2022-03-31T21:27:42.000Z
|
neural_guided_symbolic_regression/utils/timeout.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | 891
|
2018-11-10T06:16:13.000Z
|
2022-03-31T10:42:34.000Z
|
neural_guided_symbolic_regression/utils/timeout.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | 6,047
|
2018-10-12T06:31:02.000Z
|
2022-03-31T13:59:28.000Z
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Timeout function."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
class FunctionTimeoutError(Exception):
  """Raised when a wrapped function exceeds its time limit."""
class RunWithTimeout(object):
  """Runs a python function with a timeout and gets a returned value.

  NOTE(leeley): This class is forked from answer in
  https://stackoverflow.com/questions/46858493/python-run-a-function-with-a-timeout-and-get-a-returned-value
  I added a FunctionTimeoutError when time limit is reached.

  BUG FIX(review): the original detected timeout by checking whether the
  stored answer was still None, which misreported any function that
  legitimately returns None. Timeout is now detected by asking the worker
  thread whether it is still alive after the timed join.
  """

  def __init__(self, function, args, name=None):
    """Initializer.

    Args:
      function: Callable, function to run.
      args: Tuple of function arguments.
      name: String, the name of the function. Default None, function.__name__
          will be used.
    """
    self.function = function
    self.args = args
    self.answer = None
    if name is None:
      self.name = function.__name__
    else:
      self.name = name

  def _worker(self):
    # Runs in the worker thread; stores the result for run() to pick up.
    self.answer = self.function(*self.args)

  def run(self, time_limit_seconds):
    """Runs function.

    Args:
      time_limit_seconds: Float, timeout limit in seconds.

    Returns:
      output of function (which may legitimately be None).

    Raises:
      FunctionTimeoutError: If the function is still running after
          time_limit_seconds.
    """
    thread = threading.Thread(target=self._worker)
    # Daemonize so a function that never returns cannot block interpreter
    # exit; the thread itself cannot be killed from Python.
    thread.daemon = True
    thread.start()
    thread.join(time_limit_seconds)
    if thread.is_alive():
      raise FunctionTimeoutError(
          '%s timeout after %f.' % (self.name, time_limit_seconds))
    return self.answer
| 28.272727
| 108
| 0.710611
|
4a03cde6961c687b3d56f052b895483fb951f81f
| 805
|
py
|
Python
|
GreedyBestFirstSearch.py
|
azaharyan/DeepCube
|
d7b7182b6e140150630e058b169b3cf8713f7564
|
[
"MIT"
] | 1
|
2022-02-27T13:50:32.000Z
|
2022-02-27T13:50:32.000Z
|
GreedyBestFirstSearch.py
|
azaharyan/DeepCube
|
d7b7182b6e140150630e058b169b3cf8713f7564
|
[
"MIT"
] | null | null | null |
GreedyBestFirstSearch.py
|
azaharyan/DeepCube
|
d7b7182b6e140150630e058b169b3cf8713f7564
|
[
"MIT"
] | null | null | null |
import time
import numpy as np
from cubeEnv import CubeEnv
class GreedyBestFirstSearch:
    """Greedy best-first search driven by a policy model's action scores."""

    def __init__(self, model, max_time_in_seconds):
        # model: policy whose predict() returns (_, action probabilities).
        self.model = model
        self.max_time_in_seconds = max_time_in_seconds

    def gbfs(self, state):
        """Repeatedly take the model's highest-scoring action.

        Returns the list of action indices that solved the cube, [] when the
        state is already solved, or None when the time budget runs out.
        """
        if state.is_solved():
            return []

        scratch = CubeEnv(cube=state.cube.copy())
        actions = []
        deadline = time.time() + self.max_time_in_seconds

        while time.time() < deadline:
            encoded = np.array(scratch.get_one_hot_state()).flatten()[None, :]
            _, probabilities = self.model.predict(encoded)
            best = probabilities[0].argmax()
            scratch.step(best)
            actions.append(best)
            if scratch.is_solved():
                return actions

        return None
| 27.758621
| 105
| 0.62236
|
4a03d1a08a967aa09bc8e011830611abc333dca0
| 2,583
|
py
|
Python
|
mittab/libs/tests/tab_logic/test_outround_pairing.py
|
DanielS6/mit-tab
|
f2b5bb609546514582697b998b8b50a66bc8a396
|
[
"MIT"
] | 9
|
2015-01-22T01:19:15.000Z
|
2017-11-01T20:09:47.000Z
|
mittab/libs/tests/tab_logic/test_outround_pairing.py
|
DanielS6/mit-tab
|
f2b5bb609546514582697b998b8b50a66bc8a396
|
[
"MIT"
] | 152
|
2018-04-06T14:32:51.000Z
|
2022-02-11T22:12:53.000Z
|
mittab/libs/tests/tab_logic/test_outround_pairing.py
|
DanielS6/mit-tab
|
f2b5bb609546514582697b998b8b50a66bc8a396
|
[
"MIT"
] | 13
|
2015-09-14T00:40:06.000Z
|
2018-01-24T04:05:32.000Z
|
import random
from django.test import TestCase
import pytest
from mittab.apps.tab.models import *
from mittab.libs import outround_tab_logic
@pytest.mark.django_db
class TestOutroundPairingLogic(TestCase):
    """Integration tests for the outround (elimination-round) break/pairing."""

    # Fixture of a fully tabbed tournament; effective seeds already computed.
    fixtures = ["testing_finished_db"]
    pytestmark = pytest.mark.django_db

    def generate_checkins(self):
        """Check every room in for round 0 so pairing has rooms to assign."""
        for r in Room.objects.all():
            RoomCheckIn.objects.create(room=r,
                                       round_number=0)

    def test_break(self):
        """The break itself must run without raising."""
        self.generate_checkins()

        outround_tab_logic.perform_the_break()

    def test_pairing(self):
        """Both novice and varsity brackets must pair after the break."""
        self.generate_checkins()

        outround_tab_logic.perform_the_break()

        outround_tab_logic.pair(BreakingTeam.NOVICE)
        outround_tab_logic.pair(BreakingTeam.VARSITY)

    def enter_results(self, type_of_round):
        """Assign a random winner to every undecided outround of this type."""
        outrounds = Outround.objects.filter(type_of_round=type_of_round).all()

        for outround in outrounds:
            if not outround.victor:
                outround.victor = random.randint(1, 2)

                outround.save()

    def confirm_pairing(self, outrounds, num_teams):
        # In a correct bracket the seeds of each pairing sum to num_teams + 1
        # (seed 1 meets seed N, seed 2 meets seed N-1, ...).
        for outround in outrounds:
            assert (outround.gov_team.breaking_team.effective_seed + outround.opp_team.breaking_team.effective_seed) == (num_teams + 1)

    def test_all_outrounds(self):
        """Pair and resolve every varsity outround down to the final."""
        self.generate_checkins()

        outround_tab_logic.perform_the_break()

        var_teams_to_break = TabSettings.get("var_teams_to_break", 8)

        # NOTE(review): `/= 2` is true division under Python 3, so this
        # becomes 4.0, 2.0 ... and num_teams is filtered with a float.
        # It appears to work against the ORM -- confirm, or consider //=.
        while var_teams_to_break > 2:
            outround_tab_logic.pair(BreakingTeam.VARSITY)

            outrounds = Outround.objects.filter(
                type_of_round=BreakingTeam.VARSITY,
                num_teams=var_teams_to_break
            )

            self.confirm_pairing(
                outrounds, var_teams_to_break
            )

            self.enter_results(BreakingTeam.VARSITY)

            var_teams_to_break /= 2

    def test_partials(self):
        """A 7-team break still pairs as an 8-slot bracket (with a bye seed)."""
        self.generate_checkins()

        TabSettings.set("var_teams_to_break", 7)

        outround_tab_logic.perform_the_break()

        var_teams_to_break = 8

        while var_teams_to_break > 2:
            outround_tab_logic.pair(BreakingTeam.VARSITY)

            outrounds = Outround.objects.filter(
                type_of_round=BreakingTeam.VARSITY,
                num_teams=var_teams_to_break
            )

            self.confirm_pairing(
                outrounds, var_teams_to_break
            )

            self.enter_results(BreakingTeam.VARSITY)

            var_teams_to_break /= 2
|
4a03d1dec4637edcf9d2b0b81c66c4c009e39bf0
| 7,946
|
py
|
Python
|
graph4nlp/pytorch/modules/loss/general_loss.py
|
IBM/graph4nlp
|
a9bf20b23fa1ec368d9bd40cc8c557f86a9f8297
|
[
"Apache-2.0"
] | 18
|
2020-09-09T03:33:29.000Z
|
2021-07-22T11:17:16.000Z
|
graph4nlp/pytorch/modules/loss/general_loss.py
|
IBM/graph4nlp
|
a9bf20b23fa1ec368d9bd40cc8c557f86a9f8297
|
[
"Apache-2.0"
] | null | null | null |
graph4nlp/pytorch/modules/loss/general_loss.py
|
IBM/graph4nlp
|
a9bf20b23fa1ec368d9bd40cc8c557f86a9f8297
|
[
"Apache-2.0"
] | 1
|
2021-02-19T19:46:03.000Z
|
2021-02-19T19:46:03.000Z
|
import torch.nn as nn
from .base import GeneralLossBase
class GeneralLoss(GeneralLossBase):
    r"""
    General loss backed by the pytorch loss functions.
    The detailed description for each loss function can be found at:
    `pytorch loss function <https://pytorch.org/docs/stable/nn.html#loss-functions>`

    Parameters
    ----------
    loss_type: str
        Which criterion to build. One of:
        ``NLL`` -- negative log likelihood loss for C-class classification.
        ``BCEWithLogits`` -- a `Sigmoid` layer fused with `BCELoss` in one
        class; numerically more stable than a separate `Sigmoid` + `BCELoss`.
        ``BCE`` -- binary cross entropy between target and output
        (expects probabilities, i.e. inputs already passed through a sigmoid).
        ``MultiLabelMargin`` -- multi-class multi-classification hinge
        (margin-based) loss over 2D inputs and 2D target class indices.
        ``SoftMargin`` -- two-class logistic loss between input :math:`x`
        and target :math:`y` containing 1 or -1.
        ``CrossEntropy`` -- `nn.LogSoftmax` fused with `nn.NLLLoss`, for
        C-class classification.
    weight: Tensor, optional
        Manual rescaling weight given to the loss of each batch element
        (size `nbatch`). Not applicable to ``SoftMargin``.
    size_average: bool, optional
        Deprecated (see ``reduction``). Average losses over each loss element
        in the batch when ``True``; sum per minibatch when ``False``.
        Ignored when ``reduce`` is ``False``. Default: ``True``.
    ignore_index: int, optional
        Target value that is ignored and does not contribute to the input
        gradient. Only used by ``NLL`` and ``CrossEntropy``.
    reduce: bool, optional
        Deprecated (see ``reduction``). When ``False``, return a loss per
        batch element and ignore ``size_average``. Default: ``True``.
    reduction: string, optional
        ``'none'`` | ``'mean'`` | ``'sum'``. Overridden by the deprecated
        ``size_average``/``reduce`` when those are given. Default: ``'mean'``.
    pos_weight: Tensor, optional
        Weight of positive examples (length = number of classes). Only used
        by ``BCEWithLogits``.

    Raises
    ------
    ValueError
        If ``loss_type`` is not one of the supported names. (The original
        silently left ``self.loss_function`` unset, deferring the failure to
        an opaque AttributeError at forward time.)
    """

    def __init__(self, loss_type, weight=None, size_average=None, ignore_index=-100,
                 reduce=None, reduction='mean', pos_weight=None):
        super(GeneralLoss, self).__init__()
        if loss_type == 'NLL':
            self.loss_function = nn.NLLLoss(weight, size_average, ignore_index, reduce, reduction)
        elif loss_type == 'BCEWithLogits':
            self.loss_function = nn.BCEWithLogitsLoss(weight, size_average, reduce, reduction, pos_weight)
        elif loss_type == 'MultiLabelMargin':
            self.loss_function = nn.MultiLabelMarginLoss(size_average, reduce, reduction)
        elif loss_type == 'SoftMargin':
            self.loss_function = nn.SoftMarginLoss(size_average, reduce, reduction)
        elif loss_type == 'CrossEntropy':
            self.loss_function = nn.CrossEntropyLoss(weight, size_average, ignore_index, reduce, reduction)
        elif loss_type == 'BCE':
            # BUG FIX: 'BCE' previously constructed nn.CrossEntropyLoss,
            # contradicting the documented behavior; binary cross entropy
            # is nn.BCELoss (no ignore_index/pos_weight parameters).
            self.loss_function = nn.BCELoss(weight, size_average, reduce, reduction)
        else:
            raise ValueError('unsupported loss_type: %r' % (loss_type,))

    def forward(self, input, target):
        r"""
        Compute the loss.

        Parameters
        ----------
        input: Tensor
            Shape depends on the chosen criterion:
            NLL/CrossEntropy: :math:`(N, C)` or :math:`(N, C, d_1, ..., d_K)`;
            BCE/BCEWithLogits/SoftMargin: :math:`(N, *)`;
            MultiLabelMargin: :math:`(C)` or :math:`(N, C)`.
        target: Tensor
            NLL/CrossEntropy: :math:`(N)` of class indices in
            :math:`[0, C-1]`, or :math:`(N, d_1, ..., d_K)`;
            otherwise same shape as the input (MultiLabelMargin targets are
            padded with -1).

        Returns
        -------
        Tensor
            Scalar loss, or an unreduced per-element loss when
            ``reduction='none'``.
        """
        return self.loss_function(input, target)
| 55.566434
| 149
| 0.619683
|
4a03d26d4dfb23895b259d408439cd969920d8ae
| 129
|
py
|
Python
|
zlogo/__init__.py
|
zjykzj/zlogo
|
11c8d383c8580716cd3d32284f75510fd98e1206
|
[
"Apache-2.0"
] | 1
|
2021-01-28T08:06:57.000Z
|
2021-01-28T08:06:57.000Z
|
zlogo/__init__.py
|
zjykzj/zlogo
|
11c8d383c8580716cd3d32284f75510fd98e1206
|
[
"Apache-2.0"
] | null | null | null |
zlogo/__init__.py
|
zjykzj/zlogo
|
11c8d383c8580716cd3d32284f75510fd98e1206
|
[
"Apache-2.0"
] | 3
|
2020-10-12T06:25:20.000Z
|
2021-11-16T12:51:28.000Z
|
# -*- coding: utf-8 -*-
"""
@date: 2020/8/12 下午9:03
@file: __init__.py.py
@author: zj
@description:
"""
__version__ = '0.1.0'
| 11.727273
| 23
| 0.581395
|
4a03d346823e7135766ae2e6dc2b5671a24d1a78
| 20,941
|
py
|
Python
|
python_modules/dagster/dagster/__init__.py
|
schrockn/dagster
|
3bb4e9247a693e48d84e9c86f73d83633d91a5c7
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster/dagster/__init__.py
|
schrockn/dagster
|
3bb4e9247a693e48d84e9c86f73d83633d91a5c7
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster/dagster/__init__.py
|
schrockn/dagster
|
3bb4e9247a693e48d84e9c86f73d83633d91a5c7
|
[
"Apache-2.0"
] | null | null | null |
import importlib
import sys
import typing
from pep562 import pep562
import dagster._module_alias_map as _module_alias_map
# Imports of a key will return the module named by the corresponding value.
sys.meta_path.insert(
_module_alias_map.get_meta_path_insertion_index(),
_module_alias_map.AliasedModuleFinder(
{
"dagster.check": "dagster._check",
}
),
)
from dagster.builtins import Any, Bool, Float, Int, Nothing, String
from dagster.config import Enum, EnumValue, Field, Map, Permissive, Selector, Shape
from dagster.config.config_schema import ConfigSchema
from dagster.config.config_type import Array, Noneable, ScalarUnion
from dagster.core.asset_defs import (
AssetIn,
AssetOut,
AssetSelection,
AssetsDefinition,
SourceAsset,
asset,
build_assets_job,
load_assets_from_current_module,
load_assets_from_modules,
load_assets_from_package_module,
load_assets_from_package_name,
materialize,
materialize_to_memory,
multi_asset,
)
from dagster.core.definitions import (
AssetKey,
AssetMaterialization,
AssetObservation,
AssetSensorDefinition,
BoolMetadataValue,
CompositeSolidDefinition,
ConfigMapping,
DagsterAssetMetadataValue,
DagsterPipelineRunMetadataValue,
DailyPartitionsDefinition,
DefaultScheduleStatus,
DefaultSensorStatus,
DependencyDefinition,
DynamicOut,
DynamicOutput,
DynamicOutputDefinition,
DynamicPartitionsDefinition,
ExecutorDefinition,
ExecutorRequirement,
ExpectationResult,
Failure,
FloatMetadataValue,
GraphDefinition,
GraphIn,
GraphOut,
HookDefinition,
HourlyPartitionsDefinition,
In,
InputDefinition,
InputMapping,
IntMetadataValue,
JobDefinition,
JsonMetadataValue,
LoggerDefinition,
MarkdownMetadataValue,
Materialization,
MetadataEntry,
MetadataValue,
ModeDefinition,
MonthlyPartitionsDefinition,
MultiDependencyDefinition,
NodeInvocation,
OpDefinition,
Out,
Output,
OutputDefinition,
OutputMapping,
Partition,
PartitionScheduleDefinition,
PartitionSetDefinition,
PartitionedConfig,
PartitionsDefinition,
PathMetadataValue,
PipelineDefinition,
PipelineFailureSensorContext,
PresetDefinition,
PythonArtifactMetadataValue,
RepositoryData,
RepositoryDefinition,
ResourceDefinition,
RetryRequested,
RunFailureSensorContext,
RunRequest,
RunStatusSensorContext,
RunStatusSensorDefinition,
ScheduleDefinition,
ScheduleEvaluationContext,
ScheduleExecutionContext,
SensorDefinition,
SensorEvaluationContext,
SensorExecutionContext,
SkipReason,
SolidDefinition,
SolidInvocation,
StaticPartitionsDefinition,
TableColumn,
TableColumnConstraints,
TableConstraints,
TableMetadataValue,
TableRecord,
TableSchema,
TableSchemaMetadataValue,
TextMetadataValue,
TimeWindowPartitionsDefinition,
TypeCheck,
UrlMetadataValue,
WeeklyPartitionsDefinition,
asset_sensor,
build_init_logger_context,
build_reconstructable_job,
build_schedule_from_partitioned_job,
composite_solid,
config_mapping,
daily_partitioned_config,
daily_schedule,
default_executors,
dynamic_partitioned_config,
executor,
failure_hook,
graph,
hourly_partitioned_config,
hourly_schedule,
in_process_executor,
job,
lambda_solid,
logger,
make_values_resource,
monthly_partitioned_config,
monthly_schedule,
multiple_process_executor_requirements,
multiprocess_executor,
op,
pipeline,
pipeline_failure_sensor,
reconstructable,
repository,
resource,
run_failure_sensor,
run_status_sensor,
schedule,
schedule_from_partitions,
sensor,
solid,
static_partitioned_config,
success_hook,
weekly_partitioned_config,
weekly_schedule,
)
from dagster.core.definitions.configurable import configured
from dagster.core.definitions.policy import Backoff, Jitter, RetryPolicy
from dagster.core.definitions.run_status_sensor_definition import build_run_status_sensor_context
from dagster.core.definitions.schedule_definition import build_schedule_context
from dagster.core.definitions.sensor_definition import build_sensor_context
from dagster.core.definitions.step_launcher import StepLauncher
from dagster.core.definitions.unresolved_asset_job_definition import define_asset_job
from dagster.core.definitions.utils import (
config_from_files,
config_from_pkg_resources,
config_from_yaml_strings,
)
from dagster.core.definitions.version_strategy import SourceHashVersionStrategy, VersionStrategy
from dagster.core.errors import (
DagsterConfigMappingFunctionError,
DagsterError,
DagsterEventLogInvalidForRun,
DagsterExecutionInterruptedError,
DagsterExecutionStepExecutionError,
DagsterExecutionStepNotFoundError,
DagsterInvalidConfigDefinitionError,
DagsterInvalidConfigError,
DagsterInvalidDefinitionError,
DagsterInvariantViolationError,
DagsterResourceFunctionError,
DagsterRunNotFoundError,
DagsterStepOutputNotFoundError,
DagsterSubprocessError,
DagsterTypeCheckDidNotPass,
DagsterTypeCheckError,
DagsterUnknownPartitionError,
DagsterUnknownResourceError,
DagsterUnmetExecutorRequirementsError,
DagsterUserCodeExecutionError,
raise_execution_interrupts,
)
from dagster.core.events import DagsterEvent, DagsterEventType
from dagster.core.execution.api import (
execute_pipeline,
execute_pipeline_iterator,
reexecute_pipeline,
reexecute_pipeline_iterator,
)
from dagster.core.execution.build_resources import build_resources
from dagster.core.execution.context.compute import OpExecutionContext, SolidExecutionContext
from dagster.core.execution.context.hook import HookContext, build_hook_context
from dagster.core.execution.context.init import InitResourceContext, build_init_resource_context
from dagster.core.execution.context.input import InputContext, build_input_context
from dagster.core.execution.context.invocation import build_op_context, build_solid_context
from dagster.core.execution.context.logger import InitLoggerContext
from dagster.core.execution.context.output import OutputContext, build_output_context
from dagster.core.execution.context.system import TypeCheckContext
from dagster.core.execution.execute_in_process_result import ExecuteInProcessResult
from dagster.core.execution.plan.external_step import (
external_instance_from_step_run_ref,
run_step_from_ref,
step_context_to_step_run_ref,
step_run_ref_to_step_context,
)
from dagster.core.execution.results import (
CompositeSolidExecutionResult,
PipelineExecutionResult,
SolidExecutionResult,
)
from dagster.core.execution.validate_run_config import validate_run_config
from dagster.core.execution.with_resources import with_resources
from dagster.core.executor.base import Executor
from dagster.core.executor.init import InitExecutorContext
from dagster.core.instance import DagsterInstance
from dagster.core.launcher import DefaultRunLauncher
from dagster.core.log_manager import DagsterLogManager
from dagster.core.storage.event_log import (
EventLogEntry,
EventLogRecord,
EventRecordsFilter,
RunShardedEventsCursor,
)
from dagster.core.storage.file_manager import FileHandle, LocalFileHandle, local_file_manager
from dagster.core.storage.fs_io_manager import custom_path_fs_io_manager, fs_io_manager
from dagster.core.storage.io_manager import IOManager, IOManagerDefinition, io_manager
from dagster.core.storage.mem_io_manager import mem_io_manager
from dagster.core.storage.memoizable_io_manager import MemoizableIOManager
from dagster.core.storage.pipeline_run import (
DagsterRun,
DagsterRunStatus,
PipelineRun,
PipelineRunStatus,
)
from dagster.core.storage.root_input_manager import (
RootInputManager,
RootInputManagerDefinition,
root_input_manager,
)
from dagster.core.storage.tags import MEMOIZED_RUN_TAG
from dagster.core.types.config_schema import (
DagsterTypeLoader,
DagsterTypeMaterializer,
dagster_type_loader,
dagster_type_materializer,
)
from dagster.core.types.dagster_type import DagsterType, List, Optional, PythonObjectDagsterType
from dagster.core.types.decorator import (
make_python_type_usable_as_dagster_type,
usable_as_dagster_type,
)
from dagster.core.types.python_dict import Dict
from dagster.core.types.python_set import Set
from dagster.core.types.python_tuple import Tuple
from dagster.serdes import deserialize_value, serialize_value
from dagster.utils import file_relative_path
from dagster.utils.alert import make_email_on_run_failure_sensor
from dagster.utils.backcompat import ExperimentalWarning, deprecation_warning, rename_warning
from dagster.utils.log import get_dagster_logger
from dagster.utils.partitions import (
create_offset_partition_selector,
date_partition_range,
identity_partition_selector,
)
from dagster.utils.test import (
check_dagster_type,
execute_solid,
execute_solid_within_pipeline,
execute_solids_within_pipeline,
)
from .version import __version__
from dagster.config.source import BoolSource, StringSource, IntSource # isort:skip
# ########################
# ##### DEPRECATED ALIASES
# ########################
# NOTE: Unfortunately we have to declare deprecated aliases twice-- the
# TYPE_CHECKING declaration satisfies linters and type checkers, but the entry
# in `_DEPRECATED` is required for us to generate the deprecation warning.
if typing.TYPE_CHECKING:
from dagster.core.asset_defs import AssetGroup
# pylint:disable=reimported
from dagster.core.definitions import DagsterAssetMetadataValue as DagsterAssetMetadataEntryData
from dagster.core.definitions import (
DagsterPipelineRunMetadataValue as DagsterPipelineRunMetadataEntryData,
)
from dagster.core.definitions import FloatMetadataValue as FloatMetadataEntryData
from dagster.core.definitions import IntMetadataValue as IntMetadataEntryData
from dagster.core.definitions import JsonMetadataValue as JsonMetadataEntryData
from dagster.core.definitions import MarkdownMetadataValue as MarkdownMetadataEntryData
from dagster.core.definitions import MetadataEntry as EventMetadataEntry
from dagster.core.definitions import MetadataValue as EventMetadata
from dagster.core.definitions import PathMetadataValue as PathMetadataEntryData
from dagster.core.definitions import (
PythonArtifactMetadataValue as PythonArtifactMetadataEntryData,
)
from dagster.core.definitions import TableMetadataValue as TableMetadataEntryData
from dagster.core.definitions import TableSchemaMetadataValue as TableSchemaMetadataEntryData
from dagster.core.definitions import TextMetadataValue as TextMetadataEntryData
from dagster.core.definitions import UrlMetadataValue as UrlMetadataEntryData
# pylint:enable=reimported
# Maps a deprecated public name -> (module that still provides it, version in
# which the alias will be removed, extra guidance appended to the warning).
# Consumed by the module-level __getattr__ below (PEP 562).
_DEPRECATED = {
    "AssetGroup": (
        "dagster.core.asset_defs",
        "0.16.0",
        "Instead, place a set of assets wrapped with `with_resources` directly on a repository.",
    ),
}
# Maps an old public name -> (replacement object, version in which the old
# name will stop resolving). Served via __getattr__ with a rename warning.
_DEPRECATED_RENAMED = {
    "EventMetadataEntry": (MetadataEntry, "0.16.0"),
    "EventMetadata": (MetadataValue, "0.16.0"),
    "TextMetadataEntryData": (TextMetadataValue, "0.16.0"),
    "UrlMetadataEntryData": (UrlMetadataValue, "0.16.0"),
    "PathMetadataEntryData": (PathMetadataValue, "0.16.0"),
    "JsonMetadataEntryData": (JsonMetadataValue, "0.16.0"),
    "MarkdownMetadataEntryData": (MarkdownMetadataValue, "0.16.0"),
    "PythonArtifactMetadataEntryData": (
        PythonArtifactMetadataValue,
        "0.16.0",
    ),
    "FloatMetadataEntryData": (FloatMetadataValue, "0.16.0"),
    "IntMetadataEntryData": (IntMetadataValue, "0.16.0"),
    "DagsterPipelineRunMetadataEntryData": (
        DagsterPipelineRunMetadataValue,
        "0.16.0",
    ),
    "DagsterAssetMetadataEntryData": (
        DagsterAssetMetadataValue,
        "0.16.0",
    ),
    "TableMetadataEntryData": (TableMetadataValue, "0.16.0"),
    "TableSchemaMetadataEntryData": (
        TableSchemaMetadataValue,
        "0.16.0",
    ),
}
def __getattr__(name: str) -> typing.Any:
    """Lazily resolve deprecated module attributes, emitting a warning (PEP 562)."""
    # Warnings should point at the caller's frame; interpreters older than 3.7
    # need one extra level of indirection through the pep562 shim.
    stacklevel = 3 if sys.version_info >= (3, 7) else 4
    if name in _DEPRECATED:
        source_module, breaking_version, extra_warn_text = _DEPRECATED[name]
        attr = getattr(importlib.import_module(source_module), name)
        deprecation_warning(name, breaking_version, extra_warn_text, stacklevel=stacklevel)
        return attr
    if name in _DEPRECATED_RENAMED:
        replacement, breaking_version = _DEPRECATED_RENAMED[name]
        rename_warning(replacement.__name__, name, breaking_version, stacklevel=stacklevel)
        return replacement
    raise AttributeError("module '{}' has no attribute '{}'".format(__name__, name))
def __dir__() -> typing.List[str]:
    """Expose deprecated aliases alongside the public API for dir()/completion."""
    return sorted([*__all__, *_DEPRECATED])
# Backports PEP 562, which allows for override of __getattr__ and __dir__, to this module. PEP 562
# was introduced in Python 3.7, so the `pep562` call here is a no-op for 3.7+.
# See:
# PEP 562: https://www.python.org/dev/peps/pep-0562/
# PEP 562 backport package: https://github.com/facelessuser/pep562
pep562(__name__)
__all__ = [
# Definition
"AssetGroup",
"AssetKey",
"AssetIn",
"AssetMaterialization",
"AssetObservation",
"AssetOut",
"AssetSelection",
"AssetSensorDefinition",
"AssetsDefinition",
"DagsterAssetMetadataValue",
"DagsterPipelineRunMetadataValue",
"TableColumn",
"TableColumnConstraints",
"TableConstraints",
"TableRecord",
"TableSchemaMetadataValue",
"TableSchema",
"CompositeSolidDefinition",
"ConfigMapping",
"DependencyDefinition",
"MetadataValue",
"MetadataEntry",
"ExecutorDefinition",
"ExecutorRequirement",
"ExpectationResult",
"Failure",
"Field",
"Map",
"GraphDefinition",
"GraphIn",
"GraphOut",
"HookDefinition",
"JobDefinition",
"In",
"InputDefinition",
"InputMapping",
"JsonMetadataValue",
"LoggerDefinition",
"build_init_logger_context",
"BoolMetadataValue",
"MarkdownMetadataValue",
"IntMetadataValue",
"FloatMetadataValue",
"Materialization",
"ModeDefinition",
"MultiDependencyDefinition",
"OpDefinition",
"Out",
"Output",
"OutputDefinition",
"OutputMapping",
"PathMetadataValue",
"PipelineDefinition",
"PresetDefinition",
"PythonArtifactMetadataValue",
"RepositoryData",
"RepositoryDefinition",
"ResourceDefinition",
"SolidDefinition",
"SourceAsset",
"NodeInvocation",
"SolidInvocation",
"TableMetadataValue",
"TextMetadataValue",
"UrlMetadataValue",
"make_values_resource",
"RetryPolicy",
"Backoff",
"Jitter",
"RunStatusSensorDefinition",
"DynamicOutput",
"DynamicOut",
"DynamicOutputDefinition",
# Decorators
"asset",
"asset_sensor",
"composite_solid",
"config_mapping",
"executor",
"graph",
"job",
"lambda_solid",
"logger",
"multi_asset",
"op",
"pipeline",
"repository",
"resource",
"schedule",
"sensor",
"solid",
"success_hook",
"failure_hook",
"run_failure_sensor",
"pipeline_failure_sensor",
"run_status_sensor",
# Execution
"CompositeSolidExecutionResult",
"DagsterEvent",
"DagsterEventType",
"DefaultRunLauncher",
"EventLogEntry",
"EventLogRecord",
"Executor",
"InitExecutorContext",
"InitLoggerContext",
"InitResourceContext",
"ExecuteInProcessResult",
"step_context_to_step_run_ref",
"external_instance_from_step_run_ref",
"step_run_ref_to_step_context",
"run_step_from_ref",
"build_init_resource_context",
"OpExecutionContext",
"PipelineExecutionResult",
"RetryRequested",
"with_resources",
"build_resources",
"SolidExecutionResult",
"SolidExecutionContext",
"build_solid_context",
"build_op_context",
"HookContext",
"build_hook_context",
"TypeCheckContext",
"InputContext",
"build_input_context",
"OutputContext",
"build_output_context",
"PipelineRun",
"DagsterRun",
"PipelineRunStatus",
"DagsterRunStatus",
"default_executors",
"execute_pipeline_iterator",
"execute_pipeline",
"validate_run_config",
"execute_solid_within_pipeline",
"in_process_executor",
"multiprocess_executor",
"multiple_process_executor_requirements",
"build_reconstructable_job",
"reconstructable",
"reexecute_pipeline_iterator",
"reexecute_pipeline",
# Errors
"DagsterConfigMappingFunctionError",
"DagsterError",
"DagsterEventLogInvalidForRun",
"DagsterExecutionInterruptedError",
"DagsterExecutionStepExecutionError",
"DagsterExecutionStepNotFoundError",
"DagsterInvalidConfigDefinitionError",
"DagsterInvalidConfigError",
"DagsterInvalidDefinitionError",
"DagsterInvariantViolationError",
"DagsterResourceFunctionError",
"DagsterRunNotFoundError",
"DagsterStepOutputNotFoundError",
"DagsterSubprocessError",
"DagsterTypeCheckDidNotPass",
"DagsterTypeCheckError",
"DagsterUnknownPartitionError",
"DagsterUnknownResourceError",
"DagsterUnmetExecutorRequirementsError",
"DagsterUserCodeExecutionError",
"raise_execution_interrupts",
# Logging
"DagsterLogManager",
"get_dagster_logger",
# Utilities
"check_dagster_type",
"execute_solid",
"execute_solids_within_pipeline",
"file_relative_path",
"config_from_files",
"config_from_pkg_resources",
"config_from_yaml_strings",
"configured",
"build_assets_job",
"load_assets_from_modules",
"load_assets_from_current_module",
"load_assets_from_package_module",
"load_assets_from_package_name",
"materialize",
"materialize_to_memory",
# types
"Any",
"Bool",
"Dict",
"Enum",
"EnumValue",
"Float",
"Int",
"List",
"Nothing",
"Optional",
"Set",
"String",
"Tuple",
"TypeCheck",
"dagster_type_loader",
"DagsterTypeLoader",
"dagster_type_materializer",
"DagsterTypeMaterializer",
# type creation
"DagsterType",
"PythonObjectDagsterType",
"make_python_type_usable_as_dagster_type",
"usable_as_dagster_type",
# config
"Array",
"BoolSource",
"ConfigSchema",
"Noneable",
"Permissive",
"ScalarUnion",
"StringSource",
"IntSource",
"Selector",
"Shape",
# file things
"FileHandle",
"LocalFileHandle",
"local_file_manager",
# instance
"DagsterInstance",
# storage
"EventRecordsFilter",
"RunShardedEventsCursor",
"serialize_value",
"deserialize_value",
# partitions and schedules
"build_schedule_from_partitioned_job",
"schedule_from_partitions",
"dynamic_partitioned_config",
"static_partitioned_config",
"daily_partitioned_config",
"hourly_partitioned_config",
"monthly_partitioned_config",
"weekly_partitioned_config",
"DynamicPartitionsDefinition",
"StaticPartitionsDefinition",
"DailyPartitionsDefinition",
"HourlyPartitionsDefinition",
"MonthlyPartitionsDefinition",
"TimeWindowPartitionsDefinition",
"WeeklyPartitionsDefinition",
"Partition",
"PartitionedConfig",
"PartitionsDefinition",
"PartitionScheduleDefinition",
"PartitionSetDefinition",
"RunRequest",
"ScheduleDefinition",
"ScheduleEvaluationContext",
"ScheduleExecutionContext",
"DefaultScheduleStatus",
"build_schedule_context",
"SensorDefinition",
"SensorEvaluationContext",
"SensorExecutionContext",
"DefaultSensorStatus",
"RunFailureSensorContext",
"PipelineFailureSensorContext",
"RunStatusSensorContext",
"build_sensor_context",
"build_run_status_sensor_context",
"StepLauncher",
"SkipReason",
"daily_schedule",
"hourly_schedule",
"monthly_schedule",
"weekly_schedule",
"create_offset_partition_selector",
"date_partition_range",
"identity_partition_selector",
"make_email_on_run_failure_sensor",
# IO managers
"IOManager",
"IOManagerDefinition",
"io_manager",
"RootInputManager",
"RootInputManagerDefinition",
"root_input_manager",
"fs_io_manager",
"mem_io_manager",
"custom_path_fs_io_manager",
# warnings
"ExperimentalWarning",
# Versioning / Memoization
"VersionStrategy",
"MEMOIZED_RUN_TAG",
"MemoizableIOManager",
"SourceHashVersionStrategy",
]
| 30.261561
| 99
| 0.745237
|
4a03d404ad22b5e9d2be70d83d6c238e80cb2a03
| 345
|
py
|
Python
|
app_LPB/migrations/0016_remove_tarefa_concluida.py
|
reglabel/ConquerTime
|
fe91c8eb02424246681814b7d5792c689e5787e0
|
[
"MIT"
] | null | null | null |
app_LPB/migrations/0016_remove_tarefa_concluida.py
|
reglabel/ConquerTime
|
fe91c8eb02424246681814b7d5792c689e5787e0
|
[
"MIT"
] | null | null | null |
app_LPB/migrations/0016_remove_tarefa_concluida.py
|
reglabel/ConquerTime
|
fe91c8eb02424246681814b7d5792c689e5787e0
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.9 on 2021-11-07 19:26
from django.db import migrations
class Migration(migrations.Migration):
    # Depends on the migration that last altered Tarefa.data_e_hora_criacao.
    dependencies = [
        ('app_LPB', '0015_alter_tarefa_data_e_hora_criacao'),
    ]
    # Drops the `concluida` column from the Tarefa model.
    operations = [
        migrations.RemoveField(
            model_name='tarefa',
            name='concluida',
        ),
    ]
| 19.166667
| 61
| 0.611594
|
4a03d4065816cfb7f1549adb285b28e158533fd6
| 8,733
|
py
|
Python
|
kairon/shared/actions/utils.py
|
Shashank411/kairon
|
8a3a083136d8cf89359021e49a7610509772ca9b
|
[
"Apache-2.0"
] | null | null | null |
kairon/shared/actions/utils.py
|
Shashank411/kairon
|
8a3a083136d8cf89359021e49a7610509772ca9b
|
[
"Apache-2.0"
] | null | null | null |
kairon/shared/actions/utils.py
|
Shashank411/kairon
|
8a3a083136d8cf89359021e49a7610509772ca9b
|
[
"Apache-2.0"
] | null | null | null |
import json
import logging
import os
from pathlib import Path
from typing import Any, List
import requests
from loguru import logger
from mongoengine import DoesNotExist, connect
from rasa_sdk import Tracker
from smart_config import ConfigLoader
from .data_objects import HttpActionConfig, HttpActionRequestBody
from .exception import HttpActionFailure
from .models import ParameterType
from urllib.parse import urlencode, quote_plus
class ActionUtility:
    """
    Utility class to assist executing actions
    """

    @staticmethod
    def execute_http_request(http_url: str, request_method: str, request_body=None, auth_token=None):
        """Executes http urls provided.

        :param http_url: HTTP url to be executed
        :param request_method: One of GET, PUT, POST, DELETE
        :param request_body: Request body to be sent with the request
        :param auth_token: auth token to be sent with request in case of token based authentication
        :return: tuple of (JSON/string response, final url that was requested)
        :raises HttpActionFailure: on invalid method, request failure, or non-2xx status
        """
        # Default UA header; replaced wholesale by the Authorization header
        # when a token is supplied (the UA header is intentionally dropped then).
        header = {"User-Agent": "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"}
        if request_body is None:
            request_body = {}
        if not ActionUtility.is_empty(auth_token):
            header = {'Authorization': auth_token}

        try:
            if request_method.lower() == 'get':
                if request_body:
                    # GET payloads are encoded into the query string.
                    http_url = http_url + "?" + urlencode(request_body, quote_via=quote_plus)
                response = requests.get(http_url, headers=header)
            elif request_method.lower() in ['post', 'put', 'delete']:
                response = requests.request(request_method.upper(), http_url, json=request_body, headers=header)
            else:
                raise HttpActionFailure("Invalid request method!")
            logger.debug("raw response: " + str(response.text))
            logger.debug("status " + str(response.status_code))

            if response.status_code not in [200, 202, 201, 204]:
                raise HttpActionFailure("Got non-200 status code")
        except Exception as e:
            logger.error(str(e))
            raise HttpActionFailure("Failed to execute the url: " + str(e))

        try:
            http_response_as_json = response.json()
        except ValueError as e:
            # Non-JSON payloads are returned as raw text.
            # (Was `logging.error`; use the module-wide loguru logger for consistency.)
            logger.error(str(e))
            http_response_as_json = response.text

        return http_response_as_json, http_url

    @staticmethod
    def prepare_request(tracker: Tracker, http_action_config_params: List[HttpActionRequestBody]):
        """
        Prepares request body:
        1. Fetches value of parameter from slot(Tracker) if parameter_type is slot and adds to request body
        2. Adds value of parameter directly if parameter_type is value

        :param tracker: Tracker for the Http Action
        :param http_action_config_params: User defined request body parameters <key, value, parameter_type>
        :return: Request body for the HTTP request
        """
        request_body = {}
        if not http_action_config_params:
            return request_body

        for param in http_action_config_params:
            if param['parameter_type'] == ParameterType.sender_id:
                # Conversation id of the current user.
                value = tracker.sender_id
            elif param['parameter_type'] == ParameterType.slot:
                # For slot parameters, `value` names the slot to read.
                value = tracker.get_slot(param['value'])
            else:
                value = param['value']
            request_body[param['key']] = value
            logger.debug("value for key " + param['key'] + ": " + str(value))
        return request_body

    @staticmethod
    def is_empty(value: str):
        """
        Checks for None, empty, or whitespace-only string.

        :param value: string value
        :return: True when value is None/empty/blank, else False
        """
        return not value or not value.strip()

    @staticmethod
    def connect_db():
        """
        Creates a connection to the MongoDB instance configured in system.yaml
        (path overridable via the `system_file` environment variable).
        """
        system_yml_parent_dir = str(Path(os.path.realpath(__file__)).parent)
        environment = ConfigLoader(os.getenv("system_file", system_yml_parent_dir + "/system.yaml")).get_config()
        connect(host=environment['database']["url"])

    @staticmethod
    def get_http_action_config(bot: str, action_name: str):
        """
        Fetch HTTP action configuration parameters from the MongoDB database.

        :param bot: BotID
        :param action_name: Action name
        :return: dict containing the stored configuration for the action
        :raises HttpActionFailure: when arguments are empty or no active config exists
        """
        if ActionUtility.is_empty(bot) or ActionUtility.is_empty(action_name):
            raise HttpActionFailure("Bot name and action name are required")

        try:
            http_config_dict = HttpActionConfig.objects().get(
                bot=bot, action_name=action_name, status=True).to_mongo().to_dict()
            logger.debug("http_action_config: " + str(http_config_dict))
        except DoesNotExist as e:
            logger.exception(e)
            raise HttpActionFailure("No HTTP action found for bot")
        return http_config_dict

    @staticmethod
    def retrieve_value_from_response(grouped_keys: List[str], http_response: Any):
        """
        Retrieves values for user defined placeholders.

        :param grouped_keys: dot-separated key paths, e.g. "a.b.0"
        :param http_response: Response received from executing the Http URL
        :return: dict mapping "${key.path}" -> value extracted from the response
        :raises HttpActionFailure: when any path cannot be resolved
        """
        value_mapping = {}
        try:
            for punctuation_separated_key in grouped_keys:
                keys = punctuation_separated_key.split(".")
                json_search_region = http_response
                for key in keys:
                    if isinstance(json_search_region, dict):
                        json_search_region = json_search_region[key]
                    else:
                        # Non-dict regions are treated as sequences indexed by int.
                        json_search_region = json_search_region[int(key)]
                value_mapping['${' + punctuation_separated_key + '}'] = json_search_region
        except Exception as e:
            raise HttpActionFailure("Unable to retrieve value for key from HTTP response: " + str(e))
        return value_mapping

    @staticmethod
    def attach_response(template, http_response):
        """
        Substitutes the ${RESPONSE} placeholder with the full HTTP response.

        :param template: user expected output string, may contain ${RESPONSE}
        :param http_response: Response received after executing Http URL
        :return: template with ${RESPONSE} replaced by str() of the response
        """
        parsed_output = template
        if '${RESPONSE}' in template:
            parsed_output = template.replace('${RESPONSE}', str(http_response))
        return parsed_output

    @staticmethod
    def prepare_response(response_template: str, http_response: Any):
        """
        Prepares the user defined response.

        :param response_template: string that may contain ${...} placeholders
        :param http_response: Response received after executing Http URL
        :return: response curated from the user defined template and Http response
        :raises HttpActionFailure: when placeholders exist but the response is a
            scalar, or a placeholder path cannot be resolved
        """
        parsed_output = ActionUtility.attach_response(response_template, http_response)
        # Placeholders are whitespace-delimited tokens shaped like ${key.path}.
        keys_with_placeholders = [term for term in parsed_output.split(" ") if term.startswith("${") and term.endswith("}")]
        if not keys_with_placeholders:
            # No placeholders: an empty template means "echo the raw response".
            if ActionUtility.is_empty(response_template):
                return http_response
            return parsed_output

        # Slice off exactly the "${" prefix and "}" suffix. (The previous
        # lstrip("${")/rstrip("}") stripped *characters*, not a prefix, and
        # would also eat those characters from inside the key itself.)
        keys_without_placeholders = [placeholder[2:-1] for placeholder in keys_with_placeholders]

        if type(http_response) not in [dict, list]:
            # Placeholders cannot be resolved against a scalar response.
            raise HttpActionFailure("Could not find value for keys in response")

        value_mapping = ActionUtility.retrieve_value_from_response(keys_without_placeholders, http_response)
        for key, value_for_placeholder in value_mapping.items():
            if isinstance(value_for_placeholder, dict):
                # Dict values are embedded as JSON rather than Python repr.
                parsed_output = parsed_output.replace(key, json.dumps(value_for_placeholder))
            else:
                parsed_output = parsed_output.replace(key, str(value_for_placeholder))
        return parsed_output
| 42.393204
| 124
| 0.648002
|
4a03d55a2c0881eb93e06caff7b88055dbb03957
| 550
|
py
|
Python
|
apps/accounts/search_indexes.py
|
dstl/lighthouse
|
b810742d9f4cbfac02bf99096542499d25c88b58
|
[
"MIT"
] | 5
|
2016-05-12T13:47:38.000Z
|
2020-06-22T07:33:35.000Z
|
apps/accounts/search_indexes.py
|
dstl/lighthouse
|
b810742d9f4cbfac02bf99096542499d25c88b58
|
[
"MIT"
] | 7
|
2016-10-24T12:41:09.000Z
|
2016-12-08T21:58:18.000Z
|
apps/accounts/search_indexes.py
|
dstl/lighthouse
|
b810742d9f4cbfac02bf99096542499d25c88b58
|
[
"MIT"
] | 4
|
2016-05-12T21:53:21.000Z
|
2021-04-10T22:02:26.000Z
|
# (c) Crown Owned Copyright, 2016. Dstl.
from haystack import indexes
from .models import User
class UserIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index over the accounts User model."""
    # Simple char fields copied straight off the model instance.
    name = indexes.CharField(model_attr='name')
    slug = indexes.CharField(model_attr='slug')
    full_name = indexes.CharField(model_attr='full_name')
    # Primary document field, rendered from the default search template.
    text = indexes.CharField(
        document=True,
        use_template=True)
    def get_model(self):
        # Model class this index is built over.
        return User
    def prepare_categories(self, user):
        # NOTE(review): no `categories` field is declared on this index, so
        # Haystack will not invoke this prepare hook — confirm whether a
        # MultiValueField named `categories` is missing.
        return [categories.name for categories in user.categories.all()]
| 26.190476
| 72
| 0.705455
|
4a03d5bce16e5809cd0ec72f015889133f0179cc
| 713
|
py
|
Python
|
Books/GodOfPython/P12_File/direct/num4.py
|
Tim232/Python-Things
|
05f0f373a4cf298e70d9668c88a6e3a9d1cd8146
|
[
"MIT"
] | 2
|
2020-12-05T07:42:55.000Z
|
2021-01-06T23:23:18.000Z
|
Books/GodOfPython/P12_File/direct/num4.py
|
Tim232/Python-Things
|
05f0f373a4cf298e70d9668c88a6e3a9d1cd8146
|
[
"MIT"
] | null | null | null |
Books/GodOfPython/P12_File/direct/num4.py
|
Tim232/Python-Things
|
05f0f373a4cf298e70d9668c88a6e3a9d1cd8146
|
[
"MIT"
] | null | null | null |
import pickle
class Member:
    """A simple read-only record of a member: name, age, and gender."""

    def __init__(self, name, age, gender):
        # Underscore-prefixed storage, exposed via the read-only properties below.
        self._name = name
        self._age = age
        self._gender = gender

    @property
    def name(self):
        """The member's name."""
        return self._name

    @property
    def age(self):
        """The member's age."""
        return self._age

    @property
    def gender(self):
        """The member's gender."""
        return self._gender
# Serialize two Member objects to disk, then load them back in the same order.
member_a = Member('길용현', 30, '남')
member_b = Member('홍길동', 24, '남')

with open('D:/02.Python/ch12/direct/num4.txt', 'wb') as out_file:
    pickle.dump(member_a, out_file)  # objects are stored in write order
    pickle.dump(member_b, out_file)

with open('D:/02.Python/ch12/direct/num4.txt', 'rb') as in_file:
    mem1 = pickle.load(in_file)  # and loaded back in that same order
    mem2 = pickle.load(in_file)

print(mem1.name, mem1.age, mem1.gender)
print(mem2.name, mem2.age, mem2.gender)
| 21.606061
| 59
| 0.614306
|
4a03d5dea8afb81f624c1b25a49a9e2eb7f68726
| 1,193
|
py
|
Python
|
demo/confnp/urls.py
|
jeanqasaur/jeeves
|
1b5783a98f88fa2fc9e4cae7e005d2c9242cfea4
|
[
"MIT"
] | 253
|
2015-01-02T01:54:27.000Z
|
2022-03-10T01:44:02.000Z
|
demo/conf/urls.py
|
jeanqasaur/jeeves
|
1b5783a98f88fa2fc9e4cae7e005d2c9242cfea4
|
[
"MIT"
] | 4
|
2015-06-09T03:36:28.000Z
|
2017-08-11T15:54:24.000Z
|
demo/conf/urls.py
|
jeanqasaur/jeeves
|
1b5783a98f88fa2fc9e4cae7e005d2c9242cfea4
|
[
"MIT"
] | 29
|
2015-02-14T02:24:08.000Z
|
2021-12-16T02:46:16.000Z
|
# Root URLconf for the conference-review demo: maps URL patterns to views.
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
from django.conf.urls.static import static
from django.conf import settings
from conf import views
# NOTE: string view references and the patterns() helper are the legacy
# pre-Django-1.10 style used consistently throughout this module.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'conf.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    # Admin site and session management.
    url(r'^admin/', include(admin.site.urls)),
    url(r'^accounts/login/$', 'django.contrib.auth.views.login'),
    url(r'^accounts/logout/$', 'django.contrib.auth.views.logout_then_login'),
    url(r'^accounts/profile/$', views.profile_view),
    url(r'^register$', views.register_account),
    # Paper listing doubles as the site index.
    url(r'^index$', views.papers_view),
    url(r'^$', views.papers_view),
    url(r'^submit$', views.submit_view),
    url(r'^papers$', views.papers_view),
    url(r'^paper$', views.paper_view),
    url(r'^submit_review$', views.submit_review_view),
    url(r'^submit_comment$', views.submit_comment_view),
    url(r'^assign_reviews$', views.assign_reviews_view),
    url(r'^search$', views.search_view),
    url(r'^about$', views.about_view),
    url(r'^users$', views.users_view),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 33.138889
| 78
| 0.684828
|
4a03d61047f980dd3be73776e4c1d62515923204
| 3,654
|
py
|
Python
|
duolinguo-robot.py
|
ret0x7c00/assistant-bot
|
4fad0f214a095a97c5e27135f6dfc99b15c0cc00
|
[
"MIT"
] | null | null | null |
duolinguo-robot.py
|
ret0x7c00/assistant-bot
|
4fad0f214a095a97c5e27135f6dfc99b15c0cc00
|
[
"MIT"
] | null | null | null |
duolinguo-robot.py
|
ret0x7c00/assistant-bot
|
4fad0f214a095a97c5e27135f6dfc99b15c0cc00
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
import requests
import time
from random import randrange
import json
import os
from stopwatch import Stopwatch
from barknotify import send_notify
# auth token
AUTH_TOKEN = os.environ.get('DUOLINGO_AUTH_TOKEN')
# Session-count and sleep bounds for randrange() below. Environment values
# arrive as *strings*, so coerce to int — the previous `... or default` form
# kept the string and made randrange() raise TypeError whenever these
# variables were actually set.
TOTAL_CNT_MIN = int(os.environ.get('DUOLINGO_TOTAL_CNT_MIN') or 3)
TOTAL_CNT_MAX = int(os.environ.get('DUOLINGO_TOTAL_CNT_MAX') or 15)
SLEEP_TIME_MIN = int(os.environ.get('DUOLINGO_SLEEP_TIME_MIN') or 1)
SLEEP_TIME_MAX = int(os.environ.get('DUOLINGO_SLEEP_TIME_MAX') or 6)
def get_question():
    """Fetch a global-practice session (question set) from the Duolingo API.

    Returns the parsed session JSON, which ``put_answer`` later mutates and
    submits back as a completed practice.
    """
    # Fetch the questions
    url = "https://www.duolingo.cn/2017-06-30/sessions"
    payload = "{\n \"fromLanguage\": \"zh\",\n \"learningLanguage\": \"en\",\n \"challengeTypes\": [\n \"characterIntro\",\n \"characterMatch\",\n \"characterSelect\",\n \"completeReverseTranslation\",\n \"definition\",\n \"dialogue\",\n \"form\",\n \"freeResponse\",\n \"gapFill\",\n \"judge\",\n \"listen\",\n \"name\",\n \"listenComprehension\",\n \"listenTap\",\n \"readComprehension\",\n \"select\",\n \"selectPronunciation\",\n \"selectTranscription\",\n \"tapCloze\",\n \"tapComplete\",\n \"tapDescribe\",\n \"translate\"\n ],\n \"type\": \"GLOBAL_PRACTICE\",\n \"juicy\": false\n}"
    headers = {
        'Authorization': AUTH_TOKEN,
        'Content-Type': 'application/json; charset=UTF-8',
        'Accept': 'application/json',
        'Referer': 'http://www.duolingo.cn/practice',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3572.0 Safari/537.36',
        'Connection': 'keep-alive'
    }
    response = requests.request("POST", url, headers=headers, data=payload)
    respJson = response.json()
    return respJson
def put_answer(respJson):
    """Submit the session back to Duolingo as a completed practice.

    Mutates the session JSON in place: fakes a 15-minute run ending now,
    marks it as passed, and reports a random in-lesson streak.
    """
    # Submit the answers
    url2 = "https://www.duolingo.cn/2017-06-30/sessions/"+respJson["id"]
    now = int(time.time())
    # Pretend the session started 15 minutes ago and finished just now.
    respJson["startTime"] = now-900
    respJson["endTime"] = now
    respJson["failed"] = False
    # Random streak so consecutive submissions don't look identical.
    respJson["max_in_lesson_streak"] = randrange(13, 20)
    respJson["heartsLeft"] = 0
    # Compact JSON; ensure_ascii=False keeps any non-ASCII text intact.
    payload2 = json.dumps(respJson, separators=(
        ',', ':'), ensure_ascii=False).encode('utf8')
    headers = {
        'Authorization': AUTH_TOKEN,
        'Content-Type': 'application/json; charset=UTF-8',
        'Accept': 'application/json',
        'Referer': 'http://www.duolingo.cn/practice',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3572.0 Safari/537.36',
        'Connection': 'keep-alive'
    }
    response2 = requests.request("PUT", url2, headers=headers, data=payload2)
    print(response2)
# Pick a random number of practice sessions for this run and announce it.
total_cnt = randrange(TOTAL_CNT_MIN, TOTAL_CNT_MAX)
print("total count: %s" % (total_cnt))
send_notify("Duolinguo", "begin to handle duolingo task. total count: %s" % (total_cnt))
stopwatch = Stopwatch()
stopwatch.start()
# NOTE(review): range(1, total_cnt) performs total_cnt - 1 iterations, one
# fewer than the announced "total count" — confirm whether that is intended.
for cnt in range(1, total_cnt):
    sleep_time = randrange(SLEEP_TIME_MIN, SLEEP_TIME_MAX)*60
    # NOTE(review): this guard could run once before the loop; here it only
    # aborts after the run has already been announced.
    if AUTH_TOKEN is None:
        print("please set DUOLINGO_AUTH_TOKEN")
        send_notify("Duolinguo", "please set DUOLINGO_AUTH_TOKEN")
        break
    print("the %s times is waiting for %ss" % (cnt, sleep_time))
    respJson = get_question()
    # print(respJson)
    # Wait SLEEP_TIME_MIN..SLEEP_TIME_MAX minutes to simulate a human session.
    time.sleep(sleep_time)
    put_answer(respJson)
    # Short random pause before fetching the next session.
    time.sleep(randrange(10, 60))
stopwatch.stop()
print("total time: %s" % str(stopwatch))
send_notify("Duolinguo", "duolingo task has finished. total count: %s, spent: %s" %
            (total_cnt, str(stopwatch)))
| 39.290323
| 761
| 0.635742
|
4a03d626ab33f7ba0d48d0030b2db4634836ad43
| 417
|
py
|
Python
|
my_django_extensions/wsgi.py
|
aamishbaloch/my-django-extensions
|
446f9a8d33355177e9d09ef926adeb003f1bd6d1
|
[
"MIT"
] | null | null | null |
my_django_extensions/wsgi.py
|
aamishbaloch/my-django-extensions
|
446f9a8d33355177e9d09ef926adeb003f1bd6d1
|
[
"MIT"
] | null | null | null |
my_django_extensions/wsgi.py
|
aamishbaloch/my-django-extensions
|
446f9a8d33355177e9d09ef926adeb003f1bd6d1
|
[
"MIT"
] | null | null | null |
"""
WSGI config for my_django_extensions project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'my_django_extensions.settings')
application = get_wsgi_application()
| 24.529412
| 80
| 0.798561
|
4a03d7e820f5e8a0d58790c830ae9fa2457c53e9
| 11,236
|
py
|
Python
|
canopy/io/file.py
|
SomaLogic/Canopy
|
535111c40995731e941da8cbf484c2aa9cb9b444
|
[
"MIT"
] | 7
|
2020-10-30T17:41:16.000Z
|
2022-03-30T06:18:26.000Z
|
canopy/io/file.py
|
SomaLogic/Canopy
|
535111c40995731e941da8cbf484c2aa9cb9b444
|
[
"MIT"
] | 1
|
2020-10-29T12:29:55.000Z
|
2020-12-17T17:49:18.000Z
|
canopy/io/file.py
|
SomaLogic/Canopy
|
535111c40995731e941da8cbf484c2aa9cb9b444
|
[
"MIT"
] | 1
|
2021-05-10T21:01:56.000Z
|
2021-05-10T21:01:56.000Z
|
from __future__ import annotations
from typing import TextIO, Dict, List, Tuple
from canopy import Adat
from canopy.tools.math import jround
from canopy.io.errors import AdatReadError
import csv
import json
import pkg_resources
import warnings
import re
def parse_file(f: TextIO) -> Tuple[List[List[float]], Dict[str, List[str]], Dict[str, List[str]], Dict[str, str]]:
    """Returns component pieces of an adat given an adat file object.

    Parameters
    ----------
    f : TextIO
        An open adat file object.

    Returns
    -------
    rfu_matrix : List[List[float]]
        An nSample x nSomamer matrix of the RFU data (by row) where each sub-array corresponds to a sample.
    row_metadata : Dict[str, List[str]]
        A dictionary of each column of the row metadata where the key-value
        pairs are column-name and an array of each sample's corresponding metadata
    column_metadata : Dict[str, List[str]]
        A dictionary of each row of the adat column metdata where the key-value pairs are
        row-name and an array of each somamer's corresponding metadata.
    header_metadata : Dict[str, str]
        A dictionary of each row of the header_metadata corresponds to a key-value pair.
    """
    current_section = None
    header_metadata = {}
    column_metadata = {}
    row_metadata = {}
    rfu_matrix = []
    matrix_depth = 0
    reader = csv.reader(f, delimiter='\t')
    for line in reader:
        # Trim trailing empty cells. (The previous implementation deleted
        # elements of `line` while iterating reversed(line), which only worked
        # by accident and crashed with IndexError on all-empty lines.)
        while line and not line[-1]:
            del line[-1]
        # Blank lines (or lines that were entirely empty cells) carry no data;
        # skip them instead of failing on line[0] below.
        if not line:
            continue
        # If we see a new section marker, switch state & continue to next line
        if '^HEADER' in line[0]:
            current_section = 'HEADER'
            continue
        elif '^TABLE_BEGIN' in line[0]:
            current_section = 'TABLE'
            continue
        elif '^COL_DATA' in line[0]:
            current_section = 'COL_DATA'
            continue
        elif '^ROW_DATA' in line[0]:
            current_section = 'ROW_DATA'
            continue
        # Parse the data according to which section of the adat we're reading
        if current_section == 'HEADER':
            # Not every key in the header has a value
            if len(line) == 1:
                header_metadata[line[0]] = ''
            # Should be the typical case
            elif len(line) == 2:
                # Values that parse as JSON *objects* (e.g. ReportConfig) are
                # kept as dicts; every other value stays a raw string.
                try:
                    header_metadata[line[0]] = json.loads(line[1])
                    if type(header_metadata[line[0]]) != dict:
                        header_metadata[line[0]] = line[1]
                except json.JSONDecodeError:
                    header_metadata[line[0]] = line[1]
            # More than 2 values to a key should never ever happen
            else:
                raise AdatReadError('Unexpected size of header: ' + '|'.join(line))
            # If we have the report config section, check to see if it was loaded as a dict
            if line[0] == "ReportConfig" and type(header_metadata[line[0]]) != dict:
                warnings.warn('Malformed ReportConfig section in header. Setting to an empty dictionary.')
                header_metadata[line[0]] = {}
        elif current_section == 'COL_DATA':
            # Width of this row == number of table rows taken up by column
            # metadata (plus the row-metadata title line); skip rest of section.
            col_metadata_length = len(line)
            current_section = None
        elif current_section == 'ROW_DATA':
            # Index of the first RFU column; everything left of it is row metadata.
            row_metadata_offset = len(line) - 1
            current_section = None
        elif current_section == 'TABLE':
            # matrix_depth distinguishes the column-metadata rows from the
            # row-metadata/RFU rows.
            matrix_depth += 1
            # Column Metadata Section
            if matrix_depth < col_metadata_length:
                column_metadata_name = line[row_metadata_offset]
                column_metadata_data = line[row_metadata_offset + 1:]
                if column_metadata_name == 'SeqId' and re.match(r'\d{3,}-\d{1,3}_\d+', column_metadata_data[0]):
                    warnings.warn('V3 style seqIds (i.e., 12345-6_7). Converting to V4 Style. The adat file writer has an option to write using the V3 style')
                    # Split "12345-6_7" into SeqId "12345-6" and version "7".
                    seq_id_data = [x.split('_')[0] for x in column_metadata_data]
                    version_data = [x.split('_')[1] for x in column_metadata_data]
                    column_metadata[column_metadata_name] = seq_id_data
                    column_metadata['SeqIdVersion'] = version_data
                else:
                    column_metadata[column_metadata_name] = column_metadata_data
                # Ensure all column metadata rows are the same length; pad
                # short rows with empty strings up to the maximum length.
                col_meta_lengths = [len(values) for values in column_metadata.values()]
                if len(set(col_meta_lengths)) > 1:
                    max_length = max(col_meta_lengths)
                    for name, values in column_metadata.items():
                        if len(values) == max_length:
                            continue
                        warnings.warn(f'Adding empty values to column metadata: "{name}"')
                        n_missing_elements = max_length - len(values)
                        append_array = [''] * n_missing_elements
                        new_values = values + append_array
                        column_metadata[name] = new_values
            # Row Metadata Titles
            elif matrix_depth == col_metadata_length:
                row_metadata_names = line[:row_metadata_offset]
                row_metadata = {name: [] for name in row_metadata_names}
            # Row Metadata & RFU Section
            elif matrix_depth > col_metadata_length:
                # Store the row metadata into the dictionary
                row_metadata_data = line[:row_metadata_offset]
                for name, data in zip(row_metadata_names, row_metadata_data):
                    row_metadata[name].append(data)
                # Store the RFU data
                rfu_row_data = line[row_metadata_offset + 1:]
                converted_rfu_row_data = list(map(float, rfu_row_data))
                rfu_matrix.append(converted_rfu_row_data)
    return rfu_matrix, row_metadata, column_metadata, header_metadata
def read_file(filepath: str) -> Adat:
    """Parse the adat file at *filepath* and build an Adat from it.

    Parameters
    ----------
    filepath: str
        Absolute or relative path of the adat file to read.

    Examples
    --------
    >>> adat = read_file('path/to/file.adat')

    Returns
    -------
    adat : Adat
    """
    # parse_file consumes the whole file handle and returns the four
    # sections an Adat is assembled from.
    with open(filepath, 'r') as adat_file:
        matrix, row_meta, col_meta, header_meta = parse_file(adat_file)
    return Adat.from_features(
        rfu_matrix=matrix,
        row_metadata=row_meta,
        column_metadata=col_meta,
        header_metadata=header_meta,
    )
def write_file(adat, path: str, round_rfu: bool = True, convert_to_v3_seq_ids: bool = False) -> None:
    """Write this Adat to an adat format data source.

    Parameters
    ----------
    adat : Adat
        Adat Pandas dataframe to be written.
    path : str
        The file path to write to.
    round_rfu : bool
        Rounds the RFU matrix to one decimal place if True,
        otherwise leaves the matrix as-is. (Default = True)
    convert_to_v3_seq_ids : bool
        Combines the column metadata for SeqId and
        SeqIdVersion to the V3 style (12345-6_7)

    Examples
    --------
    >>> write_file(adat, 'path/to/out/filename.adat')
    >>> write_file(adat, 'path/to/out/filename.adat', round_rfu=False)

    Returns
    -------
    None
    """
    # Add version number to header_metadata. If the field already exists, append to it.
    pkg_version = 'Canopy_' + pkg_resources.require('canopy')[0].version
    if '!GeneratedBy' not in adat.header_metadata:
        adat.header_metadata['!GeneratedBy'] = pkg_version
    elif pkg_version not in adat.header_metadata['!GeneratedBy']:
        adat.header_metadata['!GeneratedBy'] += ', ' + pkg_version
    # Create COL_DATA & ROW_DATA sections: every metadata level is declared
    # with type 'String' in the adat header.
    column_names = adat.columns.names
    column_types = ['String' for name in column_names]
    row_names = adat.index.names
    row_types = ['String' for name in row_names]
    # Start writing the adat using the csv writer (tab-delimited, CRLF line ends).
    with open(path, 'w') as f:
        writer = csv.writer(f, delimiter='\t', lineterminator='\r\n')
        # Checksum must be added with blank value
        writer.writerow(['!Checksum'])
        # Write HEADER section
        writer.writerow(['^HEADER'])
        for row in adat.header_metadata.items():
            # We need to handle the reportconfig in a special way since it has double quotes
            # (csv quoting would mangle the embedded JSON), so it is written raw.
            if row[0] == "ReportConfig":
                f.write(row[0] + '\t' + json.dumps(row[1], separators=(',', ':')) + '\r\n')
            else:
                writer.writerow([x for x in row if x is not None])
        # Write COL_DATA section
        writer.writerow(['^COL_DATA'])
        writer.writerow(['!Name'] + column_names)
        writer.writerow(['!Type'] + column_types)
        # Write ROW_DATA section
        writer.writerow(['^ROW_DATA'])
        writer.writerow(['!Name'] + row_names)
        writer.writerow(['!Type'] + row_types)
        # Begin the main section of the adat
        writer.writerow(['^TABLE_BEGIN'])
        # Write the column metadata; each row is left-padded with one empty
        # cell per row-metadata level so the parser can locate the name cell.
        column_offset = [None for i in range(len(row_names))]
        for column_name in column_names:
            # Prep the data
            column_data = adat.columns.get_level_values(column_name)
            # Check if we are converting to the V3 style of adat seqIds
            if column_name == 'SeqId' and convert_to_v3_seq_ids:
                version_data = adat.columns.get_level_values('SeqIdVersion')
                column_data = [seq_id + '_' + version for seq_id, version in zip(column_data, version_data)]
            if column_name == 'SeqIdVersion' and convert_to_v3_seq_ids:
                # SeqIdVersion is folded into SeqId above, so skip its own row.
                continue
            # Create and write the row
            row = []
            row += column_offset
            row += [column_name]
            row += list(column_data)
            writer.writerow(row)
        # Write the row metadata column titles. Additional tabs added to conform to PX adat structure.
        extra_nones = len(adat.columns.get_level_values(column_names[0])) + 1
        writer.writerow(row_names + [None for x in range(extra_nones)])
        # Write the row metadata and rfu matrix simulataneously
        for i, rfu_row in enumerate(adat.values):
            # Prep the data
            row_metadata = [adat.index.get_level_values(row_name)[i] for row_name in row_names]
            if round_rfu:
                # NOTE(review): jround is a project rounding helper defined
                # elsewhere in this module — confirm its rounding semantics.
                rfu_row = [jround(rfu, 1) for rfu in rfu_row]
            else:
                rfu_row = list(rfu_row)
            # Create and write the row: row metadata, one empty separator cell,
            # then the RFU values.
            row = []
            row += row_metadata
            row += [None]
            row += rfu_row
            writer.writerow(row)
| 37.959459
| 158
| 0.602617
|
4a03d7fbba121bc021adc76119aad215e556b143
| 6,309
|
py
|
Python
|
extensions/bag.py
|
ESDLMapEditorESSIM/esdl-mapeditor
|
c17090e19de8ff3a0cc552f347639bac67840d22
|
[
"Apache-2.0"
] | null | null | null |
extensions/bag.py
|
ESDLMapEditorESSIM/esdl-mapeditor
|
c17090e19de8ff3a0cc552f347639bac67840d22
|
[
"Apache-2.0"
] | 14
|
2020-09-30T21:16:46.000Z
|
2021-11-08T18:54:34.000Z
|
extensions/bag.py
|
ESDLMapEditorESSIM/esdl-mapeditor
|
c17090e19de8ff3a0cc552f347639bac67840d22
|
[
"Apache-2.0"
] | 1
|
2020-09-17T12:48:57.000Z
|
2020-09-17T12:48:57.000Z
|
# This work is based on original code developed and copyrighted by TNO 2020.
# Subsequent contributions are licensed to you by the developers of such code and are
# made available to the Project under one or several contributor license agreements.
#
# This work is licensed to you under the Apache License, Version 2.0.
# You may obtain a copy of the license at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Contributors:
# TNO - Initial implementation
# Manager:
# TNO
from flask import Flask
from flask_socketio import SocketIO, emit
from extensions.session_manager import get_handler, get_session
import src.settings as settings
import requests
from esdl.processing import ESDLAsset, ESDLGeometry, ESDLEnergySystem
import esdl
from geomet import wkt
import src.log as log
logger = log.get_logger(__name__)
class BAG:
    """Map-editor extension that fetches BAG building contours for a map area
    from an external BAG service and pushes them to the browser via Socket.IO.
    """

    def __init__(self, flask_app: Flask, socket: SocketIO):
        self.flask_app = flask_app
        self.socketio = socket
        self.register()

    def register(self):
        """Register the Socket.IO event handlers for this extension."""
        logger.info('Registering BAG extension')

        @self.socketio.on('get_bag_contours', namespace='/esdl')
        def get_bag_contours(info):
            # info carries the area id and the polygon drawn on the map.
            with self.flask_app.app_context():
                print("getting bag information")
                esh = get_handler()
                active_es_id = get_session('active_es_id')
                area_id = info["id"]
                area_polygon = { 'type': 'polygon', 'coordinates': info["polygon"] }
                geometry = ESDLGeometry.create_ESDL_geometry(area_polygon)
                boundary_wgs = ESDLGeometry.create_boundary_from_geometry(geometry)
                # boundary_geojson = ESDLGeometry.create_geojson(area_id, '', [], boundary_wgs)
                # The BAG service takes the area boundary as a WKT string.
                wkt_string = wkt.dumps(boundary_wgs)
                # wkt_string = 'POLYGON ((4.359093904495239 52.012174264626445, 4.357388019561768 52.01154692445308, 4.357978105545044 52.01078750089633, 4.360188245773315 52.01160635705717, 4.362355470657349 52.012478026181434, 4.360767602920532 52.012847820073766, 4.359093904495239 52.012174264626445))'
                # wkt_quoted = urllib.parse.quote(wkt_string)
                es_edit = esh.get_energy_system(es_id=active_es_id)
                instance = es_edit.instance
                top_area = instance[0].area
                target_area = ESDLEnergySystem.find_area(top_area, area_id)
                if target_area:
                    try:
                        # url = 'http://' + settings.bag_config["host"] + ':' + settings.bag_config["port"] + \
                        #     settings.bag_config["path_contour"] + '?wkt=' + wkt_quoted + '&format=xml'
                        # print(url)
                        # r = requests.get(url)
                        url = 'http://' + settings.bag_config["host"] + ':' + settings.bag_config["port"] + \
                            settings.bag_config["path_contour"] + '?format=xml'
                        print(url)
                        # POST the boundary; the service answers with an ESDL document.
                        r = requests.post(url, json={"wkt": wkt_string})
                        if r.status_code == 201:
                            esdl_string = r.text
                            bag_es = ESDLAsset.load_asset_from_string(esdl_string)
                            if bag_es:
                                bag_inst = bag_es.instance[0]
                                if bag_inst:
                                    bag_area = bag_inst.area
                                    if bag_area:
                                        bld_list = []
                                        for bld in bag_area.asset:
                                            if isinstance(bld, esdl.Building):
                                                # Copy each building into the edited energy system
                                                # and collect its geojson for the client.
                                                target_area.asset.append(bld.deepcopy())
                                                geometry = bld.geometry
                                                boundary_wgs = ESDLGeometry.create_boundary_from_geometry(geometry)
                                                bld_list.append(ESDLGeometry.create_geojson(bld.id, bld.name, [], boundary_wgs))
                                        if bld_list:
                                            emit('geojson', {"layer": "bld_layer", "geojson": bld_list})
                    except Exception as e:
                        print('ERROR in accessing BAG service: '+str(e))
                        return None
                    # @EWOUD: We can also use this 'capability' to render geometries in the frontend
                    # self.emit_geometries_to_client(esh, active_es_id, bld_list)
                else:
                    print("ERROR in finding area in ESDL for BAG service")
                    # self.flask_app.send_alert("ERROR in finding area in ESDL for BAG service")
                    return None

    # @self.flask_app.route('/building_list')
    # def get_building_list():
    #     try:
    #         url = 'http://' + settings.bag_config["host"] + ':' + settings.bag_config["port"] + \
    #               settings.bag_config["path_list"]
    #         print(url)
    #         r = requests.get(url)
    #         if len(r.text) > 0:
    #             building_list = json.loads(r.text)
    #     except Exception as e:
    #         print('ERROR in accessing BAG service')
    #         return None
    #
    #     return { "buildings": building_list }

    def emit_geometries_to_client(self, esh, es_id, building_list):
        """Send a list of building dicts to the client as geojson Features."""
        with self.flask_app.app_context():
            # print(area_list)
            emit_bld_list = []
            for bld in building_list:
                emit_bld_list.append({
                    "type": "Feature",
                    "geometry": {
                        "type": "Polygon",
                        "coordinates": bld['geom']['coordinates']
                    },
                    "properties": {
                        "id": bld['code'],
                        "name": bld['name'],
                    }
                })
            # print(emit_area_list)
            # emit('geojson', {"layer": "area_layer", "geojson": emit_area_list})
            emit('geojson', {"layer": "building_layer", "geojson": emit_bld_list}, namespace='/esdl')
| 48.160305
| 306
| 0.525281
|
4a03d9831bb44758ed1926b705dd8de9dc47d735
| 20,181
|
py
|
Python
|
python/ccxt/async_support/coinone.py
|
qbtrade/ccxt
|
ff625fc55bff733e570c4960f44578cfa3100666
|
[
"MIT"
] | null | null | null |
python/ccxt/async_support/coinone.py
|
qbtrade/ccxt
|
ff625fc55bff733e570c4960f44578cfa3100666
|
[
"MIT"
] | null | null | null |
python/ccxt/async_support/coinone.py
|
qbtrade/ccxt
|
ff625fc55bff733e570c4960f44578cfa3100666
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
import base64
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import ExchangeNotAvailable
class coinone (Exchange):
    """ccxt adapter for the CoinOne exchange (coinone.co.kr).

    NOTE: this file is generated (see the header); comments here are
    descriptive only and will be lost on regeneration.
    """

    def describe(self):
        """Return the static exchange description (ids, endpoints, markets, fees)."""
        return self.deep_extend(super(coinone, self).describe(), {
            'id': 'coinone',
            'name': 'CoinOne',
            'countries': ['KR'],  # Korea
            'rateLimit': 667,
            'version': 'v2',
            'has': {
                'CORS': False,
                'createMarketOrder': False,
                'fetchTickers': True,
                'fetchOrder': True,
            },
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/1294454/38003300-adc12fba-323f-11e8-8525-725f53c4a659.jpg',
                'api': 'https://api.coinone.co.kr',
                'www': 'https://coinone.co.kr',
                'doc': 'https://doc.coinone.co.kr',
            },
            'requiredCredentials': {
                'apiKey': True,
                'secret': True,
            },
            'api': {
                'public': {
                    'get': [
                        'orderbook/',
                        'trades/',
                        'ticker/',
                    ],
                },
                'private': {
                    'post': [
                        'account/btc_deposit_address/',
                        'account/balance/',
                        'account/daily_balance/',
                        'account/user_info/',
                        'account/virtual_account/',
                        'order/cancel_all/',
                        'order/cancel/',
                        'order/limit_buy/',
                        'order/limit_sell/',
                        'order/complete_orders/',
                        'order/limit_orders/',
                        'order/order_info/',
                        'transaction/auth_number/',
                        'transaction/history/',
                        'transaction/krw/history/',
                        'transaction/btc/',
                        'transaction/coin/',
                    ],
                },
            },
            # All markets are quoted in KRW and hard-coded (no fetchMarkets).
            'markets': {
                'BCH/KRW': {'id': 'bch', 'symbol': 'BCH/KRW', 'base': 'BCH', 'quote': 'KRW', 'baseId': 'bch', 'quoteId': 'krw'},
                'BTC/KRW': {'id': 'btc', 'symbol': 'BTC/KRW', 'base': 'BTC', 'quote': 'KRW', 'baseId': 'btc', 'quoteId': 'krw'},
                'BTG/KRW': {'id': 'btg', 'symbol': 'BTG/KRW', 'base': 'BTG', 'quote': 'KRW', 'baseId': 'btg', 'quoteId': 'krw'},
                'ETC/KRW': {'id': 'etc', 'symbol': 'ETC/KRW', 'base': 'ETC', 'quote': 'KRW', 'baseId': 'etc', 'quoteId': 'krw'},
                'ETH/KRW': {'id': 'eth', 'symbol': 'ETH/KRW', 'base': 'ETH', 'quote': 'KRW', 'baseId': 'eth', 'quoteId': 'krw'},
                'IOTA/KRW': {'id': 'iota', 'symbol': 'IOTA/KRW', 'base': 'IOTA', 'quote': 'KRW', 'baseId': 'iota', 'quoteId': 'krw'},
                'LTC/KRW': {'id': 'ltc', 'symbol': 'LTC/KRW', 'base': 'LTC', 'quote': 'KRW', 'baseId': 'ltc', 'quoteId': 'krw'},
                'OMG/KRW': {'id': 'omg', 'symbol': 'OMG/KRW', 'base': 'OMG', 'quote': 'KRW', 'baseId': 'omg', 'quoteId': 'krw'},
                'QTUM/KRW': {'id': 'qtum', 'symbol': 'QTUM/KRW', 'base': 'QTUM', 'quote': 'KRW', 'baseId': 'qtum', 'quoteId': 'krw'},
                'XRP/KRW': {'id': 'xrp', 'symbol': 'XRP/KRW', 'base': 'XRP', 'quote': 'KRW', 'baseId': 'xrp', 'quoteId': 'krw'},
                'EOS/KRW': {'id': 'eos', 'symbol': 'EOS/KRW', 'base': 'EOS', 'quote': 'KRW', 'baseId': 'eos', 'quoteId': 'krw'},
                'DATA/KRW': {'id': 'data', 'symbol': 'DATA/KRW', 'base': 'DATA', 'quote': 'KRW', 'baseId': 'data', 'quoteId': 'krw'},
                'ZIL/KRW': {'id': 'zil', 'symbol': 'ZIL/KRW', 'base': 'ZIL', 'quote': 'KRW', 'baseId': 'zil', 'quoteId': 'krw'},
                'KNC/KRW': {'id': 'knc', 'symbol': 'KNC/KRW', 'base': 'KNC', 'quote': 'KRW', 'baseId': 'knc', 'quoteId': 'krw'},
                'ZRX/KRW': {'id': 'zrx', 'symbol': 'ZRX/KRW', 'base': 'ZRX', 'quote': 'KRW', 'baseId': 'zrx', 'quoteId': 'krw'},
                'LUNA/KRW': {'id': 'luna', 'symbol': 'LUNA/KRW', 'base': 'LUNA', 'quote': 'KRW', 'baseId': 'luna', 'quoteId': 'krw'},
                'ATOM/KRW': {'id': 'atom', 'symbol': 'ATOM/KRW', 'base': 'ATOM', 'quote': 'KRW', 'baseId': 'atom', 'quoteId': 'krw'},
                # NOTE(review): symbol below is lowercase 'vnt/KRW' — likely should be 'VNT/KRW'; verify upstream.
                'VNT/KRW': {'id': 'vnt', 'symbol': 'vnt/KRW', 'base': 'VNT', 'quote': 'KRW', 'baseId': 'vnt', 'quoteId': 'krw'},
            },
            'fees': {
                'trading': {
                    'tierBased': True,
                    'percentage': True,
                    'taker': 0.001,
                    'maker': 0.001,
                    # Fee tiers keyed by 30-day traded volume thresholds (KRW).
                    'tiers': {
                        'taker': [
                            [0, 0.001],
                            [100000000, 0.0009],
                            [1000000000, 0.0008],
                            [5000000000, 0.0007],
                            [10000000000, 0.0006],
                            [20000000000, 0.0005],
                            [30000000000, 0.0004],
                            [40000000000, 0.0003],
                            [50000000000, 0.0002],
                        ],
                        'maker': [
                            [0, 0.001],
                            [100000000, 0.0008],
                            [1000000000, 0.0006],
                            [5000000000, 0.0004],
                            [10000000000, 0.0002],
                            [20000000000, 0],
                            [30000000000, 0],
                            [40000000000, 0],
                            [50000000000, 0],
                        ],
                    },
                },
            },
            # Maps CoinOne numeric errorCode strings to ccxt exception classes.
            'exceptions': {
                '405': ExchangeNotAvailable,
                '104': OrderNotFound,
            },
        })

    async def fetch_balance(self, params={}):
        """Fetch account balances; non-currency keys are stripped from the response."""
        await self.load_markets()
        response = await self.privatePostAccountBalance(params)
        result = {'info': response}
        # Remove the bookkeeping fields so only per-currency entries remain.
        balances = self.omit(response, [
            'errorCode',
            'result',
            'normalWallets',
        ])
        ids = list(balances.keys())
        for i in range(0, len(ids)):
            id = ids[i]
            balance = balances[id]
            code = id.upper()
            if id in self.currencies_by_id:
                code = self.currencies_by_id[id]['code']
            free = self.safe_float(balance, 'avail')
            total = self.safe_float(balance, 'balance')
            used = total - free
            account = {
                'free': free,
                'used': used,
                'total': total,
            }
            result[code] = account
        return self.parse_balance(result)

    async def fetch_order_book(self, symbol, limit=None, params={}):
        """Fetch the order book for *symbol* (bid/ask with price and qty fields)."""
        await self.load_markets()
        market = self.market(symbol)
        request = {
            'currency': market['id'],
            'format': 'json',
        }
        response = await self.publicGetOrderbook(self.extend(request, params))
        return self.parse_order_book(response, None, 'bid', 'ask', 'price', 'qty')

    async def fetch_tickers(self, symbols=None, params={}):
        """Fetch tickers for all markets in one request (currency=all)."""
        await self.load_markets()
        request = {
            'currency': 'all',
            'format': 'json',
        }
        response = await self.publicGetTicker(self.extend(request, params))
        result = {}
        ids = list(response.keys())
        for i in range(0, len(ids)):
            id = ids[i]
            symbol = id
            market = None
            if id in self.markets_by_id:
                market = self.markets_by_id[id]
                symbol = market['symbol']
            ticker = response[id]
            result[symbol] = self.parse_ticker(ticker, market)
        return result

    async def fetch_ticker(self, symbol, params={}):
        """Fetch the ticker for a single market."""
        await self.load_markets()
        market = self.market(symbol)
        request = {
            'currency': market['id'],
            'format': 'json',
        }
        response = await self.publicGetTicker(self.extend(request, params))
        return self.parse_ticker(response, market)

    def parse_ticker(self, ticker, market=None):
        """Convert a raw CoinOne ticker dict into the unified ccxt ticker shape."""
        # The API provides no timestamp, so the local clock is used.
        timestamp = self.milliseconds()
        last = self.safe_float(ticker, 'last')
        previousClose = self.safe_float(ticker, 'yesterday_last')
        change = None
        if last is not None and previousClose is not None:
            # NOTE(review): computed as previousClose - last; the usual ccxt
            # convention is last - previousClose — verify the intended sign.
            change = previousClose - last
        symbol = market['symbol'] if (market is not None) else None
        return {
            'symbol': symbol,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'high': self.safe_float(ticker, 'high'),
            'low': self.safe_float(ticker, 'low'),
            'bid': None,
            'bidVolume': None,
            'ask': None,
            'askVolume': None,
            'vwap': None,
            'open': self.safe_float(ticker, 'first'),
            'close': last,
            'last': last,
            'previousClose': previousClose,
            'change': change,
            'percentage': None,
            'average': None,
            'baseVolume': self.safe_float(ticker, 'volume'),
            'quoteVolume': None,
            'info': ticker,
        }

    def parse_trade(self, trade, market=None):
        """Convert a raw public trade into the unified ccxt trade shape."""
        # API timestamps are in seconds; ccxt uses milliseconds.
        timestamp = self.safe_integer(trade, 'timestamp') * 1000
        symbol = market['symbol'] if (market is not None) else None
        # is_ask == '1' means the taker sold; '0' means the taker bought.
        is_ask = self.safe_string(trade, 'is_ask')
        side = None
        if is_ask == '1':
            side = 'sell'
        elif is_ask == '0':
            side = 'buy'
        return {
            'id': None,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'order': None,
            'symbol': symbol,
            'type': None,
            'side': side,
            'price': self.safe_float(trade, 'price'),
            'amount': self.safe_float(trade, 'qty'),
            'fee': None,
            'info': trade,
        }

    async def fetch_trades(self, symbol, since=None, limit=None, params={}):
        """Fetch recent public trades (last hour window) for *symbol*."""
        await self.load_markets()
        market = self.market(symbol)
        request = {
            'currency': market['id'],
            'period': 'hour',
            'format': 'json',
        }
        response = await self.publicGetTrades(self.extend(request, params))
        return self.parse_trades(response['completeOrders'], market, since, limit)

    async def create_order(self, symbol, type, side, amount, price=None, params={}):
        """Place a limit order (market orders are not supported by this exchange)."""
        await self.load_markets()
        if type != 'limit':
            raise ExchangeError(self.id + ' allows limit orders only')
        # NOTE(review): load_markets() is awaited twice; the second call is redundant.
        await self.load_markets()
        request = {
            'price': price,
            'currency': self.market_id(symbol),
            'qty': amount,
        }
        # Dispatches to privatePostOrderLimitBuy / privatePostOrderLimitSell.
        method = 'privatePostOrder' + self.capitalize(type) + self.capitalize(side)
        response = await getattr(self, method)(self.extend(request, params))
        id = self.safe_string(response, 'orderId')
        if id is not None:
            id = id.upper()
        timestamp = self.milliseconds()
        cost = price * amount
        # The API returns only an id, so the order is synthesized locally
        # and cached in self.orders for later fetch/cancel calls.
        order = {
            'info': response,
            'id': id,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': None,
            'symbol': symbol,
            'type': type,
            'side': side,
            'price': price,
            'cost': cost,
            'average': None,
            'amount': amount,
            'filled': None,
            'remaining': amount,
            'status': 'open',
            'fee': None,
        }
        self.orders[id] = order
        return order

    async def fetch_order(self, id, symbol=None, params={}):
        """Fetch a single order by id; needs *symbol* or a cached order to resolve the market."""
        await self.load_markets()
        result = None
        market = None
        if symbol is None:
            if id in self.orders:
                market = self.market(self.orders[id]['symbol'])
            else:
                raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument for order ids missing in the .orders cache(the order was created with a different instance of self class or within a different run of self code).')
        else:
            market = self.market(symbol)
        try:
            request = {
                'order_id': id,
                'currency': market['id'],
            }
            response = await self.privatePostOrderOrderInfo(self.extend(request, params))
            result = self.parse_order(response)
            self.orders[id] = result
        except Exception as e:
            if isinstance(e, OrderNotFound):
                # A missing order that we created is assumed canceled.
                if id in self.orders:
                    self.orders[id]['status'] = 'canceled'
                    result = self.orders[id]
                else:
                    raise e
            else:
                raise e
        return result

    def parse_order_status(self, status):
        """Map a CoinOne order status string to a unified ccxt status."""
        statuses = {
            'live': 'open',
            'partially_filled': 'open',
            'filled': 'closed',
        }
        return self.safe_string(statuses, status, status)

    def parse_order(self, order, market=None):
        """Convert a raw order-info response into the unified ccxt order shape."""
        info = self.safe_value(order, 'info')
        id = self.safe_string(info, 'orderId')
        if id is not None:
            id = id.upper()
        # API timestamps are in seconds; ccxt uses milliseconds.
        timestamp = self.safe_integer(info, 'timestamp') * 1000
        status = self.parse_order_status(self.safe_string(order, 'status'))
        cost = None
        # 'ask'-typed orders are sells, everything else is a buy.
        side = self.safe_string(info, 'type')
        if side.find('ask') >= 0:
            side = 'sell'
        else:
            side = 'buy'
        price = self.safe_float(info, 'price')
        amount = self.safe_float(info, 'qty')
        remaining = self.safe_float(info, 'remainQty')
        filled = None
        if amount is not None:
            if remaining is not None:
                filled = amount - remaining
            if price is not None:
                cost = price * amount
        currency = self.safe_string(info, 'currency')
        fee = {
            'currency': currency,
            'cost': self.safe_float(info, 'fee'),
            'rate': self.safe_float(info, 'feeRate'),
        }
        symbol = None
        if market is None:
            marketId = currency.lower()
            if marketId in self.markets_by_id:
                market = self.markets_by_id[marketId]
        if market is not None:
            symbol = market['symbol']
        return {
            'info': order,
            'id': id,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': None,
            'symbol': symbol,
            'type': 'limit',
            'side': side,
            'price': price,
            'cost': cost,
            'amount': amount,
            'filled': filled,
            'remaining': remaining,
            'status': status,
            'fee': fee,
        }

    async def cancel_order(self, id, symbol=None, params={}):
        """Cancel an order; CoinOne requires price, qty and side, taken from the
        orders cache or from explicit params when the order is not cached."""
        await self.load_markets()
        order = self.safe_value(self.orders, id)
        amount = None
        price = None
        side = None
        if order is None:
            if symbol is None:
                # eslint-disable-next-line quotes
                raise InvalidOrder(self.id + " cancelOrder could not find the order id " + id + " in orders cache. The order was probably created with a different instance of self class earlier. The `symbol` argument is missing. To cancel the order, pass a symbol argument and {'price': 12345, 'qty': 1.2345, 'is_ask': 0} in the params argument of cancelOrder.")
            price = self.safe_float(params, 'price')
            if price is None:
                # eslint-disable-next-line quotes
                raise InvalidOrder(self.id + " cancelOrder could not find the order id " + id + " in orders cache. The order was probably created with a different instance of self class earlier. The `price` parameter is missing. To cancel the order, pass a symbol argument and {'price': 12345, 'qty': 1.2345, 'is_ask': 0} in the params argument of cancelOrder.")
            amount = self.safe_float(params, 'qty')
            if amount is None:
                # eslint-disable-next-line quotes
                raise InvalidOrder(self.id + " cancelOrder could not find the order id " + id + " in orders cache. The order was probably created with a different instance of self class earlier. The `qty`(amount) parameter is missing. To cancel the order, pass a symbol argument and {'price': 12345, 'qty': 1.2345, 'is_ask': 0} in the params argument of cancelOrder.")
            side = self.safe_float(params, 'is_ask')
            if side is None:
                # eslint-disable-next-line quotes
                raise InvalidOrder(self.id + " cancelOrder could not find the order id " + id + " in orders cache. The order was probably created with a different instance of self class earlier. The `is_ask`(side) parameter is missing. To cancel the order, pass a symbol argument and {'price': 12345, 'qty': 1.2345, 'is_ask': 0} in the params argument of cancelOrder.")
        else:
            price = order['price']
            amount = order['amount']
            # is_ask: 1 = sell, 0 = buy.
            side = 0 if (order['side'] == 'buy') else 1
            symbol = order['symbol']
        request = {
            'order_id': id,
            'price': price,
            'qty': amount,
            'is_ask': side,
            'currency': self.market_id(symbol),
        }
        self.orders[id]['status'] = 'canceled'
        return await self.privatePostOrderCancel(self.extend(request, params))

    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the signed request: private calls send a base64 JSON payload
        with an HMAC-SHA512 signature over it, keyed by the uppercased secret."""
        request = self.implode_params(path, params)
        query = self.omit(params, self.extract_params(path))
        url = self.urls['api'] + '/'
        if api == 'public':
            url += request
            if query:
                url += '?' + self.urlencode(query)
        else:
            self.check_required_credentials()
            url += self.version + '/' + request
            nonce = str(self.nonce())
            # 'json' here is a local string variable, not the json module.
            json = self.json(self.extend({
                'access_token': self.apiKey,
                'nonce': nonce,
            }, params))
            payload = base64.b64encode(self.encode(json))
            body = self.decode(payload)
            secret = self.secret.upper()
            signature = self.hmac(payload, self.encode(secret), hashlib.sha512)
            headers = {
                'content-type': 'application/json',
                'X-COINONE-PAYLOAD': payload,
                'X-COINONE-SIGNATURE': signature,
            }
        return {'url': url, 'method': method, 'body': body, 'headers': headers}

    def handle_errors(self, code, reason, url, method, headers, body, response):
        """Raise a mapped exception when the response reports result != success."""
        if response is None:
            return
        if 'result' in response:
            result = response['result']
            if result != 'success':
                #
                # { "errorCode": "405", "status": "maintenance", "result": "error"}
                #
                code = self.safe_string(response, 'errorCode')
                feedback = self.id + ' ' + self.json(response)
                exceptions = self.exceptions
                if code in exceptions:
                    raise exceptions[code](feedback)
                else:
                    raise ExchangeError(feedback)
        else:
            raise ExchangeError(self.id + ' ' + body)
| 42.756356
| 369
| 0.491452
|
4a03da375fae801fc464984e04a18005b3dfd418
| 5,774
|
py
|
Python
|
Matc_links/Matc_links/spiders/get_links2.py
|
Nouldine/MyCrawlerSystem
|
7bba8ba3ec76e10f70a35700602812ee6f039b63
|
[
"MIT"
] | null | null | null |
Matc_links/Matc_links/spiders/get_links2.py
|
Nouldine/MyCrawlerSystem
|
7bba8ba3ec76e10f70a35700602812ee6f039b63
|
[
"MIT"
] | null | null | null |
Matc_links/Matc_links/spiders/get_links2.py
|
Nouldine/MyCrawlerSystem
|
7bba8ba3ec76e10f70a35700602812ee6f039b63
|
[
"MIT"
] | null | null | null |
from scrapy import Spider
from scrapy.spiders import CrawlSpider, Rule
from scrapy.selector import Selector
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.linkextractors import LinkExtractor
import scrapy
from scrapy.spidermiddlewares.httperror import HttpError
from twisted.internet.error import DNSLookupError
from twisted.internet.error import TimeoutError, TCPTimedOutError
from Matc_links.items import MatcLinksItem
class Matc_links( scrapy.Spider ):
    """Crawl Madison College catalog subject-listing pages and collect every
    hyperlink found on them.

    There is one start URL per subject letter for both degree (DEGR) and
    non-degree (NDEG) program listings. Successful responses yield a
    MatcLinksItem with all hrefs on the page; failures are logged by type.
    """
    name = 'matc_links2'
    allowed_domains = ['madisoncollege.edu']
    start_urls = [
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/A/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/B/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/C/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/D/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/E/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/F/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/G/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/H/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/I/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/J/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/L/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/M/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/N/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/O/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/P/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/Q/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/R/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/S/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/T/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/V/DEGR",
        # BUG FIX: the trailing comma after the W/DEGR URL was missing, so
        # Python concatenated it with the 0/NDEG URL into one invalid URL.
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/W/DEGR",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/0/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/A/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/B/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/C/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/D/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/E/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/F/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/G/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/H/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/I/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/M/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/N/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/P/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/Q/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/R/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/S/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/V/NDEG",
        "https://my.madisoncollege.edu/app/catalog/listSubjectsByLetter/MATC1/W/NDEG",
    ]

    def start_requests(self):
        """Issue one request per start URL, routing successes and failures
        to parse_httpbin / errback_httpbin; dont_filter disables dedup."""
        for u in self.start_urls:
            yield scrapy.Request(u, callback=self.parse_httpbin,
                                 errback=self.errback_httpbin,
                                 dont_filter=True)

    def parse_httpbin(self, response):
        """Yield a MatcLinksItem containing every href found on the page."""
        self.logger.info("Got successful response from {}".format(response.url))
        items = MatcLinksItem()
        items['links'] = response.xpath('*//a/@href').extract()
        yield items

    def errback_httpbin(self, failure):
        """Log request failures, distinguishing HTTP, DNS and timeout errors."""
        # log all failures
        self.logger.error(repr(failure))
        # in case you want to do something special for some errors,
        # you may need the non-200 response
        if failure.check(HttpError):
            # These exceptions come from the HttpError spider middleware;
            # the non-200 response is available on the failure.
            response = failure.value.response
            self.logger.error("HttpError on %s", response.url)
        elif failure.check(DNSLookupError):
            # This is the original request
            request = failure.request
            # BUG FIX: format string was '%' (no conversion specifier).
            self.logger.error('DNSLookupError on %s', request.url)
        # BUG FIX: original referenced undefined name TPCTimeOutError;
        # the imported name is TCPTimedOutError.
        elif failure.check(TimeoutError, TCPTimedOutError):
            request = failure.request
            self.logger.error('TimeoutError on %s', request.url)
| 43.089552
| 95
| 0.661586
|
4a03db345a92ff0ce170ddcf1853bf873762632a
| 2,916
|
py
|
Python
|
source_code/namespace/iter.py
|
itsjunqing/fit1008-introduction-to-computer-science
|
aaf2061ecdcbf10c08b60ef481d1e8c6d3199f94
|
[
"MIT"
] | 7
|
2019-08-29T10:11:17.000Z
|
2021-11-28T20:36:32.000Z
|
source_code/namespace/iter.py
|
itsjunqing/fit1008-introduction-to-computer-science
|
aaf2061ecdcbf10c08b60ef481d1e8c6d3199f94
|
[
"MIT"
] | null | null | null |
source_code/namespace/iter.py
|
itsjunqing/fit1008-introduction-to-computer-science
|
aaf2061ecdcbf10c08b60ef481d1e8c6d3199f94
|
[
"MIT"
] | 6
|
2020-10-21T07:55:10.000Z
|
2021-11-05T10:08:06.000Z
|
from data_structures.node import Node
class ListIterator:
    """Iterator over a chain of linked nodes, yielding each node's item.

    Implements the full iterator protocol (__iter__ and __next__) so a
    linked list can hand one of these out and be used in a for-loop.
    """

    def __init__(self, head):
        # Traversal cursor; starts at the head node (None for an empty list).
        self.current = head

    def __iter__(self):
        # An iterator is its own iterable.
        return self

    def __next__(self):
        node = self.current
        if node is None:
            # Walked past the last node: signal exhaustion.
            raise StopIteration
        self.current = node.link
        return node.item
class List:
    """Singly linked list with positional insert/delete and iteration support."""

    def __init__(self):
        self.head = None   # first Node in the chain, or None when empty
        self.count = 0     # number of stored items

    def __iter__(self):
        # Delegate iteration to ListIterator so "for item in list" works.
        return ListIterator(self.head)

    def is_empty(self):
        """Return True when the list holds no items."""
        return self.count == 0

    def is_full(self):
        """A linked list is never full."""
        return False

    def _get_node(self, index):
        """Return the node at position *index* (requires 0 <= index < count)."""
        assert 0 <= index < self.count, "index out of bound"
        node = self.head
        for _ in range(index):
            node = node.link
        return node

    def insert(self, index, item):
        """Insert *item* before position *index*, clamping out-of-range indices.

        Returns True on success, False when the list is full (never, here).
        """
        if self.is_full():
            return False
        # Clamp the index into the valid [0, count] range.
        if index < 0:
            index = 0
        elif index > self.count:
            index = self.count
        if index == 0:
            self.head = Node(item, self.head)
        else:
            node = self._get_node(index - 1)
            node.link = Node(item, node.link)
        self.count += 1
        return True

    def delete(self, index):
        """Remove the item at *index*; return True when a removal happened."""
        valid_index = index >= 0 and index < self.count
        if self.is_empty():
            return False
        if valid_index:
            if index == 0:
                # BUG FIX: nodes link via the .link attribute, not .next;
                # the old code raised AttributeError when deleting the head.
                self.head = self.head.link
            else:
                node = self._get_node(index - 1)
                node.link = node.link.link
            self.count -= 1
        return valid_index

    def __str__(self):
        ret = ""
        current = self.head
        while current is not None:
            ret = ret + str(current.item) + ","
            current = current.link
        return ret

    def __len__(self):
        return self.count
# Demo: inserting at index 0 each time prepends, so the list reads 1,2,3,4.
a_list = List()
a_list.insert(0, 4)
a_list.insert(0, 3)
a_list.insert(0, 2)
a_list.insert(0, 1)
# itera = iter(a_list)
# print(next(itera))
# print(next(itera))
# print(next(itera))
# print(next(itera))
# print(str(a_list))
#
# # similar to calling next(itera)
# for item in a_list:
# print(item)
class MyRange:
    """Minimal re-implementation of range(start, end) as an iterator class.

    Produces the integers start, start+1, ..., end-1. Like any single-pass
    iterator it is exhausted after one full traversal.
    """

    def __init__(self, start, end):
        self.current = start  # next value to be produced
        self.end = end        # exclusive upper bound

    def __iter__(self):
        return self

    def __next__(self):
        if self.current >= self.end:
            raise StopIteration
        value = self.current
        self.current = value + 1
        return value
class MyRange2:
def __init__(self, head):
self.current = head
def __iter__(self):
return self
def __next__(self):
if self.current is None:
raise StopIteration
item = self.current.item
self.current = self.current.link
return item
myrange = MyRange(1, 10)
myrangeiter = iter(myrange)
print(next(myrangeiter))
for item in myrange:
print(item)
myrange2 = MyRange2()
| 19.702703
| 75
| 0.6893
|
4a03db7f64c2ff4e41890745c9b3daeabec4d3e0
| 209
|
py
|
Python
|
test_commands.py
|
PerformLine/python-performline-client
|
82c8f0dc47e007d35702455a2cd8167c0908bddd
|
[
"BSD-3-Clause"
] | null | null | null |
test_commands.py
|
PerformLine/python-performline-client
|
82c8f0dc47e007d35702455a2cd8167c0908bddd
|
[
"BSD-3-Clause"
] | 1
|
2019-03-01T05:39:00.000Z
|
2019-03-01T05:39:00.000Z
|
test_commands.py
|
PerformLine/python-performline-client
|
82c8f0dc47e007d35702455a2cd8167c0908bddd
|
[
"BSD-3-Clause"
] | null | null | null |
from performline.client import Client
c = Client("976794ca6e5897e27d1b439064691bb1c3eb0420")
tf = list(c.trafficsources())
brands2 = list(c.brands(limit=2))
# brands2 = list(c.brands(create_date=">20200810"))
| 34.833333
| 54
| 0.77512
|
4a03dbc0217011304ea5efa3ee054d40aae9eba1
| 902
|
py
|
Python
|
ca_on_oshawa/__init__.py
|
dcycle/scrapers-ca
|
4c7a6cd01d603221b5b3b7a400d2e5ca0c6e916f
|
[
"MIT"
] | 19
|
2015-05-26T03:18:50.000Z
|
2022-01-31T03:27:41.000Z
|
ca_on_oshawa/__init__.py
|
dcycle/scrapers-ca
|
4c7a6cd01d603221b5b3b7a400d2e5ca0c6e916f
|
[
"MIT"
] | 119
|
2015-01-09T06:09:35.000Z
|
2022-01-20T23:05:05.000Z
|
ca_on_oshawa/__init__.py
|
dcycle/scrapers-ca
|
4c7a6cd01d603221b5b3b7a400d2e5ca0c6e916f
|
[
"MIT"
] | 17
|
2015-11-23T05:00:10.000Z
|
2021-09-15T16:03:33.000Z
|
from utils import CanadianJurisdiction
from pupa.scrape import Organization
class Oshawa(CanadianJurisdiction):
classification = 'legislature'
division_id = 'ocd-division/country:ca/csd:3518013'
division_name = 'Oshawa'
name = 'Oshawa City Council'
url = 'http://www.oshawa.ca'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
organization.add_post(role='Mayor', label=self.division_name, division_id=self.division_id)
for ward_number in range(1, 6):
division_id = '{}/ward:{}'.format(self.division_id, ward_number)
organization.add_post(role='Regional Councillor', label='Ward {}'.format(ward_number), division_id=division_id)
organization.add_post(role='Councillor', label='Ward {}'.format(ward_number), division_id=division_id)
yield organization
| 41
| 123
| 0.716186
|
4a03dbef58d40837606b0b248f5cf914a4db40b9
| 3,223
|
py
|
Python
|
pypureclient/flasharray/FA_2_8/models/api_client_patch.py
|
Flav-STOR-WL/py-pure-client
|
03b889c997d90380ac5d6380ca5d5432792d3e89
|
[
"BSD-2-Clause"
] | 14
|
2018-12-07T18:30:27.000Z
|
2022-02-22T09:12:33.000Z
|
pypureclient/flasharray/FA_2_8/models/api_client_patch.py
|
Flav-STOR-WL/py-pure-client
|
03b889c997d90380ac5d6380ca5d5432792d3e89
|
[
"BSD-2-Clause"
] | 28
|
2019-09-17T21:03:52.000Z
|
2022-03-29T22:07:35.000Z
|
pypureclient/flasharray/FA_2_8/models/api_client_patch.py
|
Flav-STOR-WL/py-pure-client
|
03b889c997d90380ac5d6380ca5d5432792d3e89
|
[
"BSD-2-Clause"
] | 15
|
2020-06-11T15:50:08.000Z
|
2022-03-21T09:27:25.000Z
|
# coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_8 import models
class ApiClientPatch(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'enabled': 'bool'
}
attribute_map = {
'enabled': 'enabled'
}
required_args = {
}
def __init__(
self,
enabled=None, # type: bool
):
"""
Keyword args:
enabled (bool): Returns a value of `true` if the API client is permitted to exchange ID tokens for access tokens. API clients are disabled by default.
"""
if enabled is not None:
self.enabled = enabled
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `ApiClientPatch`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ApiClientPatch, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ApiClientPatch):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 28.776786
| 162
| 0.551039
|
4a03dc0288b281346162bcc29ec9141c182c8cb6
| 24,894
|
py
|
Python
|
src/cryptoadvance/specter/devices/hwi/jadepy/jade.py
|
GoofyAF/specter-desktop
|
9317f70f5e0529e1cf19bfd05dfe3777bc084184
|
[
"MIT"
] | 1
|
2021-09-06T16:58:30.000Z
|
2021-09-06T16:58:30.000Z
|
src/cryptoadvance/specter/devices/hwi/jadepy/jade.py
|
GoofyAF/specter-desktop
|
9317f70f5e0529e1cf19bfd05dfe3777bc084184
|
[
"MIT"
] | null | null | null |
src/cryptoadvance/specter/devices/hwi/jadepy/jade.py
|
GoofyAF/specter-desktop
|
9317f70f5e0529e1cf19bfd05dfe3777bc084184
|
[
"MIT"
] | null | null | null |
import cbor
import hashlib
import json
import time
import logging
import collections
import collections.abc
import traceback
import random
import sys
# JadeError
from .jade_error import JadeError
# Low-level comms backends
from .jade_serial import JadeSerialImpl
from .jade_tcp import JadeTCPImpl
# from .jade_ble import JadeBleImpl
# Default serial connection
DEFAULT_SERIAL_DEVICE = "/dev/ttyUSB0"
DEFAULT_BAUD_RATE = 115200
DEFAULT_SERIAL_TIMEOUT = 120
# Default BLE connection
DEFAULT_BLE_DEVICE_NAME = "Jade"
DEFAULT_BLE_SERIAL_NUMBER = None
DEFAULT_BLE_SCAN_TIMEOUT = 60
# 'jade' logger
logger = logging.getLogger("jade")
device_logger = logging.getLogger("jade-device")
# Helper to map bytes-like types into hex-strings
# to make for prettier message-logging
def _hexlify(data):
if data is None:
return None
elif isinstance(data, bytes) or isinstance(data, bytearray):
return data.hex()
elif isinstance(data, list):
return [_hexlify(item) for item in data]
elif isinstance(data, dict):
return {k: _hexlify(v) for k, v in data.items()}
else:
return data
import requests
def _http_request(params):
logger.debug("_http_request: {}".format(params))
# Use the first non-onion url
url = [url for url in params["urls"] if not url.endswith(".onion")][0]
if params["method"] == "GET":
assert "data" not in params, "Cannot pass body to requests.get"
f = requests.get(url)
elif params["method"] == "POST":
data = json.dumps(params["data"])
f = requests.post(url, data)
logger.debug("http_request received reply: {}".format(f.text))
if f.status_code != 200:
logger.error("http error {} : {}".format(f.status_code, f.text))
raise ValueError(f.status_code)
assert params["accept"] == "json"
f = f.json()
return {"body": f}
#
# High-Level Jade Client API
# Builds on a JadeInterface to provide a meaningful API
#
# Either:
# a) use with JadeAPI.create_[serial|ble]() as jade:
# (recommended)
# or:
# b) use JadeAPI.create_[serial|ble], then call connect() before
# using, and disconnect() when finished
# (caveat cranium)
# or:
# c) use ctor to wrap existing JadeInterface instance
# (caveat cranium)
#
class JadeAPI:
def __init__(self, jade):
assert jade is not None
self.jade = jade
def __enter__(self):
self.connect()
return self
def __exit__(self, exc_type, exc, tb):
if exc_type:
logger.error("Exception causing JadeAPI context exit.")
logger.error(exc_type)
logger.error(exc)
traceback.print_tb(tb)
self.disconnect(exc_type is not None)
@staticmethod
def create_serial(device=None, baud=None, timeout=None):
impl = JadeInterface.create_serial(device, baud, timeout)
return JadeAPI(impl)
@staticmethod
def create_ble(device_name=None, serial_number=None, scan_timeout=None, loop=None):
impl = JadeInterface.create_ble(device_name, serial_number, scan_timeout, loop)
return JadeAPI(impl)
# Connect underlying interface
def connect(self):
self.jade.connect()
# Disconnect underlying interface
def disconnect(self, drain=False):
self.jade.disconnect(drain)
# Drain all output from the interface
def drain(self):
self.jade.drain()
# Raise any returned error as an exception
@staticmethod
def _get_result_or_raise_error(reply):
if "error" in reply:
e = reply["error"]
raise JadeError(e.get("code"), e.get("message"), e.get("data"))
return reply["result"]
# Helper to call wrapper interface rpc invoker
def _jadeRpc(
self,
method,
params=None,
inputid=None,
http_request_fn=None,
long_timeout=False,
):
newid = inputid if inputid else str(random.randint(100000, 999999))
request = self.jade.build_request(newid, method, params)
reply = self.jade.make_rpc_call(request, long_timeout)
result = self._get_result_or_raise_error(reply)
# The Jade can respond with a request for interaction with a remote
# http server. This is used for interaction with the pinserver but the
# code below acts as a dumb proxy and simply makes the http request and
# forwards the response back to the Jade.
# Note: the function called to make the http-request can be passed in,
# or it can default to the simple _http_request() function above, if available.
if isinstance(result, collections.abc.Mapping) and "http_request" in result:
this_module = sys.modules[__name__]
make_http_request = http_request_fn or getattr(
this_module, "_http_request", None
)
assert make_http_request, "Default _http_request() function not available"
http_request = result["http_request"]
http_response = make_http_request(http_request["params"])
return self._jadeRpc(
http_request["on-reply"],
http_response["body"],
http_request_fn=make_http_request,
long_timeout=long_timeout,
)
return result
# Get version information from the hw
def get_version_info(self):
return self._jadeRpc("get_version_info")
# Add client entropy to the hw rng
def add_entropy(self, entropy):
params = {"entropy": entropy}
return self._jadeRpc("add_entropy", params)
# OTA new firmware
def ota_update(self, fwcmp, fwlen, chunksize, cb):
cmphasher = hashlib.sha256()
cmphasher.update(fwcmp)
cmphash = cmphasher.digest()
cmplen = len(fwcmp)
# Initiate OTA
params = {"fwsize": fwlen, "cmpsize": cmplen, "cmphash": cmphash}
result = self._jadeRpc("ota", params)
assert result is True
# Write binary chunks
written = 0
while written < cmplen:
remaining = cmplen - written
length = min(remaining, chunksize)
chunk = bytes(fwcmp[written : written + length])
result = self._jadeRpc("ota_data", chunk)
assert result is True
written += length
if cb:
cb(written, cmplen)
# All binary data uploaded
return self._jadeRpc("ota_complete")
# Run (debug) healthcheck on the hw
def run_remote_selfcheck(self):
return self._jadeRpc("debug_selfcheck", long_timeout=True)
# Set the (debug) mnemonic
def set_mnemonic(self, mnemonic, passphrase=None, temporary_wallet=False):
params = {
"mnemonic": mnemonic,
"passphrase": passphrase,
"temporary_wallet": temporary_wallet,
}
return self._jadeRpc("debug_set_mnemonic", params)
# Set the (debug) seed
def set_seed(self, seed, temporary_wallet=False):
params = {"seed": seed, "temporary_wallet": temporary_wallet}
return self._jadeRpc("debug_set_mnemonic", params)
# Override the pinserver details on the hww
def set_pinserver(self, urlA=None, urlB=None, pubkey=None, cert=None):
params = {}
if urlA is not None or urlB is not None:
params["urlA"] = urlA
params["urlB"] = urlB
if pubkey is not None:
params["pubkey"] = pubkey
if cert is not None:
params["certificate"] = cert
return self._jadeRpc("update_pinserver", params)
# Reset the pinserver details on the hww to their defaults
def reset_pinserver(self, reset_details, reset_certificate):
params = {
"reset_details": reset_details,
"reset_certificate": reset_certificate,
}
return self._jadeRpc("update_pinserver", params)
# Trigger user authentication on the hw
# Involves pinserver handshake
def auth_user(self, network, http_request_fn=None):
params = {"network": network}
return self._jadeRpc(
"auth_user", params, http_request_fn=http_request_fn, long_timeout=True
)
# Get xpub given a path
def get_xpub(self, network, path):
params = {"network": network, "path": path}
return self._jadeRpc("get_xpub", params)
# Get registered multisig wallets
def get_registered_multisigs(self):
return self._jadeRpc("get_registered_multisigs")
# Register a multisig wallet
def register_multisig(
self, network, multisig_name, variant, sorted_keys, threshold, signers
):
params = {
"network": network,
"multisig_name": multisig_name,
"descriptor": {
"variant": variant,
"sorted": sorted_keys,
"threshold": threshold,
"signers": signers,
},
}
return self._jadeRpc("register_multisig", params)
# Get receive-address for parameters
def get_receive_address(
self, *args, recovery_xpub=None, csv_blocks=0, variant=None, multisig_name=None
):
if multisig_name is not None:
assert len(args) == 2
keys = ["network", "paths", "multisig_name"]
args += (multisig_name,)
elif variant is not None:
assert len(args) == 2
keys = ["network", "path", "variant"]
args += (variant,)
else:
assert len(args) == 4
keys = [
"network",
"subaccount",
"branch",
"pointer",
"recovery_xpub",
"csv_blocks",
]
args += (recovery_xpub, csv_blocks)
return self._jadeRpc("get_receive_address", dict(zip(keys, args)))
# Sign a message
def sign_message(
self,
path,
message,
use_ae_signatures=False,
ae_host_commitment=None,
ae_host_entropy=None,
):
if use_ae_signatures:
# Anti-exfil protocol:
# We send the signing request and receive the signer-commitment in
# reply once the user confirms.
# We can then request the actual signature passing the ae-entropy.
params = {
"path": path,
"message": message,
"ae_host_commitment": ae_host_commitment,
}
signer_commitment = self._jadeRpc("sign_message", params)
params = {"ae_host_entropy": ae_host_entropy}
signature = self._jadeRpc("get_signature", params)
return signer_commitment, signature
else:
# Standard EC signature, simple case
params = {"path": path, "message": message}
return self._jadeRpc("sign_message", params)
# Get a Liquid master blinding key
def get_master_blinding_key(self):
return self._jadeRpc("get_master_blinding_key")
# Get a Liquid public blinding key for a given script
def get_blinding_key(self, script):
params = {"script": script}
return self._jadeRpc("get_blinding_key", params)
# Get the shared secret to unblind a tx, given the receiving script on
# our side and the pubkey of the sender (sometimes called "nonce" in
# Liquid). Optionally fetch our blinding pubkey also.
def get_shared_nonce(self, script, their_pubkey, include_pubkey=False):
params = {
"script": script,
"their_pubkey": their_pubkey,
"include_pubkey": include_pubkey,
}
return self._jadeRpc("get_shared_nonce", params)
# Get a "trusted" blinding factor to blind an output. Normally the blinding
# factors are generated and returned in the `get_commitments` call, but
# for the last output the VBF must be generated on the host side, so this
# call allows the host to get a valid ABF to compute the generator and
# then the "final" VBF. Nonetheless, this call is kept generic, and can
# also generate VBFs, thus the "type" parameter.
# `hash_prevouts` is computed as specified in BIP143 (double SHA of all
# the outpoints being spent as input. It's not checked right away since
# at this point Jade doesn't know anything about the tx we are referring
# to. It will be checked later during `sign_liquid_tx`.
# `output_index` is the output we are trying to blind.
# `type` can either be "ASSET" or "VALUE" to generate ABFs or VBFs.
def get_blinding_factor(self, hash_prevouts, output_index, type):
params = {
"hash_prevouts": hash_prevouts,
"output_index": output_index,
"type": type,
}
return self._jadeRpc("get_blinding_factor", params)
# Generate the blinding factors and commitments for a given output.
# Can optionally get a "custom" VBF, normally used for the last
# input where the VBF is not random, but generated accordingly to
# all the others.
# `hash_prevouts` and `output_index` have the same meaning as in
# the `get_blinding_factor` call.
# NOTE: the `asset_id` should be passed as it is normally displayed, so
# reversed compared to the "consensus" representation.
def get_commitments(self, asset_id, value, hash_prevouts, output_index, vbf=None):
params = {
"asset_id": asset_id,
"value": value,
"hash_prevouts": hash_prevouts,
"output_index": output_index,
}
if vbf is not None:
params["vbf"] = vbf
return self._jadeRpc("get_commitments", params)
# Common code for sending btc- and liquid- tx-inputs and receiving the
# signatures. Handles standard EC and AE signing schemes.
def _send_tx_inputs(self, base_id, inputs, use_ae_signatures):
if use_ae_signatures:
# Anti-exfil protocol:
# We send one message per input (which includes host-commitment *but
# not* the host entropy) and receive the signer-commitment in reply.
# Once all n input messages are sent, we can request the actual signatures
# (as the user has a chance to confirm/cancel at this point).
# We request the signatures passing the ae-entropy for each one.
# Send inputs one at a time, receiving 'signer-commitment' in reply
signer_commitments = []
host_ae_entropy_values = []
for txinput in inputs:
# ae-protocol - do not send the host entropy immediately
txinput = txinput.copy() # shallow copy
host_ae_entropy_values.append(txinput.pop("ae_host_entropy", None))
base_id += 1
input_id = str(base_id)
reply = self._jadeRpc("tx_input", txinput, input_id)
signer_commitments.append(reply)
# Request the signatures one at a time, sending the entropy
signatures = []
for (i, host_ae_entropy) in enumerate(host_ae_entropy_values, 1):
base_id += 1
sig_id = str(base_id)
params = {"ae_host_entropy": host_ae_entropy}
reply = self._jadeRpc("get_signature", params, sig_id)
signatures.append(reply)
assert len(signatures) == len(inputs)
return list(zip(signer_commitments, signatures))
else:
# Legacy protocol:
# We send one message per input - without expecting replies.
# Once all n input messages are sent, the hw then sends all n replies
# (as the user has a chance to confirm/cancel at this point).
# Then receive all n replies for the n signatures.
# NOTE: *NOT* a sequence of n blocking rpc calls.
# NOTE: at some point this flow should be removed in favour of the one
# above, albeit without passing anti-exfil entropy or commitment data.
# Send all n inputs
requests = []
for txinput in inputs:
base_id += 1
msg_id = str(base_id)
request = self.jade.build_request(msg_id, "tx_input", txinput)
self.jade.write_request(request)
requests.append(request)
time.sleep(0.1)
# Receive all n signatures
signatures = []
for request in requests:
reply = self.jade.read_response()
self.jade.validate_reply(request, reply)
signature = self._get_result_or_raise_error(reply)
signatures.append(signature)
assert len(signatures) == len(inputs)
return signatures
# Sign a Liquid txn
def sign_liquid_tx(
self, network, txn, inputs, commitments, change, use_ae_signatures=False
):
# 1st message contains txn and number of inputs we are going to send.
# Reply ok if that corresponds to the expected number of inputs (n).
base_id = 100 * random.randint(1000, 9999)
params = {
"network": network,
"txn": txn,
"num_inputs": len(inputs),
"trusted_commitments": commitments,
"use_ae_signatures": use_ae_signatures,
"change": change,
}
reply = self._jadeRpc("sign_liquid_tx", params, str(base_id))
assert reply
# Send inputs and receive signatures
return self._send_tx_inputs(base_id, inputs, use_ae_signatures)
# Sign a txn
def sign_tx(self, network, txn, inputs, change, use_ae_signatures=False):
# 1st message contains txn and number of inputs we are going to send.
# Reply ok if that corresponds to the expected number of inputs (n).
base_id = 100 * random.randint(1000, 9999)
params = {
"network": network,
"txn": txn,
"num_inputs": len(inputs),
"use_ae_signatures": use_ae_signatures,
"change": change,
}
reply = self._jadeRpc("sign_tx", params, str(base_id))
assert reply
# Send inputs and receive signatures
return self._send_tx_inputs(base_id, inputs, use_ae_signatures)
#
# Mid-level interface to Jade
# Wraps either a serial or a ble connection
# Calls to send and receive bytes and cbor messages over the interface.
#
# Either:
# a) use wrapped with JadeAPI
# (recommended)
# or:
# b) use with JadeInterface.create_[serial|ble]() as jade:
# ...
# or:
# c) use JadeInterface.create_[serial|ble], then call connect() before
# using, and disconnect() when finished
# (caveat cranium)
# or:
# d) use ctor to wrap existing low-level implementation instance
# (caveat cranium)
#
class JadeInterface:
def __init__(self, impl):
assert impl is not None
self.impl = impl
def __enter__(self):
self.connect()
return self
def __exit__(self, exc_type, exc, tb):
if exc_type:
logger.error("Exception causing JadeInterface context exit.")
logger.error(exc_type)
logger.error(exc)
traceback.print_tb(tb)
self.disconnect(exc_type is not None)
@staticmethod
def create_serial(device=None, baud=None, timeout=None):
if device and JadeTCPImpl.isSupportedDevice(device):
impl = JadeTCPImpl(device)
else:
impl = JadeSerialImpl(
device or DEFAULT_SERIAL_DEVICE,
baud or DEFAULT_BAUD_RATE,
timeout or DEFAULT_SERIAL_TIMEOUT,
)
return JadeInterface(impl)
# @staticmethod
# def create_ble(device_name=None, serial_number=None, scan_timeout=None, loop=None):
# impl = JadeBleImpl(
# device_name or DEFAULT_BLE_DEVICE_NAME,
# serial_number or DEFAULT_BLE_SERIAL_NUMBER,
# scan_timeout or DEFAULT_BLE_SCAN_TIMEOUT,
# loop=loop,
# )
# return JadeInterface(impl)
def connect(self):
self.impl.connect()
def disconnect(self, drain=False):
if drain:
self.drain()
self.impl.disconnect()
def drain(self):
logger.warn("Draining interface...")
drained = bytearray()
finished = False
while not finished:
byte_ = self.impl.read(1)
drained.extend(byte_)
finished = byte_ == b""
if finished or byte_ == b"\n" or len(drained) > 256:
try:
device_logger.warn(drained.decode("utf-8"))
except Exception as e:
# Dump the bytes raw and as hex if decoding as utf-8 failed
device_logger.warn("Raw:")
device_logger.warn(drained)
device_logger.warn("----")
device_logger.warn("Hex dump:")
device_logger.warn(drained.hex())
# Clear and loop to continue collecting
drained.clear()
@staticmethod
def build_request(input_id, method, params=None):
request = {"method": method, "id": input_id}
if params is not None:
request["params"] = params
return request
@staticmethod
def serialise_cbor_request(request):
dump = cbor.dumps(request)
len_dump = len(dump)
if "method" in request and "ota_data" in request["method"]:
msg = "Sending ota_data message {} as cbor of size {}".format(
request["id"], len_dump
)
logger.info(msg)
else:
logger.info(
"Sending: {} as cbor of size {}".format(_hexlify(request), len_dump)
)
return dump
def write(self, bytes_):
logger.debug("Sending: {} bytes".format(len(bytes_)))
wrote = self.impl.write(bytes_)
logger.debug("Sent: {} bytes".format(len(bytes_)))
return wrote
def write_request(self, request):
msg = self.serialise_cbor_request(request)
written = 0
while written < len(msg):
written += self.write(msg[written:])
def read(self, n):
logger.debug("Reading {} bytes...".format(n))
bytes_ = self.impl.read(n)
logger.debug("Received: {} bytes".format(len(bytes_)))
return bytes_
def read_cbor_message(self):
while True:
# 'self' is sufficiently 'file-like' to act as a load source.
# Throws EOFError on end of stream/timeout/lost-connection etc.
message = cbor.load(self)
# A message response (to a prior request)
if "id" in message:
logger.info("Received msg: {}".format(_hexlify(message)))
return message
# A log message - handle as normal
if "log" in message:
response = message["log"].decode("utf-8")
log_methods = {
"E": device_logger.error,
"W": device_logger.warn,
"I": device_logger.info,
"D": device_logger.debug,
"V": device_logger.debug,
}
log_method = device_logger.error
if len(response) > 1 and response[1] == " ":
lvl = response[0]
log_method = log_methods.get(lvl, device_logger.error)
log_method(">> {}".format(response))
else:
# Unknown/unhandled/unexpected message
logger.error("Unhandled message received")
device_logger.error(message)
def read_response(self, long_timeout=False):
while True:
try:
return self.read_cbor_message()
except EOFError as e:
if not long_timeout:
raise
@staticmethod
def validate_reply(request, reply):
assert isinstance(reply, dict) and "id" in reply
assert ("result" in reply) != ("error" in reply)
assert reply["id"] == request["id"] or reply["id"] == "00" and "error" in reply
def make_rpc_call(self, request, long_timeout=False):
# Write outgoing request message
assert isinstance(request, dict)
assert "id" in request and len(request["id"]) > 0
assert "method" in request and len(request["method"]) > 0
assert len(request["id"]) < 16 and len(request["method"]) < 32
self.write_request(request)
# Read and validate incoming message
reply = self.read_response(long_timeout)
self.validate_reply(request, reply)
return reply
| 35.613734
| 92
| 0.605568
|
4a03dc136589f7c161cfe69b25a9a2641e839c09
| 2,413
|
py
|
Python
|
setup.py
|
vam-google/python-iot
|
6054a14c007ea967bcef275fc4aafc70e48b8b5d
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
vam-google/python-iot
|
6054a14c007ea967bcef275fc4aafc70e48b8b5d
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
vam-google/python-iot
|
6054a14c007ea967bcef275fc4aafc70e48b8b5d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os
import setuptools
name = "google-cloud-iot"
description = "Cloud IoT API API client library"
version = "1.0.0"
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
"google-api-core[grpc] >= 1.14.0, < 2.0.0dev",
"grpc-google-iam-v1 >= 0.12.3, < 0.13dev",
'enum34; python_version < "3.4"',
]
package_root = os.path.abspath(os.path.dirname(__file__))
readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
readme = readme_file.read()
packages = [
package for package in setuptools.find_packages() if package.startswith("google")
]
namespaces = ["google"]
if "google.cloud" in packages:
namespaces.append("google.cloud")
setuptools.setup(
name=name,
version=version,
description=description,
long_description=readme,
author="Google LLC",
author_email="googleapis-packages@oogle.com",
license="Apache 2.0",
url="https://github.com/googleapis/python-iot",
classifiers=[
release_status,
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Operating System :: OS Independent",
"Topic :: Internet",
],
platforms="Posix; MacOS X; Windows",
packages=packages,
namespace_packages=namespaces,
install_requires=dependencies,
python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
include_package_data=True,
zip_safe=False,
)
| 32.173333
| 85
| 0.678409
|
4a03dcdd518eaed6815e42f07613893739165cf3
| 668
|
py
|
Python
|
manage.py
|
Ishikashah2510/nirvaas_main
|
5eaf92756d06261a7f555b10aad864a34c9e761b
|
[
"MIT"
] | null | null | null |
manage.py
|
Ishikashah2510/nirvaas_main
|
5eaf92756d06261a7f555b10aad864a34c9e761b
|
[
"MIT"
] | null | null | null |
manage.py
|
Ishikashah2510/nirvaas_main
|
5eaf92756d06261a7f555b10aad864a34c9e761b
|
[
"MIT"
] | 3
|
2020-12-30T11:35:22.000Z
|
2021-01-07T13:10:26.000Z
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'nirvaas_main.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 29.043478
| 76
| 0.681138
|
4a03dd6060b029e5bbc5a601e95e19d16ac521d0
| 2,362
|
py
|
Python
|
src/python/zquantum/core/interfaces/ansatz_utils.py
|
kottmanj/z-quantum-core
|
21752e92e79aafedbfeb6e7ae196bdc2fd5803e4
|
[
"Apache-2.0"
] | null | null | null |
src/python/zquantum/core/interfaces/ansatz_utils.py
|
kottmanj/z-quantum-core
|
21752e92e79aafedbfeb6e7ae196bdc2fd5803e4
|
[
"Apache-2.0"
] | null | null | null |
src/python/zquantum/core/interfaces/ansatz_utils.py
|
kottmanj/z-quantum-core
|
21752e92e79aafedbfeb6e7ae196bdc2fd5803e4
|
[
"Apache-2.0"
] | null | null | null |
from functools import wraps
class _InvalidatingSetter(object):
"""Setter descriptor that sets target object's _parametrized_circuit to None.
The descriptor uses __get__ and __set__ methods. Both of them accept ansatz as a
first argument (in this case).
We just forward the __get__, but in __set__ we set obj._parametrized_circuit to None.
"""
def __init__(self, target):
self.target = target
def __get__(self, ansatz, obj_type):
return self.target.__get__(ansatz, obj_type)
def __set__(self, ansatz, new_obj):
self.target.__set__(ansatz, new_obj)
ansatz._parametrized_circuit = None
def invalidates_parametrized_circuit(target):
"""
Make given target (either property or method) invalidate ansatz's circuit.
It can be used as a decorator, when for some reason `ansatz_property` shouldn't be used.
"""
if isinstance(target, property):
# If we are dealing with a property, return our modified descriptor.
return _InvalidatingSetter(target)
else:
# Methods are functions that take instance as a first argument
# They only change to "bound" methods once the object is instantiated
# Therefore, we are decorating a function of signature _function(ansatz, ...)
@wraps(target)
def _wrapper(ansatz, *args, **kwargs):
# Pass through the arguments, store the returned value for later use
return_value = target(ansatz, *args, **kwargs)
# Invalidate circuit
ansatz._parametrized_circuit = None
# Return original result
return return_value
return _wrapper
class DynamicProperty:
    """One-line getter/setter descriptor backed by ``_<name>`` on the instance.

    The default value is installed lazily on first read if the backing
    attribute does not exist yet.
    """

    def __init__(self, name: str, default_value=None):
        self.default_value = default_value
        self.name = name

    @property
    def attrname(self):
        # Name of the backing slot on the owning instance, e.g. "x" -> "_x".
        return f"_{self.name}"

    def __get__(self, instance, owner):
        # EAFP: read the backing attribute, installing the default on a miss.
        try:
            return getattr(instance, self.attrname)
        except AttributeError:
            setattr(instance, self.attrname, self.default_value)
            return self.default_value

    def __set__(self, instance, value):
        setattr(instance, self.attrname, value)
def ansatz_property(name: str, default_value=None):
    """Declare an ansatz attribute whose assignment invalidates the cached circuit.

    Combines ``DynamicProperty`` storage (backed by ``_<name>`` on the
    instance, with a lazily-installed default) with the ``_InvalidatingSetter``
    wrapper, so every write to the property also resets the owning ansatz's
    ``_parametrized_circuit`` to ``None``.
    """
    return _InvalidatingSetter(DynamicProperty(name, default_value))
| 34.231884
| 92
| 0.681202
|
4a03de4bf0252fe09c54bf1dce80222a464b23ab
| 554
|
py
|
Python
|
zuoti/runoob/6_fibonacci_sequence.py
|
Ethan16/python_misc
|
29cf2fdbd7529a05bcf35768e0244e634fe2ae7a
|
[
"Apache-2.0"
] | 1
|
2019-05-04T09:26:29.000Z
|
2019-05-04T09:26:29.000Z
|
zuoti/runoob/6_fibonacci_sequence.py
|
Ethan16/python_misc
|
29cf2fdbd7529a05bcf35768e0244e634fe2ae7a
|
[
"Apache-2.0"
] | null | null | null |
zuoti/runoob/6_fibonacci_sequence.py
|
Ethan16/python_misc
|
29cf2fdbd7529a05bcf35768e0244e634fe2ae7a
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
@version: 1.0
@author: James
@license: Apache Licence
@contact: euler52201044@sina.com
@file: 6_fibonacci_sequence.py
@time: 2019/5/2 下午4:20
@description: 题目:斐波那契数列。
程序分析:斐波那契数列(Fibonacci sequence),又称黄金分割数列,指的是这样一个数列:0、1、1、2、3、5、8、13、21、34、……。
在数学上,费波那契数列是以递归的方法来定义:
F0 = 0 (n=0)
F1 = 1 (n=1)
Fn = F[n-1]+ F[n-2](n=>2)
"""
def fibonacci(n):
    """Return the n-th Fibonacci number with the convention F0 = 0, F1 = 1.

    Iterative, O(n) time and O(1) space.  The previous implementation seeded
    ``a, b = 1, 1`` and therefore returned 1 for ``n == 0``, contradicting the
    F0 = 0 definition stated in the module docstring; it also silently
    returned 1 for negative ``n``.

    Raises:
        ValueError: if ``n`` is negative.
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a
# Read the desired index from stdin and print the matching Fibonacci number.
num = int(input('Fibonacci index : \n'))
print('Fibonacci number : ')
print(fibonacci(num))
| 19.103448
| 77
| 0.611913
|
4a03de7b2faf9b6cc0095c48fec69a44c260024d
| 4,050
|
py
|
Python
|
forest/benchmarking/tests/test_robust_phase_estimation.py
|
stjordanis/forest-benchmarking
|
f9ad9701c2d253de1a0c922d7220ed7de75ac685
|
[
"Apache-2.0"
] | 40
|
2019-01-25T18:35:24.000Z
|
2022-03-13T11:21:18.000Z
|
forest/benchmarking/tests/test_robust_phase_estimation.py
|
stjordanis/forest-benchmarking
|
f9ad9701c2d253de1a0c922d7220ed7de75ac685
|
[
"Apache-2.0"
] | 140
|
2019-01-25T20:09:02.000Z
|
2022-03-12T01:08:01.000Z
|
forest/benchmarking/tests/test_robust_phase_estimation.py
|
stjordanis/forest-benchmarking
|
f9ad9701c2d253de1a0c922d7220ed7de75ac685
|
[
"Apache-2.0"
] | 22
|
2019-02-01T13:18:35.000Z
|
2022-01-12T15:03:13.000Z
|
import numpy as np
from numpy import pi
from pyquil.gates import I, H, RY, RZ, RX
from pyquil.noise import damping_after_dephasing
from pyquil.quil import Program
from pyquil.quilbase import Measurement
import forest.benchmarking.robust_phase_estimation as rpe
from forest.benchmarking.observable_estimation import ObservablesExperiment, estimate_observables
def test_expectations_at_depth(qvm):
    """Repeated RZ(pi/2) cycles the measured expectations through 1, 0, -1, 0.

    For depths 0..4 the X/Y observables should step through the 4-cycle,
    with the Y measurement lagging the X measurement by one step.
    """
    qvm.qam.random_seed = 5
    qubit = 0
    cycle = [1., 0, -1., 0]
    for depth in range(5):
        prep, meas, settings = rpe.all_eigenvector_prep_meas_settings(
            (qubit,), I(qubit))
        # depth-many quarter turns about Z between preparation and measurement
        rotations = Program()
        for _ in range(depth):
            rotations.inst(RZ(pi / 2, qubit))
        program = Program(prep) + rotations + Program(meas)
        experiment = ObservablesExperiment(list(settings), program)
        for result in estimate_observables(qvm, experiment):
            direction = result.setting.observable[qubit]
            # Y lags X by one quarter turn in the 4-cycle above.
            offset = depth - 1 if direction == 'Y' else depth
            assert np.allclose(cycle[offset % 4], result.expectation, atol=.05)
def test_noiseless_rpe(qvm):
    """Noiseless RPE recovers an RZ angle within twice the variance bound."""
    qvm.qam.random_seed = 5
    true_angle = pi / 4 - .5  # arbitrary target rotation
    qubit = 0
    depths = 7
    factor = 10
    experiments = rpe.generate_rpe_experiments(
        RZ(true_angle, qubit),
        *rpe.all_eigenvector_prep_meas_settings([qubit], I(qubit)),
        num_depths=depths)
    data = rpe.acquire_rpe_data(qvm, experiments, multiplicative_factor=factor)
    estimate = rpe.robust_phase_estimate(data, [qubit])
    bound = 2 * np.sqrt(rpe.get_variance_upper_bound(depths, factor))
    assert np.abs(true_angle - estimate) < bound
def test_noisy_rpe(qvm):
    """RPE recovers rotation angles to ~.1 rad under damping/dephasing plus
    asymmetric readout noise, scanning several angles of a conjugated RZ."""
    qvm.qam.random_seed = 5
    # Angles sampled away from 0 and 2*pi (by 2pi/9) to avoid wrap-around.
    angles = pi * np.linspace(2 / 9, 2.0 - 2 / 9, 3)
    add_error = .15
    q = 0

    def add_damping_dephasing_noise(prog, T1, T2, gate_time):
        # Rebuild prog with a noisy identity ("noise") appended after every
        # non-measurement instruction to model gate-time decoherence.
        p = Program()
        p.defgate("noise", np.eye(2))
        p.define_noisy_gate("noise", [q], damping_after_dephasing(T1, T2, gate_time))
        for elem in prog:
            p.inst(elem)
            if isinstance(elem, Measurement):
                continue  # skip measurement
            p.inst(("noise", q))
        return p

    def add_noise_to_experiments(expts, t1, t2, p00, p11, q):
        # Mutates each experiment in place: gate noise plus asymmetric
        # readout error (p00/p11) on the measured qubit.
        gate_time = 200 * 10 ** (-9)
        for ex in expts:
            ex.program = add_damping_dephasing_noise(ex.program, t1, t2,
                                                     gate_time).define_noisy_readout(q, p00, p11)

    tolerance = .1
    # scan over each angle and check that RPE correctly predicts the angle to within .1 radians
    for angle in angles:
        # RZ(angle) conjugated by RY(+/- pi/4), i.e. a rotation about a tilted axis.
        RH = Program(RY(-pi / 4, q)).inst(RZ(angle, q)).inst(RY(pi / 4, q))
        evecs = rpe.bloch_rotation_to_eigenvectors(pi / 4, q)
        cob_matrix = rpe.get_change_of_basis_from_eigvecs(evecs)
        cob = rpe.change_of_basis_matrix_to_quil(qvm, [q], cob_matrix)
        prep, meas, settings = rpe.all_eigenvector_prep_meas_settings([q], cob)
        expts = rpe.generate_rpe_experiments(RH, prep, meas, settings, num_depths=7)
        # T1 = 25us, T2 = 20us; readout fidelities p00 = .92, p11 = .87
        add_noise_to_experiments(expts, 25 * 10 ** (-6.), 20 * 10 ** (-6.), .92, .87, q)
        results = rpe.acquire_rpe_data(qvm, expts, multiplicative_factor=5.,
                                       additive_error=add_error)
        phase_estimate = rpe.robust_phase_estimate(results, [q])
        assert np.allclose(phase_estimate, angle, atol=tolerance)
def test_do_rpe(qvm):
    """End-to-end do_rpe on two qubits recovers each RX angle modulo 2*pi."""
    qubits = [0, 1]
    groups = [(q,) for q in qubits]
    # Hadamard change of basis diagonalizes RX for each qubit.
    cobs = [H(q) for q in qubits]
    for target_angle in (-pi / 2, pi):
        rotation = Program([RX(target_angle, q) for q in qubits])
        phases, _expts, _ress = rpe.do_rpe(qvm, rotation, cobs, groups,
                                           num_depths=6)
        expected = target_angle % (2 * pi)
        for group in groups:
            assert np.allclose(phases[group], expected, atol=.1)
| 41.326531
| 99
| 0.624198
|
4a03dece1659a6e7d9da3db66fbeddf5c6b72f09
| 9,441
|
py
|
Python
|
libs/client/cornflow_client/tests/unit/test_schema_manager.py
|
ggsdc/corn
|
4c17c46a70f95b8882bcb6a55ef7daa1f69e0456
|
[
"MIT"
] | 2
|
2020-07-09T20:58:47.000Z
|
2020-07-20T20:40:46.000Z
|
cornflow-client/cornflow_client/tests/unit/test_schema_manager.py
|
baobabsoluciones/cornflow
|
bd7cae22107e5fe148704d5f41d4f58f9c410b40
|
[
"Apache-2.0"
] | 2
|
2022-03-31T08:42:10.000Z
|
2022-03-31T12:05:23.000Z
|
libs/client/cornflow_client/tests/unit/test_schema_manager.py
|
ggsdc/corn
|
4c17c46a70f95b8882bcb6a55ef7daa1f69e0456
|
[
"MIT"
] | null | null | null |
"""
"""
#
import json
import os
#
from unittest import TestCase
#
from cornflow_client import SchemaManager
from cornflow_client.constants import DATASCHEMA
from cornflow_client.core.tools import load_json
from cornflow_client.tests.const import dict_example
class TestSchemaManager(TestCase):
    """Unit tests for SchemaManager: JSON-schema <-> dict/marshmallow
    conversion, data/schema validation, error reporting and Excel templates.

    Fixes over the previous version: two tests shadowed the builtin ``bool``
    with a local variable (renamed to ``is_valid``); ``setUp`` carried a dead
    ``pass``; ``test_non_mandatory`` bound an unused ``err`` and ended with a
    pointless bare ``return``.
    """

    def setUp(self):
        # Fixture files are resolved relative to this test module.
        self.root_data = os.path.join(os.path.dirname(__file__), "../data")

    def get_data_file(self, filename):
        """Absolute path of a fixture in the unit-test data directory."""
        return os.path.join(self.root_data, filename)

    def get_project_data_file(self, filename):
        """Absolute path of a fixture in the project-level data directory."""
        return os.path.join(self.root_data, "../../data", filename)

    def test_schema_dict(self):
        """jsonschema_to_dict reproduces every sub-schema of the pulp example."""
        sm = SchemaManager.from_filepath(
            self.get_project_data_file("../data/pulp_json_schema.json")
        )
        dict_schema = sm.jsonschema_to_dict()
        self.assertCountEqual(
            dict_schema["CoefficientSchema"], dict_example["CoefficientSchema"]
        )
        self.assertCountEqual(
            dict_schema["ObjectiveSchema"], dict_example["ObjectiveSchema"]
        )
        self.assertCountEqual(
            dict_schema["ConstraintsSchema"], dict_example["ConstraintsSchema"]
        )
        self.assertCountEqual(
            dict_schema["VariablesSchema"], dict_example["VariablesSchema"]
        )
        self.assertCountEqual(
            dict_schema["ParametersSchema"], dict_example["ParametersSchema"]
        )
        self.assertCountEqual(dict_schema["Sos1Schema"], dict_example["Sos1Schema"])
        self.assertCountEqual(dict_schema["Sos2Schema"], dict_example["Sos2Schema"])
        self.assertCountEqual(dict_schema[DATASCHEMA], dict_example[DATASCHEMA])
        # Conversion to a marshmallow/flask schema must not raise.
        sm.jsonschema_to_flask()

    def test_schema_validation(self):
        """A valid pulp data file passes validate_file."""
        sm = SchemaManager.from_filepath(
            self.get_project_data_file("pulp_json_schema.json")
        )
        val = sm.validate_file(self.get_data_file("pulp_example_data.json"))
        self.assertTrue(val)
        sm.jsonschema_to_flask()

    def test_schema_validation_2(self):
        """HK data validates; dict conversion exposes the expected fields."""
        sm = SchemaManager.from_filepath(self.get_data_file("hk_data_schema.json"))
        val = sm.validate_file(self.get_data_file("hk_data_input.json"))
        self.assertTrue(val)
        # Test that it can be transformed into a dict
        dict_schema = sm.jsonschema_to_dict()
        self.assertEqual(
            dict_schema["JobsSchema"][0],
            {
                "name": "id",
                "type": "Integer",
                "required": True,
                "allow_none": False,
                "many": False,
            },
        )
        self.assertEqual(
            dict_schema["JobsSchema"][1],
            {"name": "successors", "type": "Integer", "many": True, "required": True},
        )
        marshmallow_object = sm.dict_to_flask()
        self.assertEqual(
            marshmallow_object().fields.keys(),
            {"resources", "needs", "jobs", "durations"},
        )
        # Loading the real data through the generated schema must not raise.
        with open(self.get_data_file("hk_data_input.json"), "r") as f:
            content = json.load(f)
        marshmallow_object().load(content)

    def test_check_schema(self):
        """A well-formed schema passes validate_schema."""
        sm = SchemaManager.from_filepath(
            self.get_project_data_file("vrp_solution_schema.json")
        )
        val = sm.validate_schema()
        self.assertTrue(val)
        sm.jsonschema_to_flask()

    def test_check_wrong_schema_1(self):
        """Dropping a 'required' list makes the schema invalid."""
        schema = load_json(self.get_project_data_file("vrp_solution_schema.json"))
        del schema["properties"]["routes"]["items"]["required"]
        sm = SchemaManager(schema)
        val = sm.validate_schema()
        self.assertFalse(val)

    def test_check_wrong_schema_2(self):
        """An unknown property type makes the schema invalid."""
        schema = load_json(self.get_project_data_file("vrp_solution_schema.json"))
        schema["properties"]["routes"]["items"]["properties"]["pos"][
            "type"
        ] = "not_a_type"
        sm = SchemaManager(schema)
        val = sm.validate_schema()
        self.assertFalse(val)

    def test_check_wrong_schema_3(self):
        """A property with no type at all makes the schema invalid."""
        schema = load_json(self.get_project_data_file("vrp_solution_schema.json"))
        del schema["properties"]["routes"]["items"]["properties"]["pos"]["type"]
        sm = SchemaManager(schema)
        val = sm.validate_schema()
        self.assertFalse(val)

    def test_validation_errors(self):
        """Bad data yields exactly the expected validation errors."""
        sm = SchemaManager.from_filepath(
            self.get_project_data_file("pulp_json_schema.json")
        )
        data = {"objective": [], "constraints": [], "variables": []}
        is_valid = sm.validate_data(data)
        val = sm.get_validation_errors(data)
        self.assertFalse(is_valid)
        # The two errors are: objective should be an object and parameters is missing.
        # we do not impose sos1 and sos2 to be there.
        self.assertEqual(len(val), 2)
        self.assertEqual(val[0].message, "[] is not of type 'object'")
        sm.jsonschema_to_flask()

    def test_validation_errors2(self):
        """Malformed items add one error each on top of the structural ones."""
        sm = SchemaManager.from_filepath(
            self.get_project_data_file("pulp_json_schema.json")
        )
        data = {
            "objective": [],
            "constraints": ["notAConstraint"],
            "variables": ["notAVariable"],
        }
        val = sm.get_validation_errors(data)
        # errors are: objective should be an object, there's a wrongly formatted constraint and variable.
        # and parameters is missing
        self.assertEqual(len(val), 4)
        sm.jsonschema_to_flask()

    def test_validation_errors3(self):
        """validate_file and get_file_errors agree on a bad data file."""
        sm = SchemaManager.from_filepath(self.get_data_file("hk_data_schema.json"))
        is_valid = sm.validate_file(self.get_data_file("data_input_bad.json"))
        val = sm.get_file_errors(self.get_data_file("data_input_bad.json"))
        self.assertFalse(is_valid)
        self.assertEqual(len(val), 2)
        sm.jsonschema_to_flask()

    def test_schema_names(self):
        """Name collisions between sub-schemas are disambiguated with suffixes."""
        sm = SchemaManager.from_filepath(self.get_data_file("name_problem_schema.json"))
        dict_schema = sm.jsonschema_to_dict()
        self.assertEqual(len(dict_schema["CoefficientsSchema"]), 2)
        self.assertEqual(len(dict_schema["Coefficients1Schema"]), 1)
        sm.jsonschema_to_flask()

    def test_array_integer(self):
        """Schemas with integer arrays convert to flask without raising."""
        sm = SchemaManager.from_filepath(
            self.get_data_file("graph_coloring_input.json")
        )
        sm.jsonschema_to_flask()

    def test_non_mandatory(self):
        """Data missing non-mandatory fields still loads through the schema."""
        sm = SchemaManager.from_filepath(self.get_data_file("instance-hackathon2.json"))
        schema_marsh = sm.jsonschema_to_flask()
        with open(self.get_data_file("hk_data_input.json"), "r") as f:
            content = json.load(f)
        schema_marsh().load(content)

    def test_flask_schema_extra_info(self):
        """Extra keys not present in the schema are tolerated on load."""
        with open(self.get_data_file("pulp_example_data.json"), "r") as f:
            content = json.load(f)
        sm = SchemaManager.from_filepath(
            self.get_project_data_file("pulp_json_schema.json")
        )
        marshmallow_object = sm.jsonschema_to_flask()
        content["new_param"] = 1
        content["objective"]["another_something_new"] = 1
        marshmallow_object().load(content)

    def test_to_excel(self):
        """to_template yields per-table dicts convertible to DataFrames."""
        sm = SchemaManager.from_filepath(
            self.get_data_file("graph_coloring_input.json")
        )
        template = sm.to_template()
        import pandas as pd

        dataframes = {k: pd.DataFrame.from_dict(v) for k, v in template.items()}
        self.assertEqual(dataframes["pairs"]["n1"][0], 1)
        self.assertEqual(dataframes["pairs"].shape, (1, 2))
        self.assertEqual(dataframes["_README"].shape, (1, 2))
        self.assertEqual(dataframes["_TYPES"].shape, (2, 3))

    def test_to_excel2(self):
        """Template shapes for the HK schema match the fixture contents."""
        sm = SchemaManager.from_filepath(self.get_data_file("hk_data_schema.json"))
        template = sm.to_template()
        import pandas as pd

        dataframes = {k: pd.DataFrame.from_dict(v) for k, v in template.items()}
        self.assertEqual(dataframes["durations"].job[0], 1)
        self.assertEqual(dataframes["durations"].shape, (1, 3))
        self.assertEqual(dataframes["jobs"].shape, (1, 2))
        self.assertEqual(dataframes["_README"].shape, (4, 2))
        self.assertEqual(dataframes["_TYPES"].shape, (11, 3))

    def test_to_excel3(self):
        """A configuration-style object schema yields a two-row template."""
        path = self.get_data_file("../../data/empty_schema.json")
        schema = SchemaManager.load_json(path)
        schema["properties"] = dict(
            configuration=dict(
                type="object",
                properties=dict(
                    option1=dict(type="integer"), option2=dict(type="string")
                ),
            )
        )
        sm = SchemaManager(schema)
        template = sm.to_template()
        import pandas as pd

        dataframes = {k: pd.DataFrame.from_dict(v) for k, v in template.items()}
        self.assertEqual(len(dataframes["configuration"]), 2)
        self.assertEqual(dataframes["configuration"].iloc[1, 1], "string")
        self.assertEqual(dataframes["configuration"].iloc[0, 1], 1)
        self.assertEqual(dataframes["_README"].shape, (1, 2))
        self.assertEqual(dataframes["_TYPES"].shape, (2, 3))

    # TODO: fix this test and uncomment
    # def test_list_of_lists(self):
    #     sm = SchemaManager.from_filepath(self.get_data_file('graph_coloring2_input.json'))
    #     sm.jsonschema_to_flask()
| 38.692623
| 105
| 0.633937
|
4a03df052cd9499c67acb4c2ef25e34f3b10e7df
| 338
|
py
|
Python
|
server/src/tests/samples/assert1.py
|
AndrewLester/pyright
|
a47c20a7d74dd88f06396d66e201a684571d99b0
|
[
"MIT"
] | null | null | null |
server/src/tests/samples/assert1.py
|
AndrewLester/pyright
|
a47c20a7d74dd88f06396d66e201a684571d99b0
|
[
"MIT"
] | 1
|
2021-08-31T20:37:43.000Z
|
2021-08-31T20:37:43.000Z
|
server/src/tests/samples/assert1.py
|
AndrewLester/pyright
|
a47c20a7d74dd88f06396d66e201a684571d99b0
|
[
"MIT"
] | null | null | null |
# This sample tests the ability to detect errant assert calls
# that are always true - the "reportAssertAlwaysTrue" option.
# NOTE: the "wrong" asserts below are intentional static-analysis fixtures;
# do not "fix" them.
from typing import Any, Tuple

# This should generate a warning.
# asserting a non-empty tuple literal (condition, message) is always true.
assert (1 != 2, "Error message")

a: Tuple[Any, ...] = (2, 3)
# declared as a variable-length tuple type, so no warning is expected here.
assert a

b = ()
# empty tuple: falsy, so this assert is not "always true".
assert b

c = (2, 3)
# This should generate a warning.
assert c
| 14.695652
| 61
| 0.680473
|
4a03dfcef82b26c04a4f7276a7e17db3e513eb2a
| 1,646
|
py
|
Python
|
extract_raw_txt.py
|
opensmartmesh/osmesh-analytics-utils
|
074a4b739b30535ff9530f4a0864ac9d1b9932b6
|
[
"MIT"
] | null | null | null |
extract_raw_txt.py
|
opensmartmesh/osmesh-analytics-utils
|
074a4b739b30535ff9530f4a0864ac9d1b9932b6
|
[
"MIT"
] | null | null | null |
extract_raw_txt.py
|
opensmartmesh/osmesh-analytics-utils
|
074a4b739b30535ff9530f4a0864ac9d1b9932b6
|
[
"MIT"
] | null | null | null |
# Dependencies : pandas
# Install pandas with : pip install pandas
import pandas as pd
import glob
from os.path import basename, dirname, join
import datetime
def build_datetime(month, day, time_str, year=2017):
    """Combine month, day and an HH:MM:SS time string into a datetime.

    Year defaults to 2017, matching the data set this script was written for.
    """
    stamp = f"{year}/{month}/{day} {time_str}"
    # %X is the locale's time representation (HH:MM:SS in the C locale).
    return datetime.datetime.strptime(stamp, '%Y/%m/%d %X')
def get_month_from_file_name(file_name):
    """Return the month, encoded as the name of the file's parent directory."""
    parent_dir = dirname(file_name)
    return basename(parent_dir)
def parse_file(file_name):
    """Read one tab-separated raw file into a DataFrame with a 'date' column.

    The raw columns are day, time, value; the month comes from the parent
    directory name.  The day/time columns are folded into a single datetime
    column and dropped.  Prints the head of the frame for inspection.
    """
    month_str = get_month_from_file_name(file_name)
    df = pd.read_csv(file_name, sep='\t', names=['day', 'time', 'value'])
    df['date'] = [
        build_datetime(month_str, day_value, clock)
        for day_value, clock in zip(df['day'], df['time'])
    ]
    df = df.drop(columns=['time', 'day'])
    print(df.head())
    return df
def get_full_date_and_convert_to_csv(input_file_dir, output_file_dir):
    """Parse every *.txt under input_file_dir and write it as CSV.

    Output files keep the input base name with the extension swapped to csv.
    """
    for txt_path in glob.glob(input_file_dir + '/*txt'):
        frame = parse_file(txt_path)
        csv_name = basename(txt_path)[:-3] + 'csv'
        frame.to_csv(join(output_file_dir, csv_name), index=False)
if __name__ == "__main__":
    # Usage: python extract_raw_txt.py input_dir output_dir
    import sys

    if len(sys.argv) == 3:
        # input_dir: raw tab-separated .txt files with one column for day and
        # one for time; the month is the parent directory name and the year
        # information defaults to 2017.
        input_file_dir = sys.argv[1]
        # output_dir: destination for csv files with a single combined date column
        output_file_dir = sys.argv[2]
        get_full_date_and_convert_to_csv(input_file_dir, output_file_dir)
    else:
        # Previously a wrong argument count failed silently; report usage instead.
        sys.exit("Usage: python extract_raw_txt.py <input_dir> <output_dir>")
| 28.877193
| 132
| 0.671932
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.