| commit (string, 40) | subject (string, 1–3.25k) | old_file (string, 4–311) | new_file (string, 4–311) | old_contents (string, 0–26.3k) | lang (string, 3 classes) | proba (float64, 0–1) | diff (string, 0–7.82k) |
|---|---|---|---|---|---|---|---|
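The `diff` cells below store unified-diff hunks with newlines and other special characters percent-encoded (`%0A` is a newline, `%22` a double quote, `%5B`/`%5D` brackets, `%7B`/`%7D` braces). A minimal sketch of decoding a cell for inspection, using only the standard library; the sample hunk is taken from the first row:

```python
from urllib.parse import unquote

# One encoded hunk from the first row's diff cell.
encoded = "@@ -437,17 +437,19 @@\n usage\n-=\n+ =\n %22%22%0A    p"
print(unquote(encoded))  # restores the '""' literal and real newlines
```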
14a0293224e78875e74bfc6491017d6059aa07f5
|
Enhance PEP8
|
bin/tftpy_server.py
|
bin/tftpy_server.py
|
#!/usr/bin/env python
# vim: ts=4 sw=4 et ai:
# -*- coding: utf8 -*-

import logging
import sys
from optparse import OptionParser

import tftpy

log = logging.getLogger('tftpy')
log.setLevel(logging.INFO)

# console handler
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
default_formatter = logging.Formatter('[%(asctime)s] %(message)s')
handler.setFormatter(default_formatter)
log.addHandler(handler)

def main():
    usage=""
    parser = OptionParser(usage=usage)
    parser.add_option('-i',
                      '--ip',
                      type='string',
                      help='ip address to bind to (default: INADDR_ANY)',
                      default="")
    parser.add_option('-p',
                      '--port',
                      type='int',
                      help='local port to use (default: 69)',
                      default=69)
    parser.add_option('-r',
                      '--root',
                      type='string',
                      help='path to serve from',
                      default=None)
    parser.add_option('-q',
                      '--quiet',
                      action='store_true',
                      default=False,
                      help="Do not log unless it is critical")
    parser.add_option('-d',
                      '--debug',
                      action='store_true',
                      default=False,
                      help='upgrade logging from info to debug')
    options, args = parser.parse_args()

    if options.debug:
        log.setLevel(logging.DEBUG)
        # increase the verbosity of the formatter
        debug_formatter = logging.Formatter('[%(asctime)s%(msecs)03d] %(levelname)s [%(name)s:%(lineno)s] %(message)s')
        handler.setFormatter(debug_formatter)
    elif options.quiet:
        log.setLevel(logging.WARNING)

    if not options.root:
        parser.print_help()
        sys.exit(1)

    server = tftpy.TftpServer(options.root)

    try:
        server.listen(options.ip, options.port)
    except tftpy.TftpException as err:
        sys.stderr.write("%s\n" % str(err))
        sys.exit(1)
    except KeyboardInterrupt:
        pass

if __name__ == '__main__':
    main()
|
Python
| 0
|
@@ -415,16 +415,17 @@
ndler)%0A%0A
+%0A
def main
@@ -437,17 +437,19 @@
usage
-=
+ =
%22%22%0A p
@@ -2167,16 +2167,17 @@
pass%0A%0A
+%0A
if __nam
|
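The hunks above change whitespace only: spaces around `=` in the `usage` assignment and a second blank line before the top-level `def` statements. The two PEP 8 rules in miniature (an illustration, not part of the dataset):

```python
usage=""    # before: no spaces around "=" in a plain assignment
usage = ""  # after: one space on each side, per PEP 8


def main():  # after: two blank lines separate top-level definitions
    pass
```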
6c8757aa3a98dcd692904ed2e9993044ff22c017
|
Modify formats
|
test/test_subproc_runner.py
|
test/test_subproc_runner.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import errno
import os
import platform
import re
import subprocess
import sys
from subprocess import PIPE, CalledProcessError
import pytest
from typepy import is_not_null_string, is_null_string
from subprocrunner import SubprocessRunner
from subprocrunner._logger._null_logger import NullLogger
os_type = platform.system()
if os_type == "Linux":
list_command = "ls"
list_command_errno = [errno.ENOENT]
elif os_type == "Darwin":
list_command = "ls"
list_command_errno = [1, errno.ENOENT]
elif os_type == "Windows":
list_command = "dir"
list_command_errno = [1]
else:
raise NotImplementedError(os_type)
class Test_SubprocessRunner_run:
@pytest.mark.parametrize(
["command", "dry_run", "expected"],
[
[list_command, False, [0]],
[list_command, True, [0]],
[list_command + " __not_exist_dir__", False, list_command_errno],
[list_command + " __not_exist_dir__", True, [0]],
],
)
def test_normal(self, monkeypatch, command, dry_run, expected):
r = SubprocessRunner(command, dry_run=dry_run)
r.run()
if not dry_run:
print(r.stderr, file=sys.stderr)
assert r.returncode in expected
monkeypatch.setattr("subprocrunner._logger._logger.logger", NullLogger())
r.run()
@pytest.mark.skipif(platform.system() == "Windows", reason="platform dependent tests")
@pytest.mark.parametrize(
["command", "expected"], [[list_command + " -l", 0], [[list_command, "-l"], 0]]
)
def test_command(self, command, expected):
assert SubprocessRunner(command).run() == expected
@pytest.mark.parametrize(
["command", "expected"], [["echo test", "test"], [["echo", "test"], "test"]]
)
def test_stdout(self, command, expected):
runner = SubprocessRunner(command)
runner.run()
assert runner.command == command
assert isinstance(runner.command_str, str)
assert runner.returncode == 0
assert runner.stdout.strip() == expected
assert is_null_string(runner.stderr)
@pytest.mark.skip
@pytest.mark.parametrize(
["command", "ignore_stderr_regexp", "out_regexp", "expected"],
[
[list_command + " __not_exist_dir__", None, re.compile("WARNING"), True],
[
list_command + " __not_exist_dir__",
re.compile(re.escape("__not_exist_dir__")),
re.compile("WARNING"),
False,
],
],
)
def test_stderr(self, capsys, command, ignore_stderr_regexp, out_regexp, expected):
from loguru import logger
import subprocrunner
logger.remove()
logger.add(sys.stderr, level="DEBUG")
logger.enable("test")
subprocrunner.set_logger(True)
runner = SubprocessRunner(command, ignore_stderr_regexp=ignore_stderr_regexp)
runner.run()
assert is_null_string(runner.stdout.strip())
assert is_not_null_string(runner.stderr.strip())
assert runner.returncode != 0
out, err = capsys.readouterr()
print("[sys stdout]\n{}\n".format(out))
print("[sys stderr]\n{}\n".format(err))
print("[proc stdout]\n{}\n".format(runner.stdout))
print("[proc stderr]\n{}\n".format(runner.stderr))
actual = out_regexp.search(err) is not None
assert actual == expected
@pytest.mark.skipif(platform.system() == "Windows", reason="platform dependent tests")
@pytest.mark.parametrize(
["command", "ignore_stderr_regexp", "expected"],
[
[[list_command, "__not_exist_dir__"], None, CalledProcessError],
[[list_command, "__not_exist_dir__"], re.compile(re.escape("__not_exist_dir__")), None],
],
)
def test_stderr_check(self, command, ignore_stderr_regexp, expected):
runner = SubprocessRunner(command, ignore_stderr_regexp=ignore_stderr_regexp)
if ignore_stderr_regexp:
runner.run(check=True)
else:
with pytest.raises(expected):
runner.run(check=True)
def test_timeout_kwarg(self, mocker):
mocked_communicate = mocker.patch("subprocess.Popen.communicate")
mocked_communicate.return_value = ("", "")
mocker.patch("subprocrunner.Which.verify")
runner = SubprocessRunner("dummy")
runner.run(timeout=1)
mocked_communicate.assert_called_with(timeout=1)
def test_unicode(self, monkeypatch):
def monkey_communicate(input=None, timeout=None):
return ("", "'dummy' は、内部コマンドまたは外部コマンド、" "操作可能なプログラムまたはバッチ ファイルとして認識されていません")
monkeypatch.setattr(subprocess.Popen, "communicate", monkey_communicate)
runner = SubprocessRunner(list_command)
runner.run()
class Test_SubprocessRunner_popen:
@pytest.mark.parametrize(
["command", "environ", "expected"],
[["hostname", None, 0], ["hostname", dict(os.environ), 0]],
)
def test_normal(self, command, environ, expected):
proc = SubprocessRunner(command).popen(env=environ)
ret_stdout, ret_stderr = proc.communicate()
assert is_not_null_string(ret_stdout)
assert is_null_string(ret_stderr)
assert proc.returncode == expected
@pytest.mark.skipif(platform.system() == "Windows", reason="platform dependent tests")
@pytest.mark.parametrize(["command", "pipe_input", "expected"], [["grep a", b"aaa", 0]])
def test_normal_stdin(self, command, pipe_input, expected):
proc = SubprocessRunner(command).popen(PIPE)
ret_stdout, ret_stderr = proc.communicate(input=pipe_input)
assert is_not_null_string(ret_stdout)
assert is_null_string(ret_stderr)
assert proc.returncode == expected
class Test_SubprocessRunner_command_history:
@pytest.mark.parametrize(
["command", "dry_run", "expected"], [[list_command, False, 0], [list_command, True, 0]]
)
def test_normal(self, command, dry_run, expected):
SubprocessRunner.is_save_history = False
SubprocessRunner.clear_history()
loop_count = 3
for _i in range(loop_count):
SubprocessRunner(command, dry_run=dry_run).run()
assert len(SubprocessRunner.get_history()) == 0
SubprocessRunner.is_save_history = True
for _i in range(loop_count):
SubprocessRunner(command, dry_run=dry_run).run()
assert len(SubprocessRunner.get_history()) == loop_count
|
Python
| 0.000001
|
@@ -1789,18 +1789,39 @@
ected%22%5D,
+%0A %5B%0A
-%5B
%5B%22echo t
@@ -1834,16 +1834,28 @@
%22test%22%5D,
+%0A
%5B%5B%22echo
@@ -1873,17 +1873,28 @@
%22test%22%5D
-%5D
+,%0A %5D,
%0A )%0A
|
dabbf0b5796a4d16bdd588e9d8c541c1f3c8559b
|
Support for building multiple images at once
|
src/ddocker/app/build.py
|
src/ddocker/app/build.py
|
"""
"""
import logging
import pesos.scheduler
import os
import threading
import time
from pesos.vendor.mesos import mesos_pb2
from ddocker.app import subcommand
from ddocker.app.scheduler import Scheduler
from Queue import Queue
logger = logging.getLogger("ddocker.build")
def args(parser):
parser.add_argument("dockerfile")
parser.add_argument("--tag", action="append", default=[], dest="tags",
help="Multiple tags to apply to the image once built")
parser.add_argument("--executor-uri", dest="executor", required=True,
help="URI to the ddocker executor for mesos")
# Isolation
group = parser.add_argument_group("isolation")
group.add_argument("--cpu-limit", default=1.0,
help="CPU allocated to building the image")
group.add_argument("--mem-limit", default=256,
help="Memory allocated to building the image (mb)")
# Arguments for the staging filesystem
group = parser.add_argument_group("fs")
group.add_argument("--staging-uri", default="/tmp/ddocker",
help="The URI to use as a base directory for staging files.")
group.add_argument("--aws-access-key-id", default=os.environ.get("AWS_ACCESS_KEY_ID"),
help="Access key for using the S3 filesystem")
group.add_argument("--aws-secret-access-key", default=os.environ.get("AWS_SECRET_ACCESS_KEY"),
help="Secret key for using the S3 filesystem")
@subcommand("build", callback=args)
def main(args):
logger.info("Building docker image from %s", args.dockerfile)
task_queue = Queue()
# Launch the mesos framework
framework = mesos_pb2.FrameworkInfo()
framework.user = "" # Let mesos fill this in
framework.name = "ddocker"
if args.framework_id:
framework.id.value = args.framework_id
# Kick off the scheduler driver
scheduler = Scheduler(
task_queue,
args.executor,
args.cpu_limit,
args.mem_limit,
args
)
driver = pesos.scheduler.MesosSchedulerDriver(
scheduler, framework, args.mesos_master
)
# Put the task onto the queue
task_queue.put((args.dockerfile, args.tags))
thread = threading.Thread(target=driver.run)
thread.setDaemon(True)
thread.start()
# Wait here until the tasks are done
while thread.isAlive():
time.sleep(0.5)
|
Python
| 0
|
@@ -325,16 +325,27 @@
kerfile%22
+, nargs=%22+%22
)%0A pa
@@ -2208,16 +2208,59 @@
e queue%0A
+ for dockerfile in args.dockerfile:%0A
task
@@ -2271,21 +2271,16 @@
ue.put((
-args.
dockerfi
|
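The change relies on argparse's `nargs="+"`, which turns the `dockerfile` positional into a list of one or more paths, and the new loop enqueues one build task per path. The same pattern in isolation (queue contents are illustrative, not ddocker's task format):

```python
import argparse
from queue import Queue

parser = argparse.ArgumentParser()
parser.add_argument("dockerfile", nargs="+")  # accept one or more paths
args = parser.parse_args(["api/Dockerfile", "worker/Dockerfile"])

task_queue = Queue()
for dockerfile in args.dockerfile:  # args.dockerfile is now a list
    task_queue.put((dockerfile, []))
print(task_queue.qsize())  # 2
```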
70445bd32ba08b9bd88726a7551345f71ae4e630
|
Improve logging, refactoring
|
executor/opensubmit/executor/execution.py
|
executor/opensubmit/executor/execution.py
|
'''
    Functions related to command execution on the local host.
'''

from .submission import Submission
from .result import Result, PassResult, FailResult

import logging
logger = logging.getLogger('opensubmit.executor')

import os, sys, platform, subprocess, signal
from threading import Timer


def kill_longrunning(config):
    '''
        Terminate everything under the current user account that has run too long.
        This is a final safeguard if the subprocess timeout stuff is not working.
        You better have no production servers running also under the current user account ...
    '''
    import psutil
    ourpid = os.getpid()
    username = psutil.Process(ourpid).username
    # check for other processes running under this account
    timeout = config.getint("Execution","timeout")
    for proc in psutil.process_iter():
        if proc.username == username and proc.pid != ourpid:
            runtime = time.time() - proc.create_time
            logger.debug("This user already runs %u for %u seconds." % (proc.pid, runtime))
            if runtime > timeout:
                logger.debug("Killing %u due to exceeded runtime." % proc.pid)
                try:
                    proc.kill()
                except Exception as e:
                    logger.error("ERROR killing process %d." % proc.pid)


def shell_execution(cmdline, working_dir, timeout=999999):
    '''
        Run given shell command in the given working directory with the given timeout.
        Return according result object.
    '''
    got_timeout = False

    # Allow code to load its own libraries
    os.environ["LD_LIBRARY_PATH"]=working_dir

    try:
        if platform.system() == "Windows":
            proc = subprocess.Popen(cmdline,
                                    cwd=working_dir,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    creationflags=subprocess.CREATE_NEW_PROCESS_GROUP,
                                    universal_newlines=True)
        else:
            proc = subprocess.Popen(cmdline,
                                    cwd=working_dir,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    preexec_fn=os.setsid,
                                    universal_newlines=True)
        output = None
        try:
            output, stderr = proc.communicate(timeout=timeout)
            logger.debug("Process regulary finished.")
        except subprocess.TimeoutExpired as e:
            got_timeout = True
            logger.debug("Process killed by timeout: " + str(e))
        if output == None:
            output = ""
    except Exception:
        details = str(sys.exc_info())
        logger.info("Exception on process execution: " + details)
        return FailResult("Internal error on execution: "+details)

    logger.info("Executed with error code {0}.".format(proc.returncode))
    if proc.returncode!=0:
        logger.debug("Output of the failed execution:\n"+output)
    dircontent = os.listdir(working_dir)
    logger.debug("Working directory after execution: " + str(dircontent))
    if got_timeout:
        res=FailResult("Execution was terminated because it took too long (%u seconds). Output so far:\n\n%s"%(timeout,output))
    else:
        if proc.returncode == 0:
            res = PassResult()
        else:
            res = FailResult()
        res.error_code=proc.returncode
        res.stdout=output+"\n\nDirectory content as I see it:\n\n" + str(dircontent)

    return res
|
Python
| 0.000002
|
@@ -68,43 +68,8 @@
''%0A%0A
-from .submission import Submission%0A
from
@@ -2964,16 +2964,20 @@
Executed
+ %7B0%7D
with er
@@ -2986,17 +2986,17 @@
r code %7B
-0
+1
%7D.%22.form
@@ -2998,16 +2998,25 @@
.format(
+cmdline,
proc.ret
@@ -3383,32 +3383,266 @@
put))%0A else:%0A
+ text = 'Execution of %22%7B0%7D%22 ended with error code %7B1%7D.%5Cn%7B2%7D%5CnDirectory content as I see it:%5Cn%7B3%7D'.format(%0A ' '.join(cmdline),%0A proc.returncode,%0A output,%0A str(dircontent))%0A
if proc.
@@ -3683,24 +3683,28 @@
PassResult(
+text
)%0A el
@@ -3736,18 +3736,26 @@
lResult(
-)%0A
+text)%0A
@@ -3789,94 +3789,8 @@
ode%0A
- res.stdout=output+%22%5Cn%5CnDirectory content as I see it:%5Cn%5Cn%22 + str(dircontent)%0A%0A
|
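One subtlety in the `shell_execution` function above: when `communicate(timeout=...)` raises `TimeoutExpired`, the child is still running and `proc.returncode` stays `None` until the process is reaped. A standalone sketch of the standard cleanup pattern from the `subprocess` documentation (not the OpenSubmit implementation):

```python
import subprocess

def run_with_timeout(cmdline, timeout):
    proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, universal_newlines=True)
    try:
        output, _ = proc.communicate(timeout=timeout)
    except subprocess.TimeoutExpired:
        proc.kill()                     # stop the still-running child
        output, _ = proc.communicate()  # reap it and collect partial output
    return proc.returncode, output

print(run_with_timeout(["echo", "hello"], timeout=5))
```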
6a1f4be94fa595af60f483fef56c45f2c770b711
|
Remove unused imports in svm's benchmarks
|
scikits/learn/benchmarks/bench_svm.py
|
scikits/learn/benchmarks/bench_svm.py
|
"""
To run this, you'll need to have installed.
* pymvpa
* libsvm and it's python bindings
* scikit-learn (of course)
Does two benchmarks
First, we fix a training set, increase the number of
samples to classify and plot number of classified samples as a
function of time.
In the second benchmark, we increase the number of dimensions of the
training set, classify a sample and plot the time taken as a function of the number of dimensions.
"""
import numpy as np
import pylab as pl
from datetime import datetime
import gc
from timeit import Timer
# to store the results
scikit_results = []
svm_results = []
mvpa_results = []
mu_second = 0.0 + 10**6 # number of microseconds in a second
def bench_scikit(X, Y, T):
"""
bench with scikit-learn bindings on libsvm
"""
import scikits.learn
from scikits.learn.svm import SVM
gc.collect()
# start time
tstart = datetime.now()
clf = scikits.learn.svm.SVM(kernel='linear', scale=False);
clf.fit(X, Y);
Z = clf.predict(T)
delta = (datetime.now() - tstart)
# stop time
scikit_results.append(delta.microseconds/mu_second)
def bench_svm(X, Y, T):
"""
bench with swig-generated wrappers that come with libsvm
"""
import svm
X1 = X.tolist()
Y1 = Y.tolist()
T1 = T.tolist()
gc.collect()
# start time
tstart = datetime.now()
problem = svm.svm_problem(Y1, X1)
param = svm.svm_parameter(svm_type=0, kernel_type=0)
model = svm.svm_model(problem, param)
for i in T.tolist():
model.predict(i)
delta = (datetime.now() - tstart)
# stop time
svm_results.append(delta.microseconds/mu_second)
def bench_pymvpa(X, Y, T):
"""
bench with pymvpa (by default uses a custom swig-generated wrapper
around libsvm)
"""
from mvpa.datasets import Dataset
from mvpa.clfs import svm
data = Dataset.from_wizard(samples=X, targets=Y)
gc.collect()
# start time
tstart = datetime.now()
clf = svm.SVM(kernel=svm.LinearSVMKernel())
clf.train(data)
Z = clf.predict(T)
delta = (datetime.now() - tstart)
# stop time
mvpa_results.append(delta.microseconds/mu_second)
if __name__ == '__main__':
from scikits.learn.datasets.iris import load
SP, SW, PL, PW, LABELS = load()
X = np.c_[SP, SW, PL, PW]
Y = LABELS
n = 100
step = 100
for i in range(n):
T = np.random.randn(step*i, 4)
bench_scikit(X, Y, T)
bench_pymvpa(X, Y, T)
bench_svm(X, Y, T)
import pylab as pl
xx = range(0, n*step, step)
pl.title('Classification in the Iris dataset (5-d space)')
pl.plot(xx, scikit_results, 'b-', label='scikit-learn')
pl.plot(xx, svm_results,'r-', label='libsvm-swig')
pl.plot(xx, mvpa_results, 'g-', label='pymvpa')
pl.legend()
pl.xlabel('number of samples to classify')
pl.ylabel('time (in microseconds)')
pl.show()
# now do a bench where the number of points is fixed
# and the variable is the number of dimensions
from scikits.learn.datasets.samples_generator.nonlinear import friedman
from scikits.learn.datasets.samples_generator.linear import sparse_uncorrelated
scikit_results = []
svm_results = []
mvpa_results = []
n = 100
step = 20
start_dim = 400
print '============================================'
print 'Warning: this is going to take a looong time'
print '============================================'
dimension = start_dim
for i in range(0, n):
print '============================================'
print 'Entering iteration %s' % i
print '============================================'
dimension += step
X, Y = sparse_uncorrelated(nb_features=dimension, nb_samples=100)
Y = Y.astype(np.int)
T, _ = friedman(nb_features=dimension, nb_samples=100)
bench_scikit(X, Y, T)
bench_svm(X, Y, T)
bench_pymvpa(X, Y, T)
xx = np.arange(start_dim, start_dim+n*step, step)
pl.title('Classification in high dimensional spaces')
pl.plot(xx, scikit_results, 'b-', label='scikit-learn')
pl.plot(xx, svm_results,'r-', label='libsvm-swig')
pl.plot(xx, mvpa_results, 'g-', label='mvpa')
pl.legend()
pl.xlabel('number of dimensions')
pl.ylabel('time (in seconds)')
pl.axis('tight')
pl.show()
|
Python
| 0.00006
|
@@ -528,33 +528,8 @@
t gc
-%0Afrom timeit import Timer
%0A%0A#
|
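A caveat when reading the numbers this benchmark produces: `delta.microseconds` is only the sub-second field of a `timedelta`, so any run longer than one second silently wraps; `delta.total_seconds()` measures the full interval. The difference in miniature:

```python
from datetime import timedelta

delta = timedelta(seconds=2, microseconds=500)
print(delta.microseconds)     # 500 -- sub-second component only
print(delta.total_seconds())  # 2.0005 -- the full elapsed time
```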
f2eb527e7602472856f981726b91bb23bbf22a9e
|
Add URL decorator to dashboard app
|
stores/dashboard/app.py
|
stores/dashboard/app.py
|
from django.conf.urls.defaults import patterns, url
from django.utils.translation import ugettext_lazy as _

from oscar.core.application import Application
from oscar.apps.dashboard.nav import register, Node

from stores.dashboard import views

node = Node(_('Store Manager'))
node.add_child(Node(_('Stores'), 'stores-dashboard:store-list'))
node.add_child(Node(_('Store Groups'), 'stores-dashboard:store-group-list'))
register(node, 100)


class StoresDashboardApplication(Application):
    name = 'stores-dashboard'

    store_list_view = views.StoreListView
    store_create_view = views.StoreCreateView
    store_update_view = views.StoreUpdateView
    store_delete_view = views.StoreDeleteView

    store_group_list_view = views.StoreGroupListView
    store_group_create_view = views.StoreGroupCreateView
    store_group_update_view = views.StoreGroupUpdateView
    store_group_delete_view = views.StoreGroupDeleteView

    def get_urls(self):
        urlpatterns = patterns('',
            url(r'^$', self.store_list_view.as_view(), name='store-list'),
            url(
                r'^create/$',
                self.store_create_view.as_view(),
                name='store-create'
            ),
            url(
                r'^update/(?P<pk>[\d]+)/$',
                self.store_update_view.as_view(),
                name='store-update'
            ),
            url(
                r'^delete/(?P<pk>[\d]+)/$',
                self.store_delete_view.as_view(),
                name='store-delete'
            ),
            url(
                r'^groups/$',
                self.store_group_list_view.as_view(),
                name='store-group-list'
            ),
            url(
                r'^groups/create/$',
                self.store_group_create_view.as_view(),
                name='store-group-create'
            ),
            url(
                r'^groups/update/(?P<pk>[\d]+)/$',
                self.store_group_update_view.as_view(),
                name='store-group-update'
            ),
            url(
                r'^groups/delete/(?P<pk>[\d]+)/$',
                self.store_group_delete_view.as_view(),
                name='store-group-delete'
            ),
        )
        return self.post_process_urls(urlpatterns)


application = StoresDashboardApplication()
|
Python
| 0
|
@@ -200,16 +200,73 @@
er, Node
+%0Afrom oscar.views.decorators import staff_member_required
%0A%0Afrom s
@@ -2327,16 +2327,96 @@
terns)%0A%0A
+ def get_url_decorator(self, url_name):%0A return staff_member_required%0A
%0Aapplica
|
095fd27319ab39809c2a312790f920bf3d2b041e
|
Add a --player option to the play command
|
brainfm/main/cli.py
|
brainfm/main/cli.py
|
#!/usr/bin/env python
import brainfm
import click
import jmespath
import json
import pathlib
import requests
import sys
import terminaltables
import webbrowser

CONFIG_PATH = pathlib.Path("~/.brainfm/config").expanduser()
CACHE_PATH = pathlib.Path("~/.brainfm/cache").expanduser()
CACHE_PATH.mkdir(parents=True, exist_ok=True)
STATIONS_PATTERN = jmespath.compile("[*].[station_id, name, canonical_name]")

# TODO graceful failure when config is missing/invalid
with CONFIG_PATH.open() as fp:
    config = json.load(fp)
client = brainfm.Connection(config["email"], config["password"])

# TODO expire cached values
if (CACHE_PATH / "svu").exists():
    with (CACHE_PATH / "svu").open() as fp:
        client._svu = fp.read()
else:
    with (CACHE_PATH / "svu").open(mode="w") as fp:
        fp.write(client.svu)

cached = {
    "stations": None
}
if (CACHE_PATH / "stations").exists():
    with (CACHE_PATH / "stations").open() as fp:
        cached["stations"] = json.load(fp)


@click.group()
def cli():
    pass


@cli.command()
def svu():
    """Display the current siteVisitorUUID"""
    print(client.svu)


@cli.command()
def ls():
    """List available stations"""
    if not cached["stations"]:
        cached["stations"] = client.get_stations()
        with (CACHE_PATH / "stations").open(mode="w") as fp:
            json.dump(cached["stations"], fp, indent=4, sort_keys=True)
    headers = ["id", "name", "canonical"]
    data = sorted(STATIONS_PATTERN.search(cached["stations"]))
    table = terminaltables.AsciiTable(
        table_data=[headers] + data,
        title="Available Stations")
    print(table.table)


@cli.command()
@click.argument("station_id")
def gs(station_id):
    """Get a single station"""
    try:
        output = client.get_station(station_id=station_id)
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 404:
            output = {
                "code": "UnknownStationID",
                "error": "Unknown station {!r}".format(station_id)}
        else:
            raise e
    print(json.dumps(output, indent=4, sort_keys=True))


@cli.command()
@click.argument("station_id")
def gt(station_id):
    """Get a station token"""
    try:
        output = client.get_token(station_id=station_id)
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 404:
            output = {
                "code": "UnknownStationID",
                "error": "Unknown station {!r}".format(station_id)}
        else:
            raise e
    print(json.dumps(output, indent=4, sort_keys=True))


@cli.command()
@click.argument("station_id")
def url(station_id):
    """Get a station URL"""
    try:
        token = client.get_token(station_id=station_id)
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 404:
            print(json.dumps(
                {
                    "code": "UnknownStationID",
                    "error": "Unknown station {!r}".format(station_id)},
                indent=4, sort_keys=True))
            sys.exit(1)
        else:
            raise e
    print("https://stream.brain.fm/?tkn=" + token["session_token"])


@cli.command()
@click.argument("station_id")
def play(station_id):
    """Play a station stream"""
    try:
        token = client.get_token(station_id=station_id)
    except requests.exceptions.HTTPError as e:
        if e.response.status_code == 404:
            print(json.dumps(
                {
                    "code": "UnknownStationID",
                    "error": "Unknown station {!r}".format(station_id)},
                indent=4, sort_keys=True))
            sys.exit(1)
        else:
            raise e
    webbrowser.open_new_tab(
        "https://stream.brain.fm/?tkn=" + token["session_token"])


main = cli
|
Python
| 0
|
@@ -103,16 +103,47 @@
equests%0A
+import shlex%0Aimport subprocess%0A
import s
@@ -3239,24 +3239,90 @@
tation_id%22)%0A
+@click.option(%22--player%22, help=%22Command used to play the stream%22)%0A
def play(sta
@@ -3320,32 +3320,45 @@
play(station_id
+, player=None
):%0A %22%22%22Play a
@@ -3808,40 +3808,13 @@
-webbrowser.open_new_tab(%0A
+url =
%22ht
@@ -3866,16 +3866,157 @@
_token%22%5D
+%0A if player is None:%0A webbrowser.open_new_tab(url)%0A else:%0A cmd = shlex.split(player) + %5Burl%5D%0A subprocess.Popen(cmd
)%0A%0Amain
|
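The new `--player` path splits the command string with `shlex.split`, so quoted arguments survive as single tokens, then appends the stream URL before handing the list to `subprocess.Popen`. The splitting step in isolation (the player string is illustrative):

```python
import shlex

player = 'mpv --title "brain.fm stream" --no-video'
url = "https://stream.brain.fm/?tkn=example"

cmd = shlex.split(player) + [url]
print(cmd)
# ['mpv', '--title', 'brain.fm stream', '--no-video', 'https://stream.brain.fm/?tkn=example']
```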
92c5012164e2a09240328f882732fc4e86d8410d
|
Update tests/chainerx_tests/op_utils.py
|
tests/chainerx_tests/op_utils.py
|
tests/chainerx_tests/op_utils.py
|
import inspect
import sys

import numpy
import pytest

import chainer
import chainer.testing
import chainerx


class _OpTest(chainer.testing.function.FunctionTestBase):

    def setup(self):
        # This method can be overridden by a concrete class with arbitrary
        # arguments.
        pass

    def teardown(self):
        pass

    def forward(self, inputs, device):
        # device is chainer.Device and it's ignored.
        # chainerx's default device is used instead.

        test_self = self

        class MyFunc(chainer.FunctionNode):
            def forward_chainerx(self, inputs):
                return test_self.forward_chainerx(inputs)

        return MyFunc().apply(inputs)

    def forward_chainerx(self, inputs):
        raise NotImplementedError(
            'Op test implementation must override `forward_chainerx`.')


class ChainerOpTest(_OpTest):

    # Base class for op test that compares the output with Chainer
    # implementation.
    #
    # It must be used in conjunction with `op_test` decorator.
    #
    # Examples:
    #
    # @op_utils.op_test(['native:0', 'cuda:0'])
    # class test_conv(op_utils.ChainerOpTest):
    #
    #     def setup(self, float_dtype):
    #         self.dtype = float_dtype
    #
    #     def generate_inputs(self):
    #         dtype = self.dtype
    #         x = numpy.random.uniform(-1, 1, (1, 3)).astype(dtype)
    #         w = numpy.random.uniform(-1, 1, (5, 3)).astype(dtype)
    #         b = numpy.random.uniform(-1, 1, (5,)).astype(dtype)
    #         return x, w, b
    #
    #     def forward_chainerx(self, inputs):
    #         x, w, b = inputs
    #         y = chainerx.conv(x, w, b, self.stride, self.pad, self.cover_all)
    #         return y,
    #
    #     def forward_chainer(self, inputs):
    #         x, w, b = inputs
    #         y = chainer.functions.convolution_nd(
    #             x, w, b, self.stride, self.pad, self.cover_all)
    #         return y,
    #
    # In this example, `float_dtype` is a Pytest fixture for parameterizing
    # floating-point dtypes (i.e. float16, float32, float64). As seen from
    # this, arguments in the `setup` method are treated as Pytest fixtures.
    #
    # Test implementations must at least override the following methods:
    # * `generate_inputs`: Generates inputs to the test target.
    # * `forward_chainerx`: Forward implementation using ChainerX.
    # * `forward_chainer`: Forward reference implementation using Chainer.
    #
    # It can have similar attributes as `chainer.testing.FunctionTestCase`.

    def forward_expected(self, inputs):
        output_vars = self.forward_chainer(inputs)
        return tuple([y.array for y in output_vars])

    def forward_chainerx(self, inputs):
        raise NotImplementedError(
            'Op test implementation must override `forward_chainerx`.')

    def forward_chainer(self, inputs):
        raise NotImplementedError(
            'Op test implementation must override `forward_chainer`.')


class NumpyOpTest(_OpTest):

    # Base class for op test that compares the output with NumPy
    # implementation.
    #
    # It must be used in conjunction with `op_test` decorator.
    #
    # Examples:
    #
    # @op_utils.op_test(['native:0', 'cuda:0'])
    # class test_tanh(op_utils.NumpyOpTest):
    #
    #     def setup(self, float_dtype):
    #         self.dtype = dtype
    #
    #     def generate_inputs(self):
    #         x = numpy.random.uniform(-1, 1, (2, 3)).astype(self.dtype)
    #         return x,
    #
    #     def forward_xp(self, inputs, xp):
    #         x, = inputs
    #         return xp.tanh(x),
    #
    # In this example, `float_dtype` is a Pytest fixture for parameterizing
    # floating-point dtypes (i.e. float16, float32, float64). As seen from
    # this, arguments in the `setup` method are treated as Pytest fixture.
    #
    # Test implementations must at least override the following methods:
    # * `generate_inputs`: Generates inputs to the test target.
    # * `forward_xp`: Forward implementation using both ChainerX and NumPy.
    #
    # It can have similar attributes as `chainer.testing.FunctionTestCase`.

    def forward_chainerx(self, inputs):
        return self.forward_xp(inputs, chainerx)

    def forward_expected(self, inputs):
        outputs = self.forward_xp(inputs, numpy)
        return tuple([numpy.asarray(y) for y in outputs])

    def forward_xp(self, inputs, xp):
        raise NotImplementedError(
            'Op test implementation must override `forward_xp`.')


def _make_backend_config(device_name):
    backend_config = chainer.testing.BackendConfig({
        'use_chainerx': True,
        'chainerx_device': device_name,
    })
    return backend_config


def _create_test_entry_function(
        cls, module, devices, func_suffix, method_name):
    # Creates a test entry function from the template class, and places it in
    # the same module as the class.
    #
    # func_suffix:
    #     The suffix of the test entry function to create.
    # method_name:
    #     The name of the test method name defined in `FunctionTestBase` class.

    @pytest.mark.parametrize_device(devices)
    def entry_func(device, *args, **kwargs):
        obj = cls()
        run_test_method = getattr(obj, method_name)
        try:
            obj.setup(*args, **kwargs)
            run_test_method(_make_backend_config(device.name))
        finally:
            obj.teardown()

    func_name = '{}_{}'.format(cls.__name__, func_suffix)
    entry_func.__name__ = func_name

    # Set the signature of the entry function
    sig = inspect.signature(cls.setup)
    params = list(sig.parameters.values())
    params = params[1:]  # Remove `self` argument
    device_param = inspect.Parameter(
        'device', inspect.Parameter.POSITIONAL_OR_KEYWORD)
    params = [device_param] + params  # Prepend `device` argument
    entry_func.__signature__ = inspect.Signature(params)

    # Set the pytest mark
    try:
        pytestmark = cls.pytestmark
        entry_func.pytestmark += pytestmark
    except AttributeError:
        pass

    # Place the entry function in the module of the class
    setattr(module, func_name, entry_func)


def op_test(devices):
    # Decorator to set up an op test.
    #
    # This decorator can be used in conjunction with either ``NumpyOpTest`` or
    # ``ChainerOpTest`` to define an op test.
    #
    # See the documentation of the respective classes for detailed explanation
    # and examples.
    #
    # Args:
    #     devices: List of devices to test.

    def wrap(cls):
        # TODO(niboshi): Avoid using private entries in chainer.testing.
        if isinstance(
                cls, chainer.testing._bundle._ParameterizedTestCaseBundle):
            classes = [(c, m) for c, m, name in cls.cases]
        else:
            classes = [(cls, cls.__module__)]

        tests = [
            ('forward', 'run_test_forward'),
            ('backward', 'run_test_backward'),
            ('double_backward', 'run_test_double_backward'),
        ]
        for cls, mod in classes:
            for func_suffix, method_name in tests:
                _create_test_entry_function(
                    cls, sys.modules[mod], devices, func_suffix, method_name)

        # return None: no other decorator can be applied after this decorator.
        return None

    return wrap
|
Python
| 0
|
@@ -3867,16 +3867,17 @@
fixture
+s
.%0A #%0A
|
fd2b4f1d536aec9e92a8b793eb2294c0a935bc35
|
add cb and co for pusher
|
btspusher/pusher.py
|
btspusher/pusher.py
|
# -*- coding: utf-8 -*-

import asyncio
from autobahn.asyncio.wamp import ApplicationSession
from autobahn.wamp import auth
from btspusher.wamp import ApplicationRunner


class PusherComponent(ApplicationSession):
    future = None  # a future from asyncio
    instance = None
    login_info = None

    @staticmethod
    def login(login_info):
        PusherComponent.login_info = login_info

    @asyncio.coroutine
    def onJoin(self, details):
        print("join")
        if self.future:
            self.future.set_result(1)
            self.future = None
        PusherComponent.instance = self

    def onConnect(self):
        print("connected")
        if self.login_info:
            self.join(self.config.realm, [u"wampcra"], self.login_info["user"])
        else:
            self.join(self.config.realm)

    def onChallenge(self, challenge):
        key = self.login_info["password"].encode('utf8')
        signature = auth.compute_wcs(
            key, challenge.extra['challenge'].encode('utf8'))
        return signature.decode('ascii')

    def onLeave(self, details):
        print("session left")

    def onDisconnect(self):
        PusherComponent.instance = None
        print("lost connect")


class Pusher(object):
    def __init__(self, loop, login_info=None):
        url = u"wss://pusher.btsbots.com/ws"
        realm = u"realm1"
        try:
            if login_info:
                PusherComponent.login(login_info)
            PusherComponent.future = asyncio.Future()
            runner = ApplicationRunner(url, realm)
            runner.run(PusherComponent)
            loop.run_until_complete(
                asyncio.wait_for(PusherComponent.future, 10))
        except Exception:
            print("can't connect to pusher.btsbots.com")

    def publish(self, *args, **kwargs):
        kwargs["__t"] = args[0]
        if PusherComponent.instance:
            PusherComponent.instance.publish(*args, **kwargs)

    def sync_subscribe(self, *args, **kwargs):
        if PusherComponent.instance:
            asyncio.wait(PusherComponent.instance.subscribe(*args, **kwargs))

    @asyncio.coroutine
    def subscribe(self, *args, **kwargs):
        if PusherComponent.instance:
            yield from PusherComponent.instance.subscribe(*args, **kwargs)

    def sync_call(self, *args, **kwargs):
        if PusherComponent.instance:
            asyncio.wait(PusherComponent.instance.call(*args, **kwargs))

    @asyncio.coroutine
    def call(self, *args, **kwargs):
        if PusherComponent.instance:
            yield from PusherComponent.instance.call(*args, **kwargs)


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    bts_pusher = Pusher(loop)

    def on_event(i):
        print("Got event: {}".format(i))
    # bts_pusher.sync_subscribe(on_event, "public.test")
    bts_pusher.publish("public.test", "hello", a="bb")
    loop.run_forever()
    loop.close()
|
Python
| 0
|
@@ -290,16 +290,44 @@
o = None
+%0A cb = None%0A co = None
%0A%0A @s
@@ -490,16 +490,56 @@
%22join%22)%0A
+ PusherComponent.instance = self%0A
@@ -635,43 +635,102 @@
- PusherComponent.instance = self
+if self.cb:%0A self.cb(self)%0A if self.co:%0A yield from self.co(self)
%0A%0A
@@ -1382,16 +1382,29 @@
_init__(
+%0A
self, lo
@@ -1422,16 +1422,34 @@
nfo=None
+, co=None, cb=None
):%0A
@@ -1604,16 +1604,16 @@
n_info)%0A
-
@@ -1658,16 +1658,88 @@
uture()%0A
+ PusherComponent.co = co%0A PusherComponent.cb = cb%0A
|
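The diff threads two optional hooks through the session: a plain callback `cb`, called as `self.cb(self)`, and a coroutine `co`, awaited via `yield from self.co(self)` once the session joins. A standalone sketch of the same sync-plus-async hook pattern on modern asyncio (names are illustrative):

```python
import asyncio

class Component:
    def __init__(self, cb=None, co=None):
        self.cb = cb  # synchronous hook
        self.co = co  # coroutine hook

    async def on_join(self):
        if self.cb:
            self.cb(self)        # plain callback
        if self.co:
            await self.co(self)  # awaited coroutine

async def main():
    async def co_hook(c):
        print("coroutine hook ran")
    comp = Component(cb=lambda c: print("callback ran"), co=co_hook)
    await comp.on_join()

asyncio.run(main())
```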
5c9b501c07f28d36b3cb6d35dbd2dde8074e16b7
|
Fix tests
|
src/tests/rotation_cipher_plm_test.py
|
src/tests/rotation_cipher_plm_test.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-

__author__ = "Eduardo Lopez Biagi"
__license__ = "BSD-new"

from .. import rotation_cipher_plm as rcplm
import unittest
import functools


class TestRotationCipher(unittest.TestCase):

    def setUp(self):
        self.rc = rcplm.RotationCipher()

    def test_alphabet(self):
        self.assertEqual(len(self.rc.alphabet), 26)

    def test_rotate_char(self):
        self.assertEqual(self.rc.rotate_char("a"), "a")
        self.assertEqual(self.rc.rotate_char("a", 1), "b")
        self.assertEqual(self.rc.rotate_char("a", 5), "f")
        self.assertEqual(self.rc.rotate_char("a", 25), "z")
        self.assertEqual(self.rc.rotate_char("a", 26), "a")

    def test_encode(self):
        self.assertEqual(self.rc.encode("abcd"), "abcd")
        self.assertEqual(self.rc.encode("abcd", 1), "bcde")
        self.assertEqual(self.rc.encode("abcd", 5), "fghi")
        self.assertEqual(self.rc.encode("abcd", 25), "zabc")
        self.assertEqual(self.rc.encode("abcd", 26), "abcd")

    def test_encode_special_chars(self):
        self.assertEqual(self.rc.encode("ab cd", 1), "bc de")
        self.assertEqual(self.rc.encode("ab cd!", 1), "bc de!")


class TestLetterBigrams(unittest.TestCase):

    def setUp(self):
        self.lbg = rcplm.LetterBigrams()

    def test_init(self):
        self.assertEqual(len(self.lbg.words), 267751)

    def test_init_lowercase(self):
        self.assertEqual(self.lbg.words[0], "aa")

    def test_build_probabilistic_model(self):
        self.assertEqual(self.lbg.bigrams['aa'], {"count": 194, "p": 9.025973158782064e-05})
        self.assertEqual(self.lbg.bigrams['za'], {"count": 1729, "p": 0.0007971407927475385})
        bigrams_count = functools.reduce(lambda v,e: v + e['count'], self.lbg.bigrams.values(), 0)
        self.assertEqual(bigrams_count, 2171509)

    def test_calculate_probabilities(self):
        self.lbg.bigrams = {
            "aa": {"count": 10, "p": 0},
            "ab": {"count": 5, "p": 0},
            "ac": {"count": 0, "p": 0}
        }
        k = 2
        self.lbg.calculate_probabilities(k)
        self.assertEqual(self.lbg.probability("aa"), (10 + k) / (15 + k))
        self.assertEqual(self.lbg.probability("ab"), (5 + k) / (15 + k))
        self.assertEqual(self.lbg.probability("ac"), (0 + k) / (15 + k))

    def test_calculate_probabilities(self):
        self.lbg.bigrams = {
            "aa": {"count": 10, "p": 0},
            "ab": {"count": 5, "p": 0},
            "ac": {"count": 0, "p": 0}
        }
        # Maximum likelihood
        self.lbg.calculate_probabilities(0)
        self.assertEqual(self.lbg.probability("aa"), 10/15)
        self.assertEqual(self.lbg.probability("ab"), 5/15)
        self.assertEqual(self.lbg.probability("ac"), 0)

    def test_probability(self):
        self.assertEqual(self.lbg.probability("za"), 0.0007971407927475385)


class TestDecoder(unittest.TestCase):

    def setUp(self):
        rotation_cipher = rcplm.RotationCipher()
        self.phrase = "Tonight instead of discussing the existence or non existence \
of God they have decided to fight for it"
        encoded_phrase = rotation_cipher.encode(self.phrase, 5)
        self.phrases = [rotation_cipher.encode(encoded_phrase, x) for x in range(0, 26)]

    def test_most_probable(self):
        phrase, best_p, second_best_p = rcplm.most_probable(self.phrases)
        self.assertEqual(phrase, self.phrase.lower())
        self.assertEqual(best_p, 2.3852979238833054e-156)
        self.assertEqual(second_best_p, 2.3543957084256302e-213)


if __name__ == '__main__':
    unittest.main()
|
Python
| 0.000003
|
@@ -1584,25 +1584,25 @@
p%22:
-9.025973158782064
+8.977135925347058
e-05
@@ -1681,29 +1681,29 @@
0.00079
-7140792747538
+6433084658995
5%7D)%0A
@@ -2172,32 +2172,36 @@
0 + k) / (15 + k
+ * 3
))%0A self.
@@ -2257,16 +2257,20 @@
(15 + k
+ * 3
))%0A
@@ -2334,16 +2334,20 @@
(15 + k
+ * 3
))%0A%0A
@@ -2370,32 +2370,35 @@
te_probabilities
+_ml
(self):%0A
@@ -2894,21 +2894,21 @@
0079
-7140792747538
+6433084658995
5)%0A%0A
@@ -3501,23 +3501,23 @@
2.3
-852979238833054
+102527364450072
e-15
@@ -3565,29 +3565,29 @@
, 2.
-3543957084256302
+7518911947067603
e-21
-3
+4
)%0A%0Ai
|
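The updated expectations follow from additive (add-k) smoothing with the vocabulary size in the denominator: p(b) = (count(b) + k) / (N + k·V), here with N = 15 observed bigrams and V = 3 bigram types. Checking the values the test now asserts:

```python
counts = {"aa": 10, "ab": 5, "ac": 0}
k = 2
N = sum(counts.values())  # 15 observed bigrams
V = len(counts)           # 3 bigram types

for bigram, c in counts.items():
    print(bigram, (c + k) / (N + k * V))
# aa 0.5714..., ab 0.3333..., ac 0.0952...
```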
62617dcc596a79f577de187722477a1e34a68a4e
|
version 0.7b10
|
databench/__init__.py
|
databench/__init__.py
|
"""Databench module."""
# flake8: noqa
from __future__ import absolute_import
__version__ = '0.7b9'
__all__ = ['Analysis', 'AnalysisZMQ', 'App', 'Datastore', 'Meta', 'MetaZMQ',
'on', 'Readme', 'testing', 'utils']
from .analysis import Analysis, on
from .analysis_zmq import AnalysisZMQ
from .app import App
from .datastore import Datastore
from .datastore_legacy import DatastoreLegacy
from .meta import Meta
from .meta_zmq import MetaZMQ
from .readme import Readme
from . import testing
from . import utils
|
Python
| 0.000001
|
@@ -96,9 +96,10 @@
0.7b
-9
+10
'%0A__
|
4fe36d96d3810b39fcd15dee87318763d0d277a9
|
remove time
|
streamteam/io/nbody6.py
|
streamteam/io/nbody6.py
|
# coding: utf-8

""" Class for reading data from NBODY6 simulations """

from __future__ import division, print_function

__author__ = "adrn <adrn@astro.columbia.edu>"

# Standard library
import os, sys
import logging
import re

# Third-party
import numpy as np
import astropy.units as u
from astropy.constants import G
from astropy.table import Table

# Project
from .core import NBodyReader

# Create logger
logger = logging.getLogger(__name__)

__all__ = ["NBODY6Reader"]


class NBODY6Reader(NBodyReader):

    def _read_units(self):
        """ """
        units = dict(length=u.pc,
                     speed=u.km/u.s,
                     dimensionless=u.dimensionless_unscaled)
        return units

    def read_snapshot(self, filename, units=None):
        """ Given a filename, read and return the data. By default,
            returns data in simulation units, but this can be changed with
            the `units` kwarg.

            Parameters
            ----------
            filename : str
                The name of the shapshot file to read.
            units : dict (optional)
                A unit system to transform the data to. If None, will return
                the data in simulation units.
        """

        # read the first line to get the numer of particles and timestep
        fullpath = os.path.join(self.path, filename)

        # column names for SNAP file, in simulation units
        colnames = "id x y z vx vy vz".split()
        coltypes = "dimensionless length length length speed speed speed".split()
        colunits = [self.sim_units[x] for x in coltypes]

        data = np.genfromtxt(fullpath, skiprows=1, names=colnames)

        if units is not None:
            new_colunits = []
            for colname,colunit in zip(colnames,colunits):
                newdata = (data[colname]*colunit).decompose(units)
                data[colname] = newdata.value
                new_colunits.append(newdata.unit)

            time = time.decompose(units)
            colunits = new_colunits

        tbl = Table(data, meta=dict(time=time.value))
        for colname,colunit in zip(colnames,colunits):
            tbl[colname].unit = colunit

        return tbl
|
Python
| 0.99805
|
@@ -1954,49 +1954,8 @@
t)%0A%0A
- time = time.decompose(units)%0A
@@ -2015,36 +2015,8 @@
data
-, meta=dict(time=time.value)
)%0A
|
1601ac43644a5e4ee84a2c2257d5a09fc7971849
|
rename measurement string for q-value
|
consensx/views.py
|
consensx/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

import random  # ID generation
import string  # ID generation
import os      # mkdir
import pickle
import json

from .models import CSX_upload, CSX_calculation
from .consensx import run_calculation
from .selection import run_selection

chars = string.ascii_uppercase + string.digits
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


def handle_uploaded_file(f, path, filename):
    file_full_path = path + '/' + filename
    with open(file_full_path, 'wb+') as destination:
        for chunk in f.chunks():
            destination.write(chunk)


def db(request, my_id):
    DB_entry = CSX_calculation.objects.get(id_code=my_id)
    print("PATH", request.path)
    return HttpResponse(DB_entry.returnHTML())


def home(request):
    if request.method == 'POST':  # if the form has been submitted...
        # generate ID for calcilation
        my_id = ''.join(random.choice(chars) for _ in range(6))
        my_path = os.path.join(BASE_DIR, 'media', my_id)
        os.mkdir(my_path)

        # check if POST is a test submit
        if request.POST.get('submit_test', False):
            # IMPLEMENT TEST CALC HERE!
            return run_calculation(request, my_id)

        PDB_file = request.FILES['pdb_upload']  # get PDB file
        handle_uploaded_file(PDB_file, my_path, PDB_file.name)

        try:  # get restraint file if any
            restraint_file = request.FILES['bmrb_upload']
            restraint_file_name = restraint_file.name
            handle_uploaded_file(restraint_file, my_path, restraint_file_name)
        except KeyError:
            restraint_file = None
            restraint_file_name = None

        try:  # get NOE file if any
            NOE_file = request.FILES['xplor_upload']
            NOE_file_name = NOE_file.name
            handle_uploaded_file(NOE_file, my_path, NOE_file_name)
        except KeyError:
            NOE_file = None
            NOE_file_name = None

        try:  # check if fitting is enabled
            fit_enable = bool(request.POST['superimpose'])
        except KeyError:
            fit_enable = False

        try:  # get fit range if any
            fit_range = request.POST['fit_range']
        except KeyError:
            fit_range = None

        try:  # check if fitting is enabled
            r3average = bool(request.POST['r3average'])
        except KeyError:
            r3average = False

        try:  # check if fitting is enabled
            svd_enable = bool(request.POST['RDCSVD'])
        except KeyError:
            svd_enable = False

        post_data = CSX_upload(
            id_code=my_id,
            PDB_file=PDB_file.name,
            NOE_file=NOE_file_name,
            STR_file=restraint_file_name,
            karplus=request.POST['KARPLUS'],
            superimpose=fit_enable,
            fit_range=fit_range,
            r3average=r3average,
            svd_enable=svd_enable,
            rdc_lc=request.POST['RDCLC']
        )
        post_data.save()

        return run_calculation(request, my_id)
    else:
        return render(request, "consensx/home.html")


@csrf_exempt
def selection(request, my_id):
    print("SELECTION ID IS: " + my_id)
    my_path = os.path.join(BASE_DIR, 'media', my_id)
    user_selection = json.loads(request.body.decode("utf-8"))
    original_values = pickle.load(open(my_path + "/calced_values.p", "rb"))

    # GYULA!
    if request.method == 'POST':  # if the AJAX request has been received...
        num_coordsets, sel_values = run_selection(my_path,
                                                  original_values,
                                                  user_selection)
        measure = user_selection["MEASURE"]
        return_dict = {}
        return_dict["measure"] = measure
        return_dict["num_coordsets"] = num_coordsets

        if measure == "correlation":
            measure = "corr"

        values_dict = {}
        for key, value in sel_values.items():
            values_dict[key] = {
                "original": original_values[key + "_" + measure],
                "selection": "{0:.3g}".format(value)
            }

        print("values_dict")
        print(values_dict)
        return_dict["values"] = values_dict

        return HttpResponse(json.dumps(return_dict), content_type='application/json')
|
Python
| 0.000018
|
@@ -4186,16 +4186,71 @@
%22corr%22%0A%0A
+ if measure == %22q-value%22:%0A measure = %22qval%22%0A%0A
valu
|
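The view now maps two display names to their short result keys (`correlation` → `corr`, plus the new `q-value` → `qval`) in back-to-back `if` blocks. A table-driven variant of the same normalisation, should more measures accumulate (a sketch, not the project's code):

```python
MEASURE_KEYS = {"correlation": "corr", "q-value": "qval"}

def normalize(measure):
    # fall back to the name itself when no short key is registered
    return MEASURE_KEYS.get(measure, measure)

print(normalize("q-value"))  # qval
print(normalize("rmsd"))     # rmsd
```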
bcd49d8688a1f40e47b4490105b58d3046bb45e3
|
Fix Issue #93 - Fix Curl connect will raise auth not supported error.
|
microproxy/layer/proxy/socks.py
|
microproxy/layer/proxy/socks.py
|
import struct

import ipaddress
from tornado import gen
from tornado import iostream

from base import ProxyLayer

from microproxy.utils import get_logger
from microproxy.exception import ProtocolError, SrcStreamClosedError

logger = get_logger(__name__)


class SocksLayer(ProxyLayer):
    SOCKS_VERSION = 0x05

    SOCKS_REQ_COMMAND = {
        "CONNECT": 0x1,
        "BIND": 0x02,
        "UDP_ASSOCIATE": 0x03
    }

    SOCKS_RESP_STATUS = {
        "SUCCESS": 0x0,
        "GENRAL_FAILURE": 0x01,
        "CONNECTION_NOT_ALLOWED": 0x02,
        "NETWORK_UNREACHABLE": 0x03,
        "HOST_UNREACHABLE": 0x04,
        "CONNECTION_REFUSED": 0x05,
        "TTL_EXPIRED": 0x06,
        "COMMAND_NOT_SUPPORTED": 0x07,
        "ADDRESS_TYPE_NOT_SUPPORTED": 0x08,
    }

    SOCKS_ADDR_TYPE = {
        "IPV4": 0x01,
        "DOMAINNAME": 0x03,
        "IPV6": 0x04
    }

    def __init__(self, context):
        super(SocksLayer, self).__init__(context)

    @gen.coroutine
    def process_and_return_context(self):
        yield self.socks_greeting()
        host, port, addr_type = yield self.socks_request()
        dest_stream = yield self.socks_response_with_dest_stream_creation(host, port, addr_type)
        self.context.src_stream.pause()
        self.context.dest_stream = dest_stream
        self.context.host = host
        self.context.port = port
        raise gen.Return(self.context)

    @gen.coroutine
    def socks_greeting(self):
        src_stream = self.context.src_stream
        data = yield src_stream.read_bytes(3)

        logger.debug("socks greeting to {0}".format(src_stream.socket.getpeername()[0]))
        socks_version, socks_nmethod, _ = struct.unpack('BBB', data)

        if socks_version != self.SOCKS_VERSION:
            raise ProtocolError("not support socks version {0}".format(socks_version))

        if socks_nmethod == 1:
            response = struct.pack('BB', self.SOCKS_VERSION, 0)
            yield src_stream.write(response)
        else:
            raise ProtocolError("socks5 auth not supported")

    @gen.coroutine
    def socks_request(self):
        src_stream = self.context.src_stream
        data = yield src_stream.read_bytes(4)
        request_header_data = struct.unpack('!BBxB', data)
        socks_version = request_header_data[0]
        socks_cmd = request_header_data[1]
        socks_atyp = request_header_data[2]

        if socks_version != self.SOCKS_VERSION:
            raise ProtocolError("not support socks version {0}".format(socks_version))

        if socks_cmd != self.SOCKS_REQ_COMMAND["CONNECT"]:
            raise ProtocolError("not support socks command {0}".format(socks_cmd))

        if socks_atyp == self.SOCKS_ADDR_TYPE["IPV4"]:
            host_data = yield src_stream.read_bytes(4)
            host = ipaddress.IPv4Address(host_data).compressed
        elif socks_atyp == self.SOCKS_ADDR_TYPE["DOMAINNAME"]:
            host_length_data = yield src_stream.read_bytes(1)
            host_length = struct.unpack("!B", host_length_data)[0]
            host_data = yield src_stream.read_bytes(host_length)
            host = host_data.decode("idna")
        elif socks_atyp == self.SOCKS_ADDR_TYPE["IPV6"]:
            host_data = yield src_stream.read_bytes(16)
            host = ipaddress.IPv6Address(host_data).compressed
        else:
            raise ProtocolError("not support socks address type")

        port_data = yield src_stream.read_bytes(2)
        port, = struct.unpack("!H", port_data)
        logger.debug("socks request to {0}:{1}".format(host, port))
        raise gen.Return((host,
                          port,
                          socks_atyp))

    @gen.coroutine
    def socks_response_with_dest_stream_creation(self, host, port, addr_type):
        src_stream = self.context.src_stream
        dest_stream = self.create_dest_stream((host, port))

        try:
            yield src_stream.write(struct.pack("!BBx",
                                               self.SOCKS_VERSION,
                                               self.SOCKS_RESP_STATUS["SUCCESS"]))
            if addr_type == self.SOCKS_ADDR_TYPE["IPV4"]:
                yield src_stream.write(struct.pack('!B', self.SOCKS_ADDR_TYPE["IPV4"]))
                yield src_stream.write(ipaddress.IPv4Address(host).packed)
            elif addr_type == self.SOCKS_ADDR_TYPE["IPV6"]:
                yield src_stream.write(struct.pack('!B', self.SOCKS_ADDR_TYPE["IPV6"]))
                yield src_stream.write(ipaddress.IPv6Address(host).packed)
            elif addr_type == self.SOCKS_ADDR_TYPE["DOMAINNAME"]:
                yield src_stream.write(struct.pack("!BB",
                                                   self.SOCKS_ADDR_TYPE["DOMAINNAME"],
                                                   len(host)))
                yield src_stream.write(host.encode("idna"))
            yield src_stream.write(struct.pack("!H", port))
            raise gen.Return(dest_stream)
        except iostream.StreamClosedError as e:
            dest_stream.close()
            raise SrcStreamClosedError(e)
|
Python
| 0
|
@@ -1533,17 +1533,17 @@
d_bytes(
-3
+2
)%0A%0A
@@ -1666,11 +1666,8 @@
thod
-, _
= s
@@ -1682,17 +1682,16 @@
pack('BB
-B
', data)
@@ -1692,16 +1692,68 @@
data)%0A%0A
+ yield src_stream.read_bytes(socks_nmethod)%0A%0A
@@ -1884,43 +1884,8 @@
))%0A%0A
- if socks_nmethod == 1:%0A
@@ -1940,36 +1940,32 @@
ION, 0)%0A
-
yield src_stream
@@ -1984,83 +1984,8 @@
nse)
-%0A else:%0A raise ProtocolError(%22socks5 auth not supported%22)
%0A%0A
|
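The fix matches RFC 1928: a SOCKS5 greeting is a fixed two-byte header (version, NMETHODS) followed by exactly NMETHODS method bytes, so reading a fixed three bytes only works for clients that offer a single auth method; curl offers several. The corrected parse, sketched over an in-memory buffer:

```python
import struct
from io import BytesIO

# Greeting from a client offering two auth methods (0x00 and 0x02).
stream = BytesIO(b"\x05\x02\x00\x02")

version, nmethods = struct.unpack("BB", stream.read(2))
methods = stream.read(nmethods)  # consume the variable-length method list
assert version == 5 and len(methods) == nmethods
print(version, nmethods, methods)  # 5 2 b'\x00\x02'
```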
73fbfd435c849c0690121b0a3fc8545057247c8a
|
Fix command options issues
|
mistral_actions/client/shell.py
|
mistral_actions/client/shell.py
|
import sys

from mistral_actions.client import actions as actions_cli
import mistral_actions.utils as utils


def do_clear(args):
    """Unregister all actions from Mistral."""
    actions_cli.unregister_all()
    print("All actions are removed from Mistral successfully.")


@utils.arg(
    '--override',
    dest='override',
    action="store_true",
    default=False,
    help="Set true will override all actions exist in Mistral.")
def do_register(args):
    """Register all actions to Mistral."""
    registered_actions = actions_cli.get_all_registered()
    discovered_actions = actions_cli.discover()
    registered_action_names = [a['name'] for a in registered_actions]
    discovered_action_names = [a['name'] for a in discovered_actions]
    intersection = set(registered_action_names) & set(discovered_action_names)
    if args.override:
        for name in intersection:
            actions_cli.unregister(name)
    else:
        discovered_actions = filter(
            lambda a: a['name'] not in registered_action_names,
            discovered_actions)
    actions_cli.register_all(discovered_actions)


def do_discover(args):
    """Discover all actions from this project."""
    discovered_actions = actions_cli.discover()
    fileds = ['name', 'description', 'input_str']
    print("Follow actions discovered: ")
    utils.print_list(discovered_actions, fileds, sortby_index=0)


@utils.arg('name', metavar='<name>', help='Name of action.')
def do_unregister(args):
    """Unregister a action from Mistral."""
    name = args.name
    sys.argv.remove(name)
    actions_cli.unregister(name)


def do_md_dump(args):
    """Dump all discovered actions to stdout."""
    sorted_actions = sorted(actions_cli.discover(), key=lambda a: a['name'])
    fileds = ['name', 'description', 'input_str']
    utils.dump_as_markdown_table(sorted_actions, fileds)


def do_action_list(args):
    """List all actions has been registered in Mistral."""
    actions = actions_cli.get_all_registered()
    fileds = ['name', 'description', 'input_str']
    utils.print_list(actions, fileds, sortby_index=0)
|
Python
| 0.000017
|
@@ -491,24 +491,125 @@
Mistral.%22%22%22%0A
+ override = args.override%0A try:%0A sys.argv.remove(%22--override%22)%0A except:%0A pass%0A
register
@@ -650,24 +650,24 @@
egistered()%0A
-
discover
@@ -928,21 +928,16 @@
%0A if
-args.
override
@@ -1164,51 +1164,389 @@
-actions_cli.register_all(discovered_actions
+if len(discovered_actions):%0A try:%0A actions_cli.register_all(discovered_actions)%0A print(%22Follow actions have been registered: %22)%0A for action in discovered_actions:%0A print(action%5B'name'%5D)%0A except Exception as ex:%0A print(%22Fail to register actions: %25s%22 %25 ex)%0A else:%0A print(%22No action need to register.%22
)%0A%0A%0A
@@ -2044,17 +2044,23 @@
def do_m
-d
+arkdown
_dump(ar
@@ -2108,16 +2108,34 @@
o stdout
+ as markdown table
.%22%22%22%0A
@@ -2369,17 +2369,18 @@
tions ha
-s
+ve
been re
|
f846f58891e1389941f008e3f53c95ffd1b6558d
|
Update to add email functionality based on threshold checking.
|
dbtracker/__init__.py
|
dbtracker/__init__.py
|
import logging
from dbtracker.cli import Cli
import argparse


def main(argv=None):
    parser = argparse.ArgumentParser(
        description="Queries MySQL and PostgreSQL for stats")
    parser.add_argument(
        "-S", "--save",
        action="store_true",
        help="generate and save database stats")
    parser.add_argument(
        "-g", "--growth",
        help="display a graph of the growth. Arguments in the form of run number ranges e.g. 3-4 or 4",
        type=str)
    parser.add_argument(
        "-H", "--history",
        help="List the datetime stamps of the last n saved runs",
        type=int)
    parser.add_argument(
        "-c", "--count",
        action="store_true",
        help="Gets database row counts but does not save")
    parser.add_argument(
        "-d", "--dates",
        type=str,
        help="compares two datetime stamps e.g. 2015-04-24 16:18:57.166095-07:00 - 2015-04-22 17:00:50.746688-07:00")
    parser.add_argument(
        "-s", "--silent",
        action="store_true",
        help="turns logging levels down to ERROR only")
    parser.add_argument(
        "-C", "--config",
        type=str,
        help="use a custom configuration file path")

    args = parser.parse_args(argv)

    if args.silent:
        logging.basicConfig(level=logging.ERROR)
    else:
        logging.basicConfig(level=logging.INFO)

    cli = Cli(args)
    cli.main()
|
Python
| 0
|
@@ -1091,32 +1091,334 @@
r.add_argument(%0A
+ %22--min%22,%0A type=int,%0A help=%22Minimum threshold of a database row change, before a notification is sent.%22)%0A parser.add_argument(%0A %22--max%22,%0A type=int,%0A help=%22Maximum threshold of a database row change, before a notification is sent.%22)%0A parser.add_argument(%0A
%22-C%22, %22-
|
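The new `--min`/`--max` options gate notifications on how far a row count moved between runs. One plausible reading of the flags, as a sketch (the actual check lives in the project's `Cli`, not shown here):

```python
def should_notify(change, min_threshold, max_threshold):
    # Alert when the absolute row-count change falls outside the
    # configured band (one plausible interpretation of --min/--max).
    return not (min_threshold <= abs(change) <= max_threshold)

print(should_notify(50, 100, 10000))     # True: below the expected band
print(should_notify(500, 100, 10000))    # False: within the band
print(should_notify(50000, 100, 10000))  # True: above the band
```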
4e93ab58167752b655f405e809fe76971af92865
|
Adding epoch
|
LogReg/LogReg.py
|
LogReg/LogReg.py
|
import numpy as np
import math


class LogReg:
    __class = 10
    __basisFct = None
    __weights = None
    __bias = 0
    __features = None
    __error = 0
    __cost = None
    __learningRate = 0
    __targets = None

    def __init__(self, dataSet, learningRate=0.0001):
        self.__features = self.__getFeatures(dataSet)
        self.__targets = self.__getLabel(dataSet)
        self.__learningRate = learningRate
        np.random.seed()
        self.__weights = np.random.randn(len(dataSet.data_instances[0].features)+1, self.__class)

    @property
    def classes(self):
        return self.__class

    @classes.setter
    def classes(self, value):
        self.__class = value

    @property
    def basisFct(self):
        return self.__basisFct

    @basisFct.setter
    def basisFct(self, value):
        self.__basisFct = value

    @property
    def weights(self):
        return self.__weights

    @weights.setter
    def weights(self, value):
        self.__weights = value

    @property
    def bias(self):
        return self.__bias

    @bias.setter
    def bias(self, value):
        self.__bias = value

    @property
    def features(self):
        return self.__features

    @features.setter
    def features(self, value):
        self.__features = value

    @property
    def error(self):
        return self.__error

    @error.setter
    def error(self, value):
        self.__error = value

    @property
    def learningRate(self):
        return self.__learningRate

    @learningRate.setter
    def learningRate(self, value):
        self.__learningRate = value

    @property
    def targets(self):
        return self.__targets

    @targets.setter
    def targets(self, value):
        self.__targets = value

    def __softmax(self, W, X):
        numerator = np.dot(X,W)
        numerator -= np.max(numerator)
        return np.exp(numerator) / np.sum(np.exp(numerator))

    def __updateWeights(self, prob, target, feature):
        index = np.argmax(target)
        self.__weights[:, index] -= self.__learningRate * self.__grad(prob,target,feature)

    def __maxProb(self, prob):
        return np.argmax(prob)

    def train(self):
        i = 1
        print "Training ...\n\n"
        for feature, target in zip(self.__features, self.__targets):
            prob = self.__softmax(self.__weights, feature)
            self.__error += (1/i) * self.__logLikelihood(target, prob)
            self.__updateWeights(prob, target, feature)
        print "Training finished"
        print "------------------------------"


    def __getFeatures(self, dataSet):
        listOfList = []
        for instance in dataSet.data_instances:
            valueList = instance.features.values()
            valueList.append(1)
            listOfList.append(valueList)
        features = np.array(listOfList)
        return features

    def __getLabel(self, dataSet):
        target = np.zeros((len(dataSet.data_instances),self.__class))
        i = 0
        for instance in dataSet.data_instances:
            target[i,instance.label] = 1
            i += 1
        return target

    def __logLikelihood(self, target, prob):
        return -np.dot(target,np.log(prob))

    def __grad(self, prob, target, feature):
        return -(target[np.argmax(target)] - prob[np.argmax(prob)]) * feature

    def __predict(self, prob=None, feature=None):
        if prob is None:
            prob = self.__softmax(self.__weights,feature)
        index = np.argmax(prob)
        prediction = np.zeros(self.__class)
        prediction[index] = 1
        return prediction

    def test(self, dataSet):
        self.__features = self.__getFeatures(dataSet)
        self.__targets = self.__getLabel(dataSet)
        goodPred = 0
        badPred = 0
        print "Testing ..."
        for feature, target in zip(self.__features, self.__targets):
            prob = self.__predict(feature=feature)
            predict = np.argmax(prob)
            if predict == np.argmax(target):
                goodPred += 1
            else:
                badPred += 1
        pred = ((1.0*goodPred)/(goodPred+badPred)) * 100.0
        print "Percentage of good prediction is: " + str(pred)
|
Python
| 0.999407
|
@@ -2223,16 +2223,55 @@
..%5Cn%5Cn%22%0A
+ for epoch in range(0, 10):%0A
@@ -2323,32 +2323,36 @@
elf.__targets):%0A
+
prob
@@ -2398,32 +2398,36 @@
re)%0A
+
self.__error +=
@@ -2465,24 +2465,105 @@
rget, prob)%0A
+ print self.__error%0A if self.__error != 0:%0A
@@ -2598,32 +2598,55 @@
arget, feature)%0A
+ i += 1%0A
print %22T
@@ -2711,17 +2711,16 @@
-----%22%0A%0A
-%0A
def
|
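The `numerator -= np.max(numerator)` line in `__softmax` above is the standard max-subtraction trick for numerical stability. As a minimal standalone sketch (not the author's code), the same idea for a single 1-D score vector:

import numpy as np

def stable_softmax(scores):
    # Subtracting the max changes nothing mathematically, since
    # exp(x - c) / sum(exp(x - c)) == exp(x) / sum(exp(x)),
    # but it keeps np.exp from overflowing on large scores.
    exps = np.exp(scores - np.max(scores))
    return exps / np.sum(exps)

print(stable_softmax(np.array([1000.0, 1001.0, 1002.0])))  # no overflow warning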
516844b2d34da22a4ad567ba25f900e1f747327c
|
exclude unversioned protos (#2672)
|
Logging/synth.py
|
Logging/synth.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import os
import synthtool as s
import synthtool.gcp as gcp
import logging
logging.basicConfig(level=logging.DEBUG)
gapic = gcp.GAPICGenerator()
common = gcp.CommonTemplates()
library = gapic.php_library(
service='logging',
version='v2',
config_path='/google/logging/artman_logging.yaml',
artman_output_name='google-cloud-logging-v2')
# copy all src including partial veneer classes
s.move(library / 'src')
# copy proto files to src also
s.move(library / 'proto/src/Google/Cloud/Logging', 'src/')
s.move(library / 'tests/')
# copy GPBMetadata file to metadata
s.move(library / 'proto/src/GPBMetadata/Google/Logging', 'metadata/')
# document and utilize apiEndpoint instead of serviceAddress
s.replace(
"**/Gapic/*GapicClient.php",
r"'serviceAddress' =>",
r"'apiEndpoint' =>")
s.replace(
"**/Gapic/*GapicClient.php",
r"@type string \$serviceAddress\n\s+\*\s+The address",
r"""@type string $serviceAddress
* **Deprecated**. This option will be removed in a future major release. Please
* utilize the `$apiEndpoint` option instead.
* @type string $apiEndpoint
* The address""")
s.replace(
"**/Gapic/*GapicClient.php",
r"\$transportConfig, and any \$serviceAddress",
r"$transportConfig, and any `$apiEndpoint`")
# fix year
s.replace(
'**/Gapic/*GapicClient.php',
r'Copyright \d{4}',
r'Copyright 2016')
for client in ['ConfigServiceV2', 'LoggingServiceV2', 'MetricsServiceV2']:
s.replace(
f'**/V2/{client}Client.php',
r'Copyright \d{4}',
'Copyright 2016')
s.replace(
'tests/**/V2/*Test.php',
r'Copyright \d{4}',
r'Copyright 2018')
### [START] protoc backwards compatibility fixes
# roll back to private properties.
s.replace(
"src/**/V*/**/*.php",
r"Generated from protobuf field ([^\n]{0,})\n\s{5}\*/\n\s{4}protected \$",
r"""Generated from protobuf field \1
*/
private $""")
# prevent proto messages from being marked final
s.replace(
"src/**/V*/**/*.php",
r"final class",
r"class")
# Replace "Unwrapped" with "Value" for method names.
s.replace(
"src/**/V*/**/*.php",
r"public function ([s|g]\w{3,})Unwrapped",
r"public function \1Value"
)
### [END] protoc backwards compatibility fixes
|
Python
| 0
|
@@ -1100,32 +1100,37 @@
src also%0As.move(
+%0A
library / 'proto
@@ -1160,16 +1160,76 @@
ng',
+%0A
'src/'
-)
+,%0A %5Blibrary / 'proto/src/Google/Cloud/Logging/Type'%5D)%0A
%0As.m
@@ -1288,32 +1288,37 @@
metadata%0As.move(
+%0A
library / 'proto
@@ -1350,16 +1350,20 @@
ogging',
+%0A
'metada
@@ -1366,16 +1366,77 @@
tadata/'
+,%0A %5Blibrary / 'proto/src/GPBMetadata/Google/Logging/Type'%5D
)%0A%0A# doc
|
838d8c8952f63464dfafaaeba3b16b681317c15e
|
add plot
|
tests/test_annotate.py
|
tests/test_annotate.py
|
import matplotlib.pyplot as plt
import numpy as np
def plot():
fig = plt.figure(1, figsize=(8, 5))
ax = fig.add_subplot(111, autoscale_on=False, xlim=(-1, 5), ylim=(-4, 3))
t = np.arange(0.0, 5.0, 0.2)
s = np.cos(2 * np.pi * t)
ax.plot(t, s, color="blue")
ax.annotate(
"text",
xy=(4.0, 1.0),
xycoords="data",
xytext=(4.5, 1.5),
textcoords="data",
arrowprops=dict(arrowstyle="->", ec="r"),
)
ax.annotate(
"arrowstyle",
xy=(0, 1),
xycoords="data",
xytext=(-50, 30),
textcoords="offset points",
arrowprops=dict(arrowstyle="->"),
)
ax.annotate(
"no arrow",
xy=(0, 1),
xycoords="data",
xytext=(50, -30),
textcoords="offset pixels",
)
return fig
def test():
from .helpers import assert_equality
assert_equality(plot, __file__[:-3] + "_reference.tex")
|
Python
| 0.000094
|
@@ -935,8 +935,63 @@
e.tex%22)%0A
+%0A%0Aif __name__ == %22__main__%22:%0A plot()%0A plt.show()%0A
|
8e10a62052f252c21c3898f70fc10d23c7261af0
|
Update urls.py
|
submify/submify/urls.py
|
submify/submify/urls.py
|
"""submify URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/', include('allauth.urls')),
url(r'^student/', include('student.urls'))
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
Python
| 0.000002
|
@@ -171,16 +171,17 @@
xamples:
+:
%0AFunctio
|
6b9da294869c0c63502b59758abbe002be65944f
|
ensure timestamps are utc
|
tests/test_contents.py
|
tests/test_contents.py
|
from fixtures import *
import sys
import struct
import logging
import datetime
import binascii
#logging.basicConfig(level=logging.DEBUG)
def make_string_name_key(name):
return b'N' + name.encode('utf-8')
def make_int_name_key(name, wordsize=4):
if wordsize == 4:
return b'N' + struct.pack('<BI', 0, name)
elif wordsize == 8:
return b'N' + struct.pack('<BQ', 0, name)
else:
raise RuntimeError('unexpected wordsize')
def make_name_key(name, wordsize=4):
if isinstance(name, str):
return make_string_name_key(name)
else:
return make_int_name_key(name, wordsize=wordsize)
def get_nodeid(idb, name, wordsize=4):
# TODO: 64bit
key = make_name_key(name)
cursor = idb.id0.find(key)
if wordsize == 4:
return struct.unpack('<I', cursor.value)[0]
elif wordsize == 8:
return struct.unpack('<Q', cursor.value)[0]
else:
raise RuntimeError('unexpected wordsize')
def make_complex_key(nodeid, tag, index, wordsize=4):
if wordsize == 4:
wordformat = 'I'
elif wordsize == 8:
wordformat = 'Q'
else:
raise RuntimeError('unexpected wordsize')
fmt = '>s' + wordformat + 's' + wordformat
tag = tag.encode('utf-8')
if isinstance(index, str):
index = index.encode('utf-8')
elif isinstance(index, int) and index < 0:
fmt = '>s' + wordformat + 's' + wordformat.lower()
return struct.pack(fmt, b'.', nodeid, tag, index)
def get_int(idb, nodeid, tag, index):
key = make_complex_key(nodeid, tag, index)
cursor = idb.id0.find(key)
data = cursor.value
if data is None:
raise KeyError((nodeid, tag, index))
if len(data) == 1:
return struct.unpack('<B', data)[0]
elif len(data) == 2:
return struct.unpack('<H', data)[0]
elif len(data) == 4:
return struct.unpack('<L', data)[0]
elif len(data) == 8:
return struct.unpack('<Q', data)[0]
else:
return RuntimeError('unexpected data size')
def get_string(idb, nodeid, tag, index):
key = make_complex_key(nodeid, tag, index)
cursor = idb.id0.find(key)
data = cursor.value
if data is None:
raise KeyError((nodeid, tag, index))
return bytes(data).rstrip(b'\x00').decode('utf-8')
def get_bytes(idb, nodeid, tag, index):
key = make_complex_key(nodeid, tag, index)
cursor = idb.id0.find(key)
data = cursor.value
if data is None:
raise KeyError((nodeid, tag, index))
return bytes(data)
class ROOT_INDEX:
'''
via: https://github.com/williballenthin/pyidbutil/blob/master/idbtool.py#L182
'''
VERSION = -1
VERSION_STRING = 1303
PARAM = 0x41b994
OPEN_COUNT = -4
CREATED = -2
CRC = -5
MD5 = 1302
def test_root(kernel32_idb):
root = get_nodeid(kernel32_idb, 'Root Node')
assert get_int(kernel32_idb, root, 'A', ROOT_INDEX.VERSION) == 695
assert get_string(kernel32_idb, root, 'S', ROOT_INDEX.VERSION_STRING) == '6.95'
assert get_int(kernel32_idb, root, 'A', ROOT_INDEX.OPEN_COUNT) == 1
ts = get_int(kernel32_idb, root, 'A', ROOT_INDEX.CREATED)
ts = datetime.datetime.fromtimestamp(ts)
assert ts.isoformat() == '2017-06-20T18:31:34'
assert get_int(kernel32_idb, root, 'A', ROOT_INDEX.CRC) == 0xdf9bdf12
md5 = get_bytes(kernel32_idb, root, 'S', ROOT_INDEX.MD5)
md5 = binascii.hexlify(md5).decode('ascii')
assert md5 == '00bf1bf1b779ce1af41371426821e0c2'
|
Python
| 0.999998
|
@@ -3175,16 +3175,19 @@
atetime.
+utc
fromtime
@@ -3237,18 +3237,18 @@
7-06-20T
-18
+22
:31:34'%0A
|
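The diff above swaps `datetime.datetime.fromtimestamp` for `datetime.datetime.utcfromtimestamp`, which is why the expected hour moves from 18:31 to 22:31: the former interprets the epoch value in the machine's local timezone, the latter always in UTC. A minimal sketch of the difference (the epoch value here is hypothetical, not the one from the test):

import datetime

ts = 1497997894  # hypothetical epoch value

print(datetime.datetime.fromtimestamp(ts))     # rendered in the local timezone
print(datetime.datetime.utcfromtimestamp(ts))  # always rendered as UTC
# Note: utcfromtimestamp is deprecated in Python 3.12+ in favor of
# datetime.datetime.fromtimestamp(ts, tz=datetime.timezone.utc)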
cfb68d7e1146241b9783d82d09f7f813e658d4aa
|
fix doctests
|
tests/test_doctests.py
|
tests/test_doctests.py
|
# encoding: utf8
from quantiphy import Quantity
import pytest
import doctest
import glob
import sys
def test_README():
if sys.version_info < (3, 6):
# code used in doctests assumes python3.6
return
Quantity.reset_prefs()
rv = doctest.testfile('../README.rst', optionflags=doctest.ELLIPSIS)
assert rv.failed == 0
assert rv.attempted == 29
def test_quantiphy():
if sys.version_info < (3, 6):
# code used in doctests assumes python3.6
return
Quantity.reset_prefs()
rv = doctest.testfile('../quantiphy.py', optionflags=doctest.ELLIPSIS)
assert rv.failed == 0
assert rv.attempted == 100
# this target should be updated when the number of doctests changes
def test_manual():
if sys.version_info < (3, 6):
# code used in doctests assumes python3.6
return
Quantity.reset_prefs()
expected_test_count = {
'../doc/index.rst': 29,
'../doc/user.rst': 368,
'../doc/api.rst': 0,
'../doc/examples.rst': 36,
'../doc/accessories.rst': 12,
'../doc/releases.rst': 0,
}
found = glob.glob('../doc/*.rst')
for f in found:
assert f in expected_test_count, f
for path, tests in expected_test_count.items():
rv = doctest.testfile(path, optionflags=doctest.ELLIPSIS)
assert rv.failed == 0, path
assert rv.attempted == tests, path
if __name__ == '__main__':
# As a debugging aid allow the tests to be run on their own, outside pytest.
# This makes it easier to see and interpret and textual output.
defined = dict(globals())
for k, v in defined.items():
if callable(v) and k.startswith('test_'):
print()
print('Calling:', k)
print((len(k)+9)*'=')
v()
|
Python
| 0.000001
|
@@ -928,18 +928,18 @@
x.rst':
-29
+31
,%0A
|
72f7162b2a307297798dbeb866d54de5acfdeffb
|
correct input dimension comment
|
models/alexnet_14/alexNet_14.py
|
models/alexnet_14/alexNet_14.py
|
# The Model of DeepVO
from keras.models import Sequential, Model
from keras.layers.core import Dense, Dropout, Activation, Flatten, Reshape, Lambda
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.layers.normalization import BatchNormalization
from keras import backend as K #enable tensorflow functions
#AlexNet with batch normalization in Keras
#input image is 224x224
def create_model():
"""
This model is designed to take in multiple inputs and give multiple outputs.
Here is what the network was designed for:
Inputs:
two 128x128 RGB images stacked (RGBRGB)
Outputs:
Rotation between images in quaternion form
Translation between two images
"""
main_input = Convolution2D(96, 11, 11, border_mode='same', input_shape=(128, 128, 6), name='main_input')
x = BatchNormalization()(main_input)
x = Activation('relu')(x)
x = MaxPooling2D(pool_size=(11, 11), strides=(1, 1), border_mode='same')(x)
x = Convolution2D(384, 3, 3, border_mode='same')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = MaxPooling2D(pool_size=(3, 3), strides=(1, 1), border_mode='same')(x)
x = Flatten()(x)
x = Dense(4096, init='normal')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Dense(4096, init='normal')(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
# Delta rotation in quaternion form
quaternion_rotation = Dense(4, activation='tanh', name='quaternion_rotation')(x)
quaternion_rotation = Lambda(normalize_quaternion)(quaternion_rotation)
# Delta Translation output
    translation = Dense(3, activation='linear', name='translation')(x)
model = Model(input=main_input, output=[translation, quaternion_rotation])
return model
def normalize_quaternion(x):
"use tensorflow normalize function on this layer to ensure valid quaternion rotation"
x = K.l2_normalize(x, dim=1)
return x
def run_model(model, Xtr, Ytr, Xte, Yte, save_path=None):
"Note: y should be a 2d list of quaternion rotations and translations.""
model.compile(loss='mean_squared_error', optimizer='adam', metrics=['mean_absolute_error'])
history = model.fit(Xtr, Ytr, batch_size=8, nb_epoch=30, verbose=1).history
score = model.evaluate(Xte, Yte, verbose=1)
if (save_path != None):
model.save(save_path)
return score, history
|
Python
| 0.000004
|
@@ -376,15 +376,15 @@
is
-224x224
+128x128
%0A%0Ade
@@ -2043,61 +2043,39 @@
be
-a 2d list of quaternion rotations and translations.%22%22
+%5B%5Btranslation%5D,%5Bquat rotation%5D%5D
%0A
|
70ec2171784ffb3435c108082f3b47c529741392
|
The all-important comma
|
sydent/db/valsession.py
|
sydent/db/valsession.py
|
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sydent.util.tokenutils
from sydent.validators import ValidationSession, IncorrectClientSecretException, InvalidSessionIdException, \
SessionExpiredException, SessionNotValidatedException
from sydent.util import time_msec
class ThreePidValSessionStore:
def __init__(self, syd):
self.sydent = syd
def getOrCreateTokenSession(self, medium, address, clientSecret):
cur = self.sydent.db.cursor()
cur.execute("select s.id, s.medium, s.address, s.clientSecret, s.validated, s.mtime, "
"t.token, t.sendAttemptNumber from threepid_validation_sessions s,threepid_token_auths t "
"where s.medium = ? and s.address = ? and s.clientSecret = ? and t.validationSession = s.id",
(medium, address, clientSecret))
row = cur.fetchone()
if row:
s = ValidationSession(row[0], row[1], row[2], row[3], row[4], row[5])
s.token = row[6]
s.sendAttemptNumber = row[7]
return s
sid = self.addValSession(medium, address, clientSecret, time_msec(), commit=False)
tokenString = sydent.util.tokenutils.generateNumericTokenOfLength(
int(self.sydent.cfg.get('email', 'token.length')))
cur.execute("insert into threepid_token_auths (validationSession, token, sendAttemptNumber) values (?, ?, ?)",
(sid, tokenString, -1))
self.sydent.db.commit()
s = ValidationSession(sid, medium, address, clientSecret, False, time_msec())
s.token = tokenString
s.sendAttemptNumber = -1
return s
def addValSession(self, medium, address, clientSecret, mtime, commit=True):
cur = self.sydent.db.cursor()
cur.execute("insert into threepid_validation_sessions ('medium', 'address', 'clientSecret', 'mtime')" +
" values (?, ?, ?, ?)", (medium, address, clientSecret, mtime))
if commit:
self.sydent.db.commit()
return cur.lastrowid
def setSendAttemptNumber(self, sid, attemptNo):
cur = self.sydent.db.cursor()
cur.execute("update threepid_token_auths set sendAttemptNumber = ? where id = ?", (attemptNo, sid))
self.sydent.db.commit()
def setValidated(self, sid, validated):
cur = self.sydent.db.cursor()
cur.execute("update threepid_validation_sessions set validated = ? where id = ?", (validated, sid))
self.sydent.db.commit()
def setMtime(self, sid, mtime):
cur = self.sydent.db.cursor()
cur.execute("update threepid_validation_sessions set mtime = ? where id = ?", (mtime, sid))
self.sydent.db.commit()
def getSessionById(self, sid):
cur = self.sydent.db.cursor()
cur.execute("select id, medium, address, clientSecret, validated, mtime from "+
"threepid_validation_sessions where id = ?", (sid))
row = cur.fetchone()
if not row:
return None
return ValidationSession(row[0], row[1], row[2], row[3], row[4], row[5])
def getTokenSessionById(self, sid):
cur = self.sydent.db.cursor()
cur.execute("select s.id, s.medium, s.address, s.clientSecret, s.validated, s.mtime, "
"t.token, t.sendAttemptNumber from threepid_validation_sessions s,threepid_token_auths t "
"where s.id = ? and t.validationSession = s.id", (sid,))
row = cur.fetchone()
if row:
s = ValidationSession(row[0], row[1], row[2], row[3], row[4], row[5])
s.token = row[6]
s.sendAttemptNumber = row[7]
return s
return None
def getValidatedSession(self, sid, clientSecret):
"""
Retrieve a validated and still-valid session whose client secret matches the one passed in
"""
s = self.getSessionById(sid)
if not s:
raise InvalidSessionIdException()
if not s.clientSecret == clientSecret:
raise IncorrectClientSecretException()
if s.mtime + ValidationSession.THREEPID_SESSION_VALID_LIFETIME < time_msec():
raise SessionExpiredException()
if not s.validated:
raise SessionNotValidatedException()
return s
|
Python
| 0.999994
|
@@ -3493,16 +3493,17 @@
?%22, (sid
+,
))%0A
|
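The one-character fix above works because `(sid)` is just a parenthesized expression while `(sid,)` is a one-element tuple, and DB-API `execute()` expects its parameters as a sequence. A minimal sqlite3 sketch (hypothetical table, not sydent's schema):

import sqlite3

sid = 1
assert (sid) == 1       # parentheses alone do nothing
assert (sid,) == (1,)   # the trailing comma makes a tuple

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE sessions (id INTEGER)")
conn.execute("INSERT INTO sessions VALUES (1)")
# The tuple form binds the single placeholder correctly; a bare int here
# would raise an error.
print(conn.execute("SELECT id FROM sessions WHERE id = ?", (sid,)).fetchone())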
74ce3166799f9bf3f5cdce376fd1e0d7e2a894e2
|
Remove some enigmatic or incorrect comments
|
compare.py
|
compare.py
|
import re
from copy import deepcopy
from itertools import ifilter
from parsley import makeGrammar
from sys import argv, stderr, stdout
from dhcp_objects import (Statement, RangeStmt, Pool, Subnet, Class, Subclass,
Group, Host, ConfigFile)
def first(it):
return next(it, None)
symbols = ''.join(chr(i) for i in xrange(0x21, 0x7E + 1))
comment = re.compile(r'(?:"(?:[^"\\]|\\.)*"|[^"#])*(#|$)')
def parsefile(name):
config = ConfigFile()
bindings = {
'symbols': symbols,
'config': config,
'Statement': Statement,
'RangeStmt': RangeStmt,
'Pool': Pool,
'Subnet': Subnet,
'Class': Class,
'Subclass': Subclass,
'Group': Group,
'Host': Host,
'ConfigFile': ConfigFile,
}
with open('dhcp.parsley') as g:
grammar = makeGrammar(g.read(), bindings)
with open(name) as f:
fStr = ''
for line in f:
if not line.startswith('group'):
line = line[:comment.match(line).start(1)]
if not line or line[-1] != '\n':
line += '\n' # not strictly necessary
fStr += line
g = grammar(fStr)
g.configFile()
return config
def find_in(obj, xs):
return first(ifilter(lambda x: x == obj, xs))
def has_contents(x):
return hasattr(x, 'contents') and x.contents
def has_related(x):
return hasattr(x, 'related') and x.related
def has_children(x):
return has_contents(x) or has_related(x)
def add_all(x, zs, side):
z = deepcopy(x)
z.side = side
if hasattr(z, 'related') and z.related:
for a in z.related:
a.side = side
zs.update([z]) # deep add
def deep_compare(x, y, zs):
same = True
z = deepcopy(x)
z.side = ' '
if hasattr(z, 'contents'):
z.contents = set()
if hasattr(z, 'related'):
z.related = set()
if has_contents(x) or has_contents(y):
if not compare(x, y, z, 'contents'):
same = False
zs.update([z])
if has_related(x) or has_related(y):
if not compare(x, y, z, 'related'):
same = False
zs.update([z])
return same
def shallow_compare(x, y, zs):
if not x == y:
zs.update([deepcopy(x), deepcopy(y)])
same = False
def compare(left, right, diff, childtype):
same = True
xs = getattr(left, childtype)
ys = getattr(right, childtype)
zs = getattr(diff, childtype)
for x in xs:
if x in ys: # <>
y = find_in(x, ys)
if has_children(x) or has_children(y): # non-terminal
same = deep_compare(x, y, zs)
else: # terminal
same = shallow_compare(x, y, zs)
else: # <
add_all(x, zs, '<')
same = False
for y in ys - xs: # >
add_all(y, zs, '>')
same = False
#stderr.write('================================\n')
#stderr.write(str(left))
#stderr.write('--------------------------------\n')
#stderr.write(str(right))
#stderr.write('------------- diff -------------\n')
#stderr.write(str(diff))
#stderr.write('================================\n')
return same
def compare_files(filename1, filename2, verbose=False):
if verbose:
stderr.write('## Parsing {0}...\n'.format(filename1))
one = parsefile(filename1)
if verbose:
stderr.write('## Parsing {0}...\n'.format(filename2))
two = parsefile(filename2)
diffFile = ConfigFile()
if verbose:
stderr.write('## Comparing...\n')
compare(one, two, diffFile, 'related')
return str(diffFile)
if __name__ == '__main__':
stdout.write(compare_files(argv[1], argv[2], verbose=True))
|
Python
| 0.000001
|
@@ -1139,33 +1139,8 @@
'%5Cn'
- # not strictly necessary
%0A
@@ -1691,19 +1691,8 @@
%5Bz%5D)
- # deep add
%0A%0A%0Ad
|
198d4944e961fd998d6e896b3e75ca2e815ffaa5
|
Add log to file function for vimapt package
|
src/vimapt/library/vimapt/__init__.py
|
src/vimapt/library/vimapt/__init__.py
|
Python
| 0
|
@@ -0,0 +1,125 @@
+import logging%0A%0Alogging.basicConfig(filename='/var/log/vimapt.log', level=logging.INFO)%0Alogger = logging.getLogger(__name__)%0A
|
|
a84dde598297495fe6f0f8b233b3a3761b0df7d4
|
Update test to check newer logic
|
tests/functional/test_warning.py
|
tests/functional/test_warning.py
|
def test_environ(script, tmpdir):
"""$PYTHONWARNINGS was added in python2.7"""
demo = tmpdir.join('warnings_demo.py')
demo.write('''
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
from logging import basicConfig
basicConfig()
from warnings import warn
warn("deprecated!", deprecation.PipDeprecationWarning)
''')
result = script.run('python', demo, expect_stderr=True)
assert result.stderr == \
'ERROR:pip._internal.deprecations:DEPRECATION: deprecated!\n'
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
|
Python
| 0
|
@@ -1,8 +1,25 @@
+import textwrap%0A%0A
%0Adef tes
@@ -156,12 +156,76 @@
ite(
-'''%0A
+textwrap.dedent('''%0A from logging import basicConfig%0A
from
@@ -264,16 +264,25 @@
ecation%0A
+%0A
deprecat
@@ -314,88 +314,51 @@
r()%0A
-%0Afrom logging import basicConfig%0AbasicConfig()%0A%0Afrom warnings import warn%0Awarn(%22
+ basicConfig()%0A%0A deprecation.
depr
@@ -367,20 +367,18 @@
ated
-!%22,
+(%22
deprecat
ion.
@@ -377,39 +377,55 @@
ecat
-ion.PipDeprecationWarning)%0A
+ed!%22, replacement=None, gone_in=None)%0A
''')
+)
%0A%0A
@@ -490,48 +490,27 @@
-assert result.stderr == %5C%0A 'ERROR
+expected = 'WARNING
:pip
@@ -560,16 +560,53 @@
ated!%5Cn'
+%0A assert result.stderr == expected
%0A%0A sc
|
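The test relies on `$PYTHONWARNINGS`, which the interpreter reads at startup to seed the warnings filter before any user code runs. A minimal sketch of that behavior (a generic warning, not pip's PipDeprecationWarning):

import os
import subprocess
import sys

code = "import warnings; warnings.warn('deprecated!')"
env = dict(os.environ, PYTHONWARNINGS="ignore")
result = subprocess.run([sys.executable, "-c", code],
                        env=env, capture_output=True, text=True)
print(repr(result.stderr))  # '' -- the warning was filtered at startup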
e67f419c9135dd6b5135379bd16a3f06d134259e
|
Fix metadata serialization for OPTIONS request.
|
src/waldur_core/structure/metadata.py
|
src/waldur_core/structure/metadata.py
|
from collections import OrderedDict
from django.utils.encoding import force_text
from django.utils.http import urlencode
from rest_framework import exceptions
from rest_framework import serializers
from rest_framework.metadata import SimpleMetadata
from rest_framework.request import clone_request
from rest_framework.reverse import reverse
from rest_framework.utils.field_mapping import ClassLookupDict
from waldur_core.core.utils import sort_dict
class ActionSerializer:
def __init__(self, func, name, request, view, resource):
self.func = func
self.name = name
self.request = request
self.resource = resource
self.view = view
def serialize(self):
reason = self.get_reason()
return {
'title': self.get_title(),
'method': self.get_method(),
'destructive': self.is_destructive(),
'url': self.get_url(),
'reason': reason,
'enabled': not reason
}
def is_destructive(self):
if self.name == 'destroy':
return True
return getattr(self.func, 'destructive', False)
def get_title(self):
try:
return getattr(self.func, 'title')
except AttributeError:
return self.name.replace('_', ' ').title()
def get_reason(self):
try:
self.view.initial(self.request)
except exceptions.APIException as e:
if isinstance(e, serializers.ValidationError):
return ', '.join(force_text(i) for i in e.detail)
return force_text(e)
def get_method(self):
if self.name == 'destroy':
return 'DELETE'
elif self.name == 'update':
return 'PUT'
return getattr(self.func, 'method', 'POST')
def get_url(self):
base_url = self.request.build_absolute_uri()
method = self.get_method()
if method in ('DELETE', 'PUT'):
return base_url
return base_url + self.name + '/'
def merge_dictionaries(a, b):
new = a.copy()
new.update(b)
return new
class ActionsMetadata(SimpleMetadata):
"""
Difference from SimpleMetadata class:
1) Skip read-only fields, because options are used only for provisioning new resource.
2) Don't expose choices for fields with queryset in order to reduce size of response.
3) Attach actions metadata
"""
label_lookup = ClassLookupDict(
mapping=merge_dictionaries({
serializers.JSONField: 'text'
}, SimpleMetadata.label_lookup.mapping)
)
def determine_metadata(self, request, view):
self.request = request
metadata = OrderedDict()
if view.lookup_field in view.kwargs:
metadata['actions'] = self.get_actions(request, view)
else:
metadata['actions'] = self.determine_actions(request, view)
return metadata
def get_actions(self, request, view):
"""
Return metadata for resource-specific actions,
such as start, stop, unlink
"""
metadata = OrderedDict()
actions = self.get_resource_actions(view)
resource = view.get_object()
for action_name, action in actions.items():
if action_name == 'update':
view.request = clone_request(request, 'PUT')
view.action = action_name
data = ActionSerializer(action, action_name, request, view, resource)
metadata[action_name] = data.serialize()
if not metadata[action_name]['enabled']:
continue
fields = self.get_action_fields(view, action_name, resource)
if not fields:
metadata[action_name]['type'] = 'button'
else:
metadata[action_name]['type'] = 'form'
metadata[action_name]['fields'] = fields
view.action = None
view.request = request
return metadata
@classmethod
def get_resource_actions(cls, view):
actions = {}
disabled_actions = getattr(view.__class__, 'disabled_actions', [])
for key in dir(view.__class__):
callback = getattr(view.__class__, key)
if getattr(callback, 'deprecated', False):
continue
if 'post' not in getattr(callback, 'bind_to_methods', []):
continue
if key in disabled_actions:
continue
actions[key] = callback
if 'DELETE' in view.allowed_methods and 'destroy' not in disabled_actions:
actions['destroy'] = view.destroy
if 'PUT' in view.allowed_methods and 'update' not in disabled_actions:
actions['update'] = view.update
return sort_dict(actions)
def get_action_fields(self, view, action_name, resource):
"""
Get fields exposed by action's serializer
"""
serializer = view.get_serializer(resource)
fields = OrderedDict()
if not isinstance(serializer, view.serializer_class) or action_name == 'update':
fields = self.get_fields(serializer.fields)
return fields
def get_serializer_info(self, serializer):
"""
Given an instance of a serializer, return a dictionary of metadata
about its fields.
"""
if hasattr(serializer, 'child'):
# If this is a `ListSerializer` then we want to examine the
# underlying child serializer instance instead.
serializer = serializer.child
return self.get_fields(serializer.fields)
def get_fields(self, serializer_fields):
"""
Get fields metadata skipping empty fields
"""
fields = OrderedDict()
for field_name, field in serializer_fields.items():
# Skip tags field in action because it is needed only for resource creation
# See also: WAL-1223
if field_name == 'tags':
continue
info = self.get_field_info(field, field_name)
if info:
fields[field_name] = info
return fields
def get_field_info(self, field, field_name):
"""
Given an instance of a serializer field, return a dictionary
of metadata about it.
"""
field_info = OrderedDict()
field_info['type'] = self.label_lookup[field]
field_info['required'] = getattr(field, 'required', False)
attrs = [
'label', 'help_text', 'default_value', 'placeholder', 'required',
'min_length', 'max_length', 'min_value', 'max_value', 'many', 'factor', 'units'
]
if getattr(field, 'read_only', False):
return None
for attr in attrs:
value = getattr(field, attr, None)
if value is not None and value != '':
field_info[attr] = force_text(value, strings_only=True)
if 'label' not in field_info:
field_info['label'] = field_name.replace('_', ' ').title()
if hasattr(field, 'view_name'):
list_view = field.view_name.replace('-detail', '-list')
base_url = reverse(list_view, request=self.request)
field_info['type'] = 'select'
field_info['url'] = base_url
if hasattr(field, 'query_params'):
field_info['url'] += '?%s' % urlencode(field.query_params)
field_info['value_field'] = getattr(field, 'value_field', 'url')
field_info['display_name_field'] = getattr(field, 'display_name_field', 'display_name')
if hasattr(field, 'choices') and not hasattr(field, 'queryset'):
field_info['choices'] = [
{
'value': choice_value,
'display_name': force_text(choice_name, strings_only=True)
}
for choice_value, choice_name in field.choices.items()
]
return field_info
|
Python
| 0
|
@@ -4328,61 +4328,38 @@
if
-'post' not in getattr(callback, 'bind_to_methods', %5B%5D
+not hasattr(callback, 'detail'
):%0A
|
97af84d79eea17f22bc99e432e3ee47edd81123e
|
remove integration test for setup.py error as annotations are only supported on 3.7
|
testing/test_integration.py
|
testing/test_integration.py
|
from __future__ import annotations
import os
import sys
import textwrap
from pathlib import Path
import pytest
from .wd_wrapper import WorkDir
from setuptools_scm import PRETEND_KEY
from setuptools_scm import PRETEND_KEY_NAMED
from setuptools_scm.integration import _warn_on_old_setuptools
from setuptools_scm.utils import do
@pytest.fixture
def wd(wd: WorkDir) -> WorkDir:
wd("git init")
wd("git config user.email test@example.com")
wd('git config user.name "a test"')
wd.add_command = "git add ."
wd.commit_command = "git commit -m test-{reason}"
return wd
def test_pyproject_support(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
pytest.importorskip("tomli")
monkeypatch.delenv("SETUPTOOLS_SCM_DEBUG")
pkg = tmp_path / "package"
pkg.mkdir()
pkg.joinpath("pyproject.toml").write_text(
textwrap.dedent(
"""
[tool.setuptools_scm]
fallback_version = "12.34"
[project]
name = "foo"
description = "Factory ⸻ A code generator 🏭"
authors = [{name = "Łukasz Langa"}]
dynamic = ["version"]
"""
),
encoding="utf-8",
)
pkg.joinpath("setup.py").write_text("__import__('setuptools').setup()")
res = do([sys.executable, "setup.py", "--version"], pkg)
assert res == "12.34"
PYPROJECT_FILES = {
"setup.py": "[tool.setuptools_scm]",
"setup.cfg": "[tool.setuptools_scm]",
"pyproject tool.setuptools_scm": (
"[tool.setuptools_scm]\ndist_name='setuptools_scm_example'"
),
"pyproject.project": (
"[project]\nname='setuptools_scm_example'\n[tool.setuptools_scm]"
),
}
SETUP_PY_PLAIN = "__import__('setuptools').setup()"
SETUP_PY_WITH_NAME = "__import__('setuptools').setup(name='setuptools_scm_example')"
SETUP_PY_FILES = {
"setup.py": SETUP_PY_WITH_NAME,
"setup.cfg": SETUP_PY_PLAIN,
"pyproject tool.setuptools_scm": SETUP_PY_PLAIN,
"pyproject.project": SETUP_PY_PLAIN,
}
SETUP_CFG_FILES = {
"setup.py": "",
"setup.cfg": "[metadata]\nname=setuptools_scm_example",
"pyproject tool.setuptools_scm": "",
"pyproject.project": "",
}
with_metadata_in = pytest.mark.parametrize(
"metadata_in",
["setup.py", "setup.cfg", "pyproject tool.setuptools_scm", "pyproject.project"],
)
@with_metadata_in
def test_pyproject_support_with_git(wd: WorkDir, metadata_in: str) -> None:
pytest.importorskip("tomli")
wd.write("pyproject.toml", PYPROJECT_FILES[metadata_in])
wd.write("setup.py", SETUP_PY_FILES[metadata_in])
wd.write("setup.cfg", SETUP_CFG_FILES[metadata_in])
res = wd([sys.executable, "setup.py", "--version"])
assert res.endswith("0.1.dev0")
def test_pretend_version(monkeypatch: pytest.MonkeyPatch, wd: WorkDir) -> None:
monkeypatch.setenv(PRETEND_KEY, "1.0.0")
assert wd.get_version() == "1.0.0"
assert wd.get_version(dist_name="ignored") == "1.0.0"
@with_metadata_in
def test_pretend_version_named_pyproject_integration(
monkeypatch: pytest.MonkeyPatch, wd: WorkDir, metadata_in: str
) -> None:
test_pyproject_support_with_git(wd, metadata_in)
monkeypatch.setenv(
PRETEND_KEY_NAMED.format(name="setuptools_scm_example".upper()), "3.2.1"
)
res = wd([sys.executable, "setup.py", "--version"])
assert res.endswith("3.2.1")
def test_pretend_version_named(monkeypatch: pytest.MonkeyPatch, wd: WorkDir) -> None:
monkeypatch.setenv(PRETEND_KEY_NAMED.format(name="test".upper()), "1.0.0")
monkeypatch.setenv(PRETEND_KEY_NAMED.format(name="test2".upper()), "2.0.0")
assert wd.get_version(dist_name="test") == "1.0.0"
assert wd.get_version(dist_name="test2") == "2.0.0"
def test_pretend_version_name_takes_precedence(
monkeypatch: pytest.MonkeyPatch, wd: WorkDir
) -> None:
monkeypatch.setenv(PRETEND_KEY_NAMED.format(name="test".upper()), "1.0.0")
monkeypatch.setenv(PRETEND_KEY, "2.0.0")
assert wd.get_version(dist_name="test") == "1.0.0"
def test_pretend_version_accepts_bad_string(
monkeypatch: pytest.MonkeyPatch, wd: WorkDir
) -> None:
monkeypatch.setenv(PRETEND_KEY, "dummy")
wd.write("setup.py", SETUP_PY_PLAIN)
assert wd.get_version(write_to="test.py") == "dummy"
pyver = wd([sys.executable, "setup.py", "--version"])
assert pyver == "0.0.0"
def test_own_setup_fails_on_old_python(monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setattr("sys.version_info", (3, 5))
monkeypatch.syspath_prepend(os.path.dirname(os.path.dirname(__file__)))
import setup
with pytest.raises(
RuntimeError,
match="support for python < 3.6 has been removed in setuptools_scm>=6.0.0",
):
setup.scm_version()
def testwarn_on_broken_setuptools() -> None:
_warn_on_old_setuptools("45")
with pytest.warns(RuntimeWarning, match="ERROR: setuptools==44"):
_warn_on_old_setuptools("44")
@pytest.mark.issue(611)
def test_distribution_procides_extras() -> None:
try:
from importlib.metadata import distribution # type: ignore
except ImportError:
from importlib_metadata import distribution # type: ignore
dist = distribution("setuptools_scm")
assert sorted(dist.metadata.get_all("Provides-Extra")) == ["test", "toml"]
|
Python
| 0
|
@@ -33,18 +33,8 @@
ns%0A%0A
-import os%0A
impo
@@ -4341,403 +4341,8 @@
%22%0A%0A%0A
-def test_own_setup_fails_on_old_python(monkeypatch: pytest.MonkeyPatch) -%3E None:%0A monkeypatch.setattr(%22sys.version_info%22, (3, 5))%0A monkeypatch.syspath_prepend(os.path.dirname(os.path.dirname(__file__)))%0A%0A import setup%0A%0A with pytest.raises(%0A RuntimeError,%0A match=%22support for python %3C 3.6 has been removed in setuptools_scm%3E=6.0.0%22,%0A ):%0A setup.scm_version()%0A%0A%0A
def
|
d49668dfb76e148fab6e878b2d1944a5e70a3c38
|
fix test_cookie test on windows
|
tests/integration/test_cookie.py
|
tests/integration/test_cookie.py
|
# vim:ts=4:sw=4:et:
# Copyright 2018-present Facebook, Inc.
# Licensed under the Apache License, Version 2.0
# no unicode literals
from __future__ import absolute_import, division, print_function
import os
import socket
import pywatchman
import WatchmanTestCase
@WatchmanTestCase.expand_matrix
class TestCookie(WatchmanTestCase.WatchmanTestCase):
def test_delete_cookie_dir(self):
root = self.mkdtemp()
cookie_dir = os.path.join(root, ".hg")
os.mkdir(cookie_dir)
self.touchRelative(root, "foo")
self.watchmanCommand("watch-project", root)
self.assertFileList(root, files=["foo", ".hg"])
os.rmdir(cookie_dir)
self.assertFileList(root, files=["foo"])
os.unlink(os.path.join(root, "foo"))
self.assertFileList(root, files=[])
os.rmdir(root)
with self.assertRaises(pywatchman.WatchmanError) as ctx:
result = self.assertFileList(root, files=[])
print("Should not have gotten here, but the result was:", result)
reason = str(ctx.exception)
self.assertTrue(
("No such file" in reason)
or ("root dir was removed" in reason)
or ("unable to resolve root" in reason),
msg=reason,
)
def test_other_cookies(self):
root = self.mkdtemp()
cookie_dir = os.path.join(root, ".git")
os.mkdir(cookie_dir)
self.watchmanCommand("watch", root)
host = socket.gethostname()
pid = self.watchmanCommand("get-pid")["pid"]
self.assertFileList(root, files=[".git"])
os.mkdir(os.path.join(root, "foo"))
# Same process, same watch
self.touchRelative(root, ".git/.watchman-cookie-%s-%d-1000000" % (host, pid))
cookies = [
# Same process, different watch root
"foo/.watchman-cookie-%s-%d-100000" % (host, pid),
# Same process, root dir instead of VCS dir
".watchman-cookie-%s-%d-100000" % (host, pid),
# Different process, same watch root
".git/.watchman-cookie-%s-1-100000" % host,
# Different process, root dir instead of VCS dir
".watchman-cookie-%s-1-100000" % host,
# Different process, different watch root
"foo/.watchman-cookie-%s-1-100000" % host,
]
for cookie in cookies:
self.touchRelative(root, cookie)
self.assertFileList(root, files=["foo", ".git"] + cookies)
|
Python
| 0.000001
|
@@ -1172,32 +1172,103 @@
ved%22 in reason)%0A
+ or (%22The system cannot find the file specified%22 in reason)%0A
or (
|
a08c54d524e166d913c7e395e6a36cca76243df4
|
add sqlite no-op tests
|
tests/integration/test_sqlite.py
|
tests/integration/test_sqlite.py
|
import os
import unittest
from threading import Thread
from unittest.mock import patch
from requests_cache.backends.sqlite import DbDict, DbPickleDict
from tests.integration.test_backends import BaseStorageTestCase
class SQLiteTestCase(BaseStorageTestCase):
def tearDown(self):
try:
os.unlink(self.NAMESPACE)
except Exception:
pass
def test_bulk_commit(self):
d = self.storage_class(self.NAMESPACE, self.TABLES[0])
with d.bulk_commit():
pass
d.clear()
n = 1000
with d.bulk_commit():
for i in range(n):
d[i] = i
assert list(d.keys()) == list(range(n))
def test_switch_commit(self):
d = self.storage_class(self.NAMESPACE)
d.clear()
d[1] = 1
d = self.storage_class(self.NAMESPACE)
assert 1 in d
d._can_commit = False
d[2] = 2
d = self.storage_class(self.NAMESPACE)
assert 2 not in d
assert d._can_commit is True
def test_fast_save(self):
d1 = self.storage_class(self.NAMESPACE, fast_save=True)
d2 = self.storage_class(self.NAMESPACE, self.TABLES[1], fast_save=True)
d1.clear()
n = 1000
for i in range(n):
d1[i] = i
d2[i * 2] = i
# HACK if we will not sort, fast save can produce different order of records
assert sorted(d1.keys()) == list(range(n))
assert sorted(d2.values()) == list(range(n))
def test_usage_with_threads(self):
def do_test_for(d, n_threads=5):
d.clear()
def do_inserts(values):
for v in values:
d[v] = v
def values(x, n):
return [i * x for i in range(n)]
threads = [Thread(target=do_inserts, args=(values(i, n_threads),)) for i in range(n_threads)]
for t in threads:
t.start()
for t in threads:
t.join()
for i in range(n_threads):
for x in values(i, n_threads):
assert d[x] == x
do_test_for(self.storage_class(self.NAMESPACE))
do_test_for(self.storage_class(self.NAMESPACE, fast_save=True), 20)
do_test_for(self.storage_class(self.NAMESPACE, fast_save=True))
do_test_for(self.storage_class(self.NAMESPACE, self.TABLES[1], fast_save=True))
class DbDictTestCase(SQLiteTestCase, unittest.TestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, storage_class=DbDict, **kwargs)
class DbPickleDictTestCase(SQLiteTestCase, unittest.TestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, storage_class=DbPickleDict, picklable=True, **kwargs)
@patch('requests_cache.backends.sqlite.sqlite3')
def test_connection_kwargs(mock_sqlite):
"""A spot check to make sure optional connection kwargs gets passed to connection"""
DbDict('test', timeout=0.5, invalid_kwarg='???')
mock_sqlite.connect.assert_called_with('test', timeout=0.5)
|
Python
| 0.000002
|
@@ -2422,16 +2422,389 @@
True))%0A%0A
+ def test_noop(self):%0A def do_noop_bulk(d):%0A with d.bulk_commit():%0A pass%0A del d%0A%0A d = self.storage_class(self.NAMESPACE)%0A t = Thread(target=do_noop_bulk, args=(d,))%0A t.start()%0A t.join()%0A%0A # make sure connection is not closed by the thread%0A d%5B0%5D = 0%0A assert str(d) == %22%7B0: 0%7D%22%0A%0A
%0Aclass D
|
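The `bulk_commit()` being exercised above batches many writes into one transaction instead of committing per key. As a hedged sketch of the general pattern (a generic context manager, not requests_cache's actual implementation):

import sqlite3
from contextlib import contextmanager

@contextmanager
def bulk_commit(conn):
    # Defer the commit until the block exits; roll back if it raises.
    try:
        yield conn
        conn.commit()
    except Exception:
        conn.rollback()
        raise

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE kv (k INTEGER, v INTEGER)")
with bulk_commit(conn):
    for i in range(1000):
        conn.execute("INSERT INTO kv VALUES (?, ?)", (i, i))
print(conn.execute("SELECT COUNT(*) FROM kv").fetchone())  # (1000,)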
21e95ff23a4ceca06d4bfd291f0e2b29b896af2f
|
Add tests for timeout and listen stop
|
tests/test_listener.py
|
tests/test_listener.py
|
#!/usr/bin/env python
import argparse
import os
import pytest
import pg_bawler.core
import pg_bawler.listener
class NotificationListener(
pg_bawler.core.BawlerBase,
pg_bawler.core.ListenerMixin
):
pass
class NotificationSender(
pg_bawler.core.BawlerBase,
pg_bawler.core.SenderMixin
):
pass
# TODO: Maybe as pytest fixtures?
connection_params = dict(
dbname=os.environ.get('POSTGRES_DB', 'bawler_test'),
user=os.environ.get('POSTGRES_USER', 'postgres'),
host=os.environ.get('POSTGRES_HOST'),
password=os.environ.get('POSTGRES_PASSWORD', ''))
def test_register_handlers():
listener = pg_bawler.core.ListenerMixin()
assert listener.register_handler(None) == 0
assert listener.register_handler(True) == 1
assert listener.unregister_handler(None)
assert not listener.unregister_handler(None)
def test_default_cli_parser():
parser = pg_bawler.listener.get_default_cli_args_parser()
assert isinstance(parser, argparse.ArgumentParser)
def test_resolve_handler():
handler = pg_bawler.listener.resolve_handler(
'pg_bawler.listener:default_handler')
assert handler is pg_bawler.listener.default_handler
@pytest.mark.asyncio
async def test_simple_listen():
connection_params = dict(
dbname=os.environ.get('POSTGRES_DB', 'bawler_test'),
user=os.environ.get('POSTGRES_USER', 'postgres'),
host=os.environ.get('POSTGRES_HOST'),
password=os.environ.get('POSTGRES_PASSWORD', ''))
nl = NotificationListener(connection_params=connection_params)
ns = NotificationSender(connection_params=connection_params)
payload = 'aaa'
channel_name = 'pg_bawler_test'
await nl.register_channel(channel='pg_bawler_test')
await ns.send(channel=channel_name, payload=payload)
notification = await nl.get_notification()
assert notification.channel == channel_name
assert notification.payload == payload
@pytest.mark.asyncio
async def test_get_notification_timeout():
nl = NotificationListener(connection_params=connection_params)
nl.listen_timeout = 0
await nl.register_channel(channel='pg_bawler_test')
notification = await nl.get_notification()
assert notification is None
|
Python
| 0
|
@@ -703,126 +703,219 @@
ler(
-None) == 0%0A assert listener.register_handler(True) == 1%0A assert listener.unregister_handler(None)%0A assert not
+'channel', 'handler') is None%0A assert listener.registered_channels%5B'channel'%5D == %5B'handler'%5D%0A%0A listener.unregister_handler('channel', 'handler')%0A assert listener.registered_channels%5B'channel'%5D == %5B%5D%0A
lis
@@ -939,20 +939,36 @@
handler(
-None
+'channel', 'handler'
)%0A%0A%0Adef
@@ -1353,262 +1353,8 @@
():%0A
- connection_params = dict(%0A dbname=os.environ.get('POSTGRES_DB', 'bawler_test'),%0A user=os.environ.get('POSTGRES_USER', 'postgres'),%0A host=os.environ.get('POSTGRES_HOST'),%0A password=os.environ.get('POSTGRES_PASSWORD', ''))%0A%0A
@@ -2044,32 +2044,32 @@
_notification()%0A
-
assert notif
@@ -2076,16 +2076,1529 @@
ication is None%0A
+%0A%0A@pytest.mark.asyncio%0Aasync def test_stop_on_timeout():%0A nl = NotificationListener(connection_params=connection_params)%0A nl.listen_timeout = 0%0A nl.stop_on_timeout = True%0A await nl.register_channel(channel='pg_bawler_test')%0A notification = await nl.get_notification()%0A assert notification is None%0A assert nl.is_stopped%0A%0A%0A@pytest.mark.asyncio%0Aasync def test_stop_listener():%0A nl = NotificationListener(connection_params=connection_params)%0A await nl.stop()%0A await nl.listen()%0A%0A%0A# @pytest.mark.asyncio%0A# async def test_listener_main():%0A# ns = NotificationSender(connection_params=connection_params)%0A# payload = 'pg_bawler_test'%0A#%0A# async def handler(notification, listener):%0A# assert notification.payload == payload%0A# listener.stop()%0A#%0A# pg_bawler.listener._main(%0A# connection_params=connection_params,%0A# channel='pg_bawler_test',%0A# handler=handler)%0A%0A%0A# @pytest.mark.asyncio%0A# async def test_listener_main(event_loop):%0A# ns = NotificationSender(connection_params=connection_params)%0A# nl = NotificationListener(connection_params=connection_params)%0A# payload = 'pg_bawler_test'%0A#%0A# async def handler(notification, listener):%0A# assert notification.payload == payload%0A# await listener.stop()%0A#%0A# nl.timeout = 5%0A# nl.register_handler('channel', handler)%0A# await nl.register_channel('channel')%0A# event_loop.create_task(ns.send(channel='channel', payload=payload))%0A# await nl.listen()%0A
|
9c92cf39a69bbc6a078a8ffd7fcd8ea8f95b2678
|
fix tests
|
tests/test_payments.py
|
tests/test_payments.py
|
# Test cases can be run with either of the following:
# python -m unittest discover
# nosetests -v --rednose --nologcapture
import unittest
import db
from app import payments
from db import db, models
class TestModels(unittest.TestCase):
def setUp(self):
payments.app.debug = True
payments.app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://payments:payments@localhost:5432/test'
db.drop_all() # clean up the last tests
db.create_all() # make our sqlalchemy tables
data = {'nickname' : 'my credit', 'user_id' : 1, 'payment_type' : 'credit',
'details' : {'user_name' : 'Jimmy Jones', 'card_number' : '1111222233334444',
'expires' : '01/2019', 'card_type' : 'Mastercard'}}
payment = models.Payment()
payment.deserialize(data)
db.session.add(payment)
db.session.commit()
self.app = payments.app.test_client()
def tearDown(self):
db.session.remove()
db.drop_all()
def test_db_has_one_item(self):
p1 = db.session.query(models.Payment).get(1)
self.assertNotEqual(p1, None)
p2 = db.session.query(models.Payment).get(2)
self.assertEqual(p2, None)
def test_credit_has_no_paypal_fields(self):
payment = db.session.query(models.Payment).get(1)
self.assertEqual(payment.nickname, 'my credit')
detail = payment.details
self.assertEqual(detail.is_linked, None)
self.assertEqual(detail.user_email, None)
|
Python
| 0.000001
|
@@ -139,18 +139,8 @@
st%0A%0A
-import db%0A
from
@@ -175,16 +175,20 @@
import
+app_
db, mode
@@ -395,32 +395,36 @@
2/test'%0A
+app_
db.drop_all()
@@ -458,16 +458,20 @@
+app_
db.creat
@@ -818,32 +818,36 @@
e(data)%0A
+app_
db.session.add(p
@@ -854,32 +854,36 @@
ayment)%0A
+app_
db.session.commi
@@ -965,16 +965,20 @@
+app_
db.sessi
@@ -997,16 +997,20 @@
+app_
db.drop_
@@ -1065,16 +1065,20 @@
p1 =
+app_
db.sessi
@@ -1160,16 +1160,20 @@
p2 =
+app_
db.sessi
|
00f15f47f8eeabf336e0e2a71cda48aaef270f85
|
Comment out apparently-unused code.
|
build/getversion.py
|
build/getversion.py
|
#!/usr/bin/env python
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
#
# getversion.py - Parse version numbers from C header files.
#
import os
import re
import sys
__all__ = ['Parser', 'Result']
class Result:
pass
class Parser:
def __init__(self):
self.patterns = {}
def search(self, define_name, value_name):
        'Add the name of a define to the list of search patterns.'
self.patterns[define_name] = value_name
def parse(self, file):
'Parse the file, extracting defines into a Result object.'
stream = open(file, 'rt')
result = Result()
regex = re.compile(r'^\s*#\s*define\s+(\w+)\s+(\d+)')
for line in stream.readlines():
match = regex.match(line)
if match:
try:
name = self.patterns[match.group(1)]
except:
continue
setattr(result, name, int(match.group(2)))
stream.close()
return result
def svn_extractor(parser, include_file):
'''Pull values from svn.version.h'''
p.search('SVN_VER_MAJOR', 'major')
p.search('SVN_VER_MINOR', 'minor')
p.search('SVN_VER_PATCH', 'patch')
try:
r = p.parse(include_file)
except IOError, e:
usage_and_exit(str(e))
sys.stdout.write("%d.%d.%d" % (r.major, r.minor, r.patch))
def sqlite_extractor(parser, include_file):
'''Pull values from sqlite3.h'''
p.search('SQLITE_VERSION_NUMBER', 'version')
try:
r = p.parse(include_file)
except IOError, e:
usage_and_exit(str(e))
major = r.version / 1000000
minor = (r.version - (major * 1000000)) / 1000
micro = (r.version - (major * 1000000) - (minor * 1000))
sys.stdout.write("%d.%d.%d" % (major, minor, micro))
extractors = {
'SVN' : svn_extractor,
'SQLITE' : sqlite_extractor,
}
def usage_and_exit(msg):
if msg:
sys.stderr.write("%s\n\n" % msg)
sys.stderr.write("usage: %s [SVN|SQLITE] [header_file]\n" % \
os.path.basename(sys.argv[0]))
sys.stderr.flush()
sys.exit(1)
if __name__ == '__main__':
if len(sys.argv) == 3:
extractor = extractors[sys.argv[1]]
include_file = sys.argv[2]
else:
usage_and_exit("Incorrect number of arguments")
# Extract and print the version number
p = Parser()
extractor(p, include_file)
|
Python
| 0
|
@@ -2427,16 +2427,18 @@
actor,%0A
+ #
'SQLITE
@@ -2458,16 +2458,27 @@
tractor,
+ # not used
%0A %7D%0A%0Ade
|
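The arithmetic in `sqlite_extractor` above decodes SQLite's packed version integer, defined as major*1000000 + minor*1000 + micro. The same decoding in Python 3, where integer division is spelled `//`:

n = 3036000  # e.g. SQLITE_VERSION_NUMBER for release 3.36.0
major = n // 1000000
minor = (n - major * 1000000) // 1000
micro = n - major * 1000000 - minor * 1000
print("%d.%d.%d" % (major, minor, micro))  # 3.36.0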
5d804e3dacd498ce1a5f99db22b808406e7d480c
|
raise runtimeerror for non-axisymmetric potentials in actionAngleTorus
|
galpy/actionAngle_src/actionAngleTorus.py
|
galpy/actionAngle_src/actionAngleTorus.py
|
###############################################################################
# class: actionAngleTorus
#
# Use McMillan, Binney, and Dehnen's Torus code to calculate (x,v)
# given actions and angles
#
#
###############################################################################
import warnings
import numpy
from galpy.potential import MWPotential
from galpy.util import galpyWarning
import galpy.actionAngle_src.actionAngleTorus_c as actionAngleTorus_c
from galpy.actionAngle_src.actionAngleTorus_c import _ext_loaded as ext_loaded
from galpy.potential_src.Potential import _check_c
_autofit_errvals= {}
_autofit_errvals[-1]= 'something wrong with input, usually bad starting values for the parameters'
_autofit_errvals[-2]= 'Fit failed the goal by a factor <= 2'
_autofit_errvals[-3]= 'Fit failed the goal by more than 2'
_autofit_errvals[-4]= 'Fit aborted: serious problems occured'
class actionAngleTorus(object):
"""Action-angle formalism using the Torus machinery"""
def __init__(self,*args,**kwargs):
"""
NAME:
__init__
PURPOSE:
initialize an actionAngleTorus object
INPUT:
pot= potential or list of potentials (3D)
tol= default tolerance to use when fitting tori (|dJ|/J)
OUTPUT:
instance
HISTORY:
2015-08-07 - Written - Bovy (UofT)
"""
if not 'pot' in kwargs: #pragma: no cover
raise IOError("Must specify pot= for actionAngleTorus")
self._pot= kwargs['pot']
if self._pot == MWPotential:
warnings.warn("Use of MWPotential as a Milky-Way-like potential is deprecated; galpy.potential.MWPotential2014, a potential fit to a large variety of dynamical constraints (see Bovy 2015), is the preferred Milky-Way-like potential in galpy",
galpyWarning)
if ext_loaded:
self._c= _check_c(self._pot)
if not self._c:
raise RuntimeError('The given potential is not fully implemented in C; using the actionAngleTorus code is not supported in pure Python')
else:# pragma: no cover
raise RuntimeError('actionAngleTorus instances cannot be used, because the actionAngleTorus_c extension failed to load')
self._tol= kwargs.get('tol',0.001)
return None
def __call__(self,jr,jphi,jz,angler,anglephi,anglez,**kwargs):
"""
NAME:
__call__
PURPOSE:
evaluate the phase-space coordinates (x,v) for a number of angles on a single torus
INPUT:
jr - radial action (scalar)
jphi - azimuthal action (scalar)
jz - vertical action (scalar)
angler - radial angle (array [N])
anglephi - azimuthal angle (array [N])
anglez - vertical angle (array [N])
tol= (object-wide value) goal for |dJ|/|J| along the torus
OUTPUT:
[R,vR,vT,z,vz,phi]
HISTORY:
2015-08-07 - Written - Bovy (UofT)
"""
out= actionAngleTorus_c.actionAngleTorus_xvFreqs_c(\
self._pot,
jr,jphi,jz,
angler,anglephi,anglez,
tol=kwargs.get('tol',self._tol))
if out[9] != 0:
warnings.warn("actionAngleTorus' AutoFit exited with non-zero return status %i: %s" % (out[9],_autofit_errvals[out[9]]),
galpyWarning)
return numpy.array(out[:6]).T
def xvFreqs(self,jr,jphi,jz,angler,anglephi,anglez,**kwargs):
"""
NAME:
xvFreqs
PURPOSE:
evaluate the phase-space coordinates (x,v) for a number of angles on a single torus as well as the frequencies
INPUT:
jr - radial action (scalar)
jphi - azimuthal action (scalar)
jz - vertical action (scalar)
angler - radial angle (array [N])
anglephi - azimuthal angle (array [N])
anglez - vertical angle (array [N])
tol= (object-wide value) goal for |dJ|/|J| along the torus
OUTPUT:
([R,vR,vT,z,vz,phi],OmegaR,Omegaphi,Omegaz,AutoFit error message)
HISTORY:
2015-08-07 - Written - Bovy (UofT)
"""
out= actionAngleTorus_c.actionAngleTorus_xvFreqs_c(\
self._pot,
jr,jphi,jz,
angler,anglephi,anglez,
tol=kwargs.get('tol',self._tol))
if out[9] != 0:
warnings.warn("actionAngleTorus' AutoFit exited with non-zero return status %i: %s" % (out[9],_autofit_errvals[out[9]]),
galpyWarning)
return (numpy.array(out[:6]).T,out[6],out[7],out[8],out[9])
def Freqs(self,jr,jphi,jz,**kwargs):
"""
NAME:
Freqs
PURPOSE:
return the frequencies corresponding to a torus
INPUT:
jr - radial action (scalar)
jphi - azimuthal action (scalar)
jz - vertical action (scalar)
tol= (object-wide value) goal for |dJ|/|J| along the torus
OUTPUT:
(OmegaR,Omegaphi,Omegaz)
HISTORY:
2015-08-07 - Written - Bovy (UofT)
"""
out= actionAngleTorus_c.actionAngleTorus_Freqs_c(\
self._pot,
jr,jphi,jz,
tol=kwargs.get('tol',self._tol))
if out[3] != 0:
warnings.warn("actionAngleTorus' AutoFit exited with non-zero return status %i: %s" % (out[3],_autofit_errvals[out[3]]),
galpyWarning)
return out
def hessianFreqs(self,jr,jphi,jz,**kwargs):
"""
NAME:
hessianFreqs
PURPOSE:
return the Hessian d Omega / d J and frequencies Omega corresponding to a torus
INPUT:
jr - radial action (scalar)
jphi - azimuthal action (scalar)
jz - vertical action (scalar)
tol= (object-wide value) goal for |dJ|/|J| along the torus
nosym= (False) if True, don't explicitly symmetrize the Hessian (good to check errors)
OUTPUT:
(dO/dJ,Omegar,Omegaphi,Omegaz,Autofit error message)
HISTORY:
2016-07-15 - Written - Bovy (UofT)
"""
out= actionAngleTorus_c.actionAngleTorus_hessian_c(\
self._pot,
jr,jphi,jz,
tol=kwargs.get('tol',self._tol))
if out[4] != 0:
warnings.warn("actionAngleTorus' AutoFit exited with non-zero return status %i: %s" % (out[4],_autofit_errvals[out[4]]),
galpyWarning)
if kwargs.get('nosym',False):
return out
else :# explicitly symmetrize
return (0.5*(out[0]+out[0].T),out[1],out[2],out[3],out[4])
|
Python
| 0
|
@@ -376,16 +376,27 @@
otential
+, _isNonAxi
%0Afrom ga
@@ -1577,16 +1577,149 @@
%5B'pot'%5D%0A
+ if _isNonAxi(self._pot):%0A raise RuntimeError(%22actionAngleTorus for non-axisymmetric potentials is not supported%22)%0A
|
182714b6b801107d1e1baec0dd49c218c52b1416
|
handle ctrl+c during parsing, etc
|
core/dbt/task/compile.py
|
core/dbt/task/compile.py
|
import os
import signal
import threading
from dbt.adapters.factory import get_adapter
from dbt.clients.jinja import extract_toplevel_blocks
from dbt.compilation import compile_manifest
from dbt.loader import load_all_projects
from dbt.node_runners import CompileRunner, RPCCompileRunner
from dbt.node_types import NodeType
from dbt.parser.analysis import RPCCallParser
from dbt.parser.macros import MacroParser
from dbt.parser.util import ParserUtils
import dbt.ui.printer
from dbt.logger import RPC_LOGGER as rpc_logger
from dbt.task.runnable import GraphRunnableTask, RemoteCallable
class CompileTask(GraphRunnableTask):
def raise_on_first_error(self):
return True
def build_query(self):
return {
"include": self.args.models,
"exclude": self.args.exclude,
"resource_types": NodeType.executable(),
"tags": [],
}
def get_runner_type(self):
return CompileRunner
def task_end_messages(self, results):
dbt.ui.printer.print_timestamped_line('Done.')
class RemoteCompileTask(CompileTask, RemoteCallable):
METHOD_NAME = 'compile'
def __init__(self, args, config, manifest):
super(RemoteCompileTask, self).__init__(args, config)
self._base_manifest = manifest.deepcopy(config=config)
def get_runner_type(self):
return RPCCompileRunner
def runtime_cleanup(self, selected_uids):
"""Do some pre-run cleanup that is usually performed in Task __init__.
"""
self.run_count = 0
self.num_nodes = len(selected_uids)
self.node_results = []
        self._skipped_children = {}
self._raise_next_tick = None
def _extract_request_data(self, data):
data = self.decode_sql(data)
macro_blocks = []
data_chunks = []
for block in extract_toplevel_blocks(data):
if block.block_type_name == 'macro':
macro_blocks.append(block.full_block)
else:
data_chunks.append(block.full_block)
macros = '\n'.join(macro_blocks)
sql = ''.join(data_chunks)
return sql, macros
def _get_exec_node(self, name, sql, macros):
request_path = os.path.join(self.config.target_path, 'rpc', name)
all_projects = load_all_projects(self.config)
macro_overrides = {}
sql, macros = self._extract_request_data(sql)
if macros:
macro_parser = MacroParser(self.config, all_projects)
macro_overrides.update(macro_parser.parse_macro_file(
macro_file_path='from remote system',
macro_file_contents=macros,
root_path=request_path,
package_name=self.config.project_name,
resource_type=NodeType.Macro
))
self._base_manifest.macros.update(macro_overrides)
rpc_parser = RPCCallParser(
self.config,
all_projects=all_projects,
macro_manifest=self._base_manifest
)
node_dict = {
'name': name,
'root_path': request_path,
'resource_type': NodeType.RPCCall,
'path': name + '.sql',
'original_file_path': 'from remote system',
'package_name': self.config.project_name,
'raw_sql': sql,
}
unique_id, node = rpc_parser.parse_sql_node(node_dict)
self.manifest = ParserUtils.add_new_refs(
manifest=self._base_manifest,
current_project=self.config,
node=node,
macros=macro_overrides
)
# don't write our new, weird manifest!
self.linker = compile_manifest(self.config, self.manifest, write=False)
return node
def _raise_set_error(self):
if self._raise_next_tick is not None:
raise self._raise_next_tick
def _in_thread(self, node, thread_done):
runner = self.get_runner(node)
try:
self.node_results.append(runner.safe_run(self.manifest))
except Exception as exc:
self._raise_next_tick = exc
finally:
thread_done.set()
def handle_request(self, name, sql, macros=None):
node = self._get_exec_node(name, sql, macros)
selected_uids = [node.unique_id]
self.runtime_cleanup(selected_uids)
thread_done = threading.Event()
thread = threading.Thread(target=self._in_thread,
args=(node, thread_done))
thread.start()
try:
thread_done.wait()
except KeyboardInterrupt:
adapter = get_adapter(self.config)
if adapter.is_cancelable():
for conn_name in adapter.cancel_open_connections():
rpc_logger.debug('canceled query {}'.format(conn_name))
thread.join()
else:
msg = ("The {} adapter does not support query "
"cancellation. Some queries may still be "
"running!".format(adapter.type()))
rpc_logger.debug(msg)
raise dbt.exceptions.RPCKilledException(signal.SIGINT)
self._raise_set_error()
return self.node_results[0].serialize()
|
Python
| 0.000001
|
@@ -4254,24 +4254,134 @@
cros=None):%0A
+ # we could get a ctrl+c at any time, including during parsing.%0A thread = None%0A try:%0A
node
@@ -4431,16 +4431,20 @@
+
+
selected
@@ -4468,16 +4468,20 @@
que_id%5D%0A
+
@@ -4517,32 +4517,36 @@
_uids)%0A%0A
+
+
thread_done = th
@@ -4557,24 +4557,28 @@
ing.Event()%0A
+
thre
@@ -4657,16 +4657,20 @@
+
+
args=(no
@@ -4691,24 +4691,28 @@
e))%0A
+
thread.start
@@ -4714,29 +4714,16 @@
start()%0A
- try:%0A
@@ -5011,17 +5011,47 @@
_name))%0A
-%0A
+ if thread:%0A
|
5856750be58ad6a90ba884948ea5f2a9921cdc65
|
Test with include_player_ids
|
tests/test_push_msg.py
|
tests/test_push_msg.py
|
# -*- coding: utf8 -*-
# This file is part of PYBOSSA.
#
# Copyright (C) 2017 SciFabric LTD.
#
# PYBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PYBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PYBOSSA. If not, see <http://www.gnu.org/licenses/>.
import json
from collections import namedtuple
from pbsonesignal import PybossaOneSignal
from pbsonesignal.exceptions import *
from nose.tools import raises
from mock import patch, MagicMock
FakeRequest = namedtuple('FakeRequest', ['text', 'status_code', 'headers'])
class TestPybossaOnesignal(object):
valid_notification = {u'id': u'da360e9a-09d9-4992-a803-5bca978c5e0d',
u'recipients': 3}
error_notification = {u'errors': ['an example error']}
payload = None
def setUp(self):
self.payload = {
"included_segments": ["All"],
"excluded_sements": [],
"filters": [],
"contents": {"en": "English Message"},
"headings": {"en": "Heading"},
"url": "https://yoursite.com/",
"web_buttons": [{"id": "read-more-button",
"text": "Read more",
"icon": "http://i.imgur.com/MIxJp1L.png",
"url": "https://yoursite.com"}],
"chrome_web_image": "https://yourimage.com",
"chrome_web_icon": "https://image"}
@patch('pbsonesignal.requests.post')
def test_push_msg(self, mock):
"""Test push_msg works."""
client = PybossaOneSignal(app_id="1", api_key="key")
fakeRequest = MagicMock()
fakeRequest.status_code = 200
fakeRequest.reason = 'OK'
fakeRequest.json.return_value = self.valid_notification
mock.return_value = fakeRequest
tmp = client.push_msg()
assert tmp[0] == 200
assert tmp[1] == 'OK'
assert tmp[2] == self.valid_notification
@patch('pbsonesignal.requests.post')
def test_push_msg_app_ids(self, mock):
"""Test push_msg with array app_ids works."""
client = PybossaOneSignal(app_ids=["1", "2"], api_key="key")
fakeRequest = MagicMock()
fakeRequest.status_code = 200
fakeRequest.reason = 'OK'
fakeRequest.json.return_value = self.valid_notification
mock.return_value = fakeRequest
tmp = client.push_msg()
assert tmp[0] == 200
assert tmp[1] == 'OK'
assert tmp[2] == self.valid_notification
self.payload['app_ids'] = ["1", "2"]
mock.assert_called_with(client.api_url,
headers=client.header,
json=self.payload)
@patch('pbsonesignal.requests.post')
def test_push_msg_app_id(self, mock):
"""Test push_msg with array app_id works."""
client = PybossaOneSignal(app_id="1", api_key="key")
fakeRequest = MagicMock()
fakeRequest.status_code = 200
fakeRequest.reason = 'OK'
fakeRequest.json.return_value = self.valid_notification
mock.return_value = fakeRequest
tmp = client.push_msg()
assert tmp[0] == 200
assert tmp[1] == 'OK'
assert tmp[2] == self.valid_notification
self.payload['app_id'] = "1"
mock.assert_called_with(client.api_url,
headers=client.header,
json=self.payload)
@patch('pbsonesignal.requests.post')
@raises(CreateNotification)
def test_push_msg_fail(self, mock):
"""Test push_msg works."""
client = PybossaOneSignal(app_id="1", api_key="key")
fakeRequest = MagicMock()
fakeRequest.status_code = 400
fakeRequest.reason = 'OK'
fakeRequest.json.return_value = self.error_notification
mock.return_value = fakeRequest
tmp = client.push_msg()
assert tmp[0] == 400
assert tmp[1] == 'BadRequest'
assert tmp[2] == self.error_notification
|
Python
| 0
|
@@ -4034,16 +4034,856 @@
yload)%0A%0A
+ @patch('pbsonesignal.requests.post')%0A def test_push_msg_include_player_ids(self, mock):%0A %22%22%22Test push_msg with array include_player_ids works.%22%22%22%0A client = PybossaOneSignal(app_id=%221%22, api_key=%22key%22)%0A fakeRequest = MagicMock()%0A fakeRequest.status_code = 200%0A fakeRequest.reason = 'OK'%0A fakeRequest.json.return_value = self.valid_notification%0A mock.return_value = fakeRequest%0A tmp = client.push_msg(include_player_ids=%5B%221%22%5D)%0A assert tmp%5B0%5D == 200%0A assert tmp%5B1%5D == 'OK'%0A assert tmp%5B2%5D == self.valid_notification%0A%0A self.payload%5B'app_id'%5D = %221%22%0A self.payload%5B'include_player_ids'%5D = %5B%221%22%5D%0A%0A mock.assert_called_with(client.api_url, %0A headers=client.header,%0A json=self.payload)%0A
%0A%0A @p
|
bc11e1266a5b4cd908af484f8bddf0978d00bfd4
|
Improve "nfsnapi.auth_header()".
|
nfsnapi.py
|
nfsnapi.py
|
"""Stuff to make working with the NearlyFreeSpeech.NET API easier.
>>> import nfsnapi
>>> # Replace USERNAME, API_KEY, and so on with actual values.
>>> nfsnapi.run_request("USERNAME", "API_KEY",
... "/account/ACCOUNT_NUMBER/balance")
'10.56'
>>> nfsnapi.run_request("USERNAME", "API_KEY",
... "/dns/DOMAIN/listRRs", "type=A")
(A bunch of JSON not shown.)
>>> # And so on...
This file was written by Damien Dart, <damiendart@pobox.com>. This is
free and unencumbered software released into the public domain. For more
information, please refer to the accompanying "UNLICENCE" file.
"""
__author__ = "Damien Dart, <damiendart@pobox.com>"
__license__ = "Unlicense"
__title__ = "nfsnapi"
__version__ = "0.1.0"
import hashlib
import httplib
import json
import random
import string
import time
import urllib2
def auth_header(username, API_key, request_path, request_body = ""):
"""Return a NearlyFreeSpeeech.NET authentication HTTP header field.
Returns a dictionary containing an authentication HTTP header field
required for NearlyFreeSpeech.NET API requests. For more information,
see <https://members.nearlyfreespeech.net/wiki/API/Introduction>.
- "username" should be a string containing the member login name of
the user making the request.
- "API_key" should be a string containing the API key associated with
the member login name; an API key can be obtained by submitting a
secure support request to NearlyFreeSpeeech.NET.
- "request_path" should be a string containing the path portion of the
requested URL. For example, if the requested URL is
<https://api.nearlyfreespeech.net/site/example/addAlias>,
"request_path" would be "/site/example/addAlias".
- "request_body" may be a string containing the HTTP request message
body for HTTP POST requests or an empty string if no such data is
required. The data should be in the standard
"application/x-www-form-urlencoded" format.
"""
salt = "".join(random.choice(string.ascii_letters) for i in range(16))
timestamp = str(int(time.time()))
return { "X-NFSN-Authentication" : ";".join([username, timestamp, salt,
hashlib.sha1(";".join([username, timestamp, salt, API_key,
request_path, hashlib.sha1(request_body).hexdigest()])).hexdigest()]) }
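# Illustrative output (hedged; the salt is random and the timestamp changes on
# every call, so the exact header value below is purely hypothetical):
#
#   >>> auth_header("demo", "APIKEY", "/site/example/addAlias")
#   {'X-NFSN-Authentication': 'demo;1438931399;aBcDeFgHiJkLmNoP;<40-char sha1 hexdigest>'}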
def run_request(username, API_key, request_path, request_body = None):
"""Run a NearlyFreeSpeech.NET API request, return a string response.
NOTE: As this method uses the "urllib2.urlopen" function to run
requests, the API server's certificate is not verified.
The NearlyFreeSpeech.net API documentation is unclear on whether every
successful API call returns a valid JSON-encoded associative array,
hence why any response is returned as a string. This method raises
"NFSNAPIRequestError" on errors.
- "username" should be a string containing the member login name of
the user making the request.
- "API_key" should be a string containing the API key associated with
the member login name; an API key can be obtained by submitting a
secure support request to NearlyFreeSpeeech.NET.
- "request_path" should be a string containing the path portion of the
requested URL. For example, if the requested URL is
<https://api.nearlyfreespeech.net/site/example/addAlias>,
"request_path" would be "/site/example/addAlias". The trailing
forward-slash is optional.
- "request_body" may be a string containing the HTTP request message
body for HTTP POST requests or "None" for HTTP GET requests.
Pass an empty string for HTTP POST requests that do not require a
message body. The data should be in the standard
"application/x-www-form-urlencoded" format.
"""
try:
if (request_path[0] != "/"):
request_path = "/%s" % request_path
return urllib2.urlopen(urllib2.Request(
"https://api.nearlyfreespeech.net%s" % request_path, request_body,
dict(auth_header(username, API_key, request_path, request_body or ""),
**{"User-Agent": "nfsnapi/" + __version__ +
" +https://github.com/damiendart/nfsnapi-python"}))).read()
except httplib.HTTPException as e:
raise NFSNAPIRequestError(str(e))
except urllib2.HTTPError as e:
try:
error = json.loads(e.read())
raise NFSNAPIRequestError("\n".join([error["error"], error["debug"]]))
except (KeyError, ValueError):
raise NFSNAPIRequestError(str(e.reason))
except urllib2.URLError as e:
raise NFSNAPIRequestError(str(e.reason))
class NFSNAPIRequestError(Exception):
"""Raised when an NearlyFreeSpeech.NET API request fails.
Every instance will have a "reason" attribute, a string with the
reason for the error. If the offending request resulted in a 4XX or
5XX HTTP response, the attribute will contain the "human-readable" and
debug error messages returned by the NearlyFreeSpeech.NET API,
separated by a new-line (for more information, see
<https://members.nearlyfreespeech.net/wiki/API/Introduction>).
"""
def __init__(self, reason):
Exception.__init__(self, reason)
self.reason = reason
|
Python
| 0
|
@@ -1604,70 +1604,8 @@
is%0A
- %3Chttps://api.nearlyfreespeech.net/site/example/addAlias%3E,%0A
@@ -1653,16 +1653,60 @@
dAlias%22.
+ The trailing%0A forward-slash is optional.
%0A - %22re
@@ -1943,16 +1943,87 @@
%0A %22%22%22%0A%0A
+ if (request_path%5B0%5D != %22/%22):%0A request_path = %22/%25s%22 %25 request_path%0A
salt =
|
ee0f31857028a68116f2912054877f37bd64683a
|
fix vdsClient connections
|
ovirt_hosted_engine_ha/broker/submonitor_util.py
|
ovirt_hosted_engine_ha/broker/submonitor_util.py
|
#
# ovirt-hosted-engine-ha -- ovirt hosted engine high availability
# Copyright (C) 2013 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
import logging
import socket
import time
from otopi import util
from vdsm import vdscli
from . import constants
def run_vds_client_cmd(address, use_ssl, command):
"""
Run the passed in command name from the vdsClient library and either
throw an exception with the error message or return the results.
"""
# FIXME pass context to allow for shared or persistent vdsm connection
log = logging.getLogger('SubmonitorUtil')
log.debug("Connecting to vdsClient at %s with ssl=%r", address, use_ssl)
vdsClient = util.loadModule(
path=constants.VDS_CLIENT_DIR,
name='vdsClient'
)
if vdsClient._glusterEnabled:
serv = vdsClient.ge.GlusterService()
else:
serv = vdsClient.service()
serv.use_ssl = use_ssl
if hasattr(vdscli, 'cannonizeAddrPort'):
server, server_port = vdscli.cannonizeAddrPort(
address
).split(':', 1)
serv.do_connect(server, server_port)
else:
host_port = vdscli.cannonizeHostPort(address)
serv.do_connect(host_port)
serv.do_connect(server, server_port)
log.debug("Connected")
method = getattr(serv.s, command)
retry = 0
while retry < constants.VDS_CLIENT_MAX_RETRY:
try:
response = method()
break
except socket.error:
log.debug("Error", exc_info=True)
retry += 1
time.sleep(1)
if retry >= constants.VDS_CLIENT_MAX_RETRY:
raise Exception("VDSM initialization timeout")
if response['status']['code'] != 0:
raise Exception("Error {0} from {1}: {2}",
response['status']['code'], command,
response['status']['message'])
return response
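# Hypothetical usage sketch (the command must be a method name exposed by the
# vdsm XML-RPC service; the address and port below are illustrative):
#
#   stats = run_vds_client_cmd('127.0.0.1:54321', use_ssl=True,
#                              command='getVdsStats')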
|
Python
| 0
|
@@ -1899,49 +1899,8 @@
t)%0A%0A
- serv.do_connect(server, server_port)%0A
|
727b42a1cdec461d715b845872c321326ce18554
|
Load aliases on module load
|
Modules/Alias.py
|
Modules/Alias.py
|
from ModuleInterface import ModuleInterface
from IRCResponse import IRCResponse, ResponseType
import GlobalVars
class Alias(ModuleInterface):
triggers = ["alias"]
help = 'alias <alias> <command> <params> - aliases <alias> to the specified command and parameters\n' \
'you can specify where parameters given to the alias should be inserted with $1, $2, $n. ' \
           'you can use $1+, $2+ for all parameters after the first, second one. ' \
'The whole parameter string is $0. $sender and $channel can also be used.'
def onTrigger(self, message):
if message.User.Name not in GlobalVars.admins:
return IRCResponse(ResponseType.Say, "Only my admins may create new aliases!", message.ReplyTo)
if len(message.ParameterList) <= 1:
return IRCResponse(ResponseType.Say, "Alias what?", message.ReplyTo)
triggerFound = False
for (name, module) in self.bot.moduleHandler.modules.items():
if message.ParameterList[0] in module.triggers:
return IRCResponse(ResponseType.Say, "'{}' is already a command!".format(message.ParameterList[0]), message.ReplyTo)
if message.ParameterList[1] in module.triggers:
triggerFound = True
if not triggerFound:
return IRCResponse(ResponseType.Say, "'{}' is not a valid command!".format(message.ParameterList[1]), message.ReplyTo)
if message.ParameterList[0] in self.bot.moduleHandler.commandAliases.keys():
return IRCResponse(ResponseType.Say, "'{}' is already an alias!".format(message.ParameterList[0]), message.ReplyTo)
newAlias = []
for word in message.ParameterList[1:]:
newAlias.append(word.lower())
self.bot.moduleHandler.commandAliases[message.ParameterList[0]] = newAlias
self.bot.moduleHandler.newAlias(message.ParameterList[0], newAlias)
return IRCResponse(ResponseType.Say, "Created a new alias '{}' for '{}'.".format(message.ParameterList[0], " ".join(message.ParameterList[1:])), message.ReplyTo)
|
Python
| 0
|
@@ -544,16 +544,132 @@
used.'%0A%0A
+ def onLoad(self):%0A self.bot.moduleHandler.commandAliases = self.bot.moduleHandler.loadAliases()%0A %0A
def
|
52cf1efd8b1f721d65732d16b171040d83d02b21
|
fix test_workflow
|
tests/test_workflow.py
|
tests/test_workflow.py
|
from unittest import TestCase
from dvc.graph.workflow import Workflow
from dvc.graph.commit import Commit
class TestWorkflow(TestCase):
def setUp(self):
self._commit4 = Commit('4', '3', 'name1', 'today', 'comment4')
self._commit3 = Commit('3', '2', 'name1', 'today', 'DVC repro-run ...')
self._commit2 = Commit('2', '1', 'name1', 'today', 'DVC repro-run ...')
self._commit1 = Commit('1', '', 'name1', 'today', 'comment1')
def commits_basic_test(self):
self.assertFalse(self._commit1.is_repro)
self.assertTrue(self._commit2.is_repro)
self.assertTrue(self._commit3.is_repro)
self.assertFalse(self._commit4.is_repro)
pass
def workflow_basic_test(self):
wf = Workflow('', '')
wf.add_commit(self._commit1)
wf.add_commit(self._commit2)
wf.add_commit(self._commit3)
wf.add_commit(self._commit4)
self.assertEqual(len(wf._commits), 4)
self.assertEqual(wf._commits['1'].text, self._commit1._comment + '\n' + self._commit1.hash)
self.assertEqual(wf._commits['2'].text, self._commit2._comment + '\n' + self._commit2.hash)
self.assertEqual(wf._commits['3'].text, self._commit3._comment + '\n' + self._commit3.hash)
self.assertEqual(wf._commits['4'].text, self._commit4._comment + '\n' + self._commit4.hash)
pass
def collapse_test(self):
wf = Workflow('', '')
wf.add_commit(self._commit1)
wf.add_commit(self._commit2)
wf.add_commit(self._commit3)
wf.add_commit(self._commit4)
wf.collapse_repro_commits()
self.assertEqual(len(wf._commits), 3)
self.assertEqual(wf._commits[self._commit1.hash].text, self._commit1._comment + '\n' + self._commit1.hash)
self.assertEqual(wf._commits[self._commit3.hash].text, Commit.COLLAPSED_TEXT)
self.assertTrue('2' not in wf._commits)
self.assertFalse('2' in wf._edges)
self.assertFalse('2' in wf._back_edges)
pass
def collapse_at_dead_end_test(self):
wf = Workflow('', '')
wf.add_commit(self._commit1)
wf.add_commit(self._commit2)
wf.add_commit(self._commit3) # Dead end which cannot be collapsed
self.assertEqual(len(wf._commits), 3)
wf.collapse_repro_commits()
self.assertEqual(len(wf._commits), 2)
self.assertEqual(wf._commits[self._commit1.hash].text, self._commit1._comment + '\n' + self._commit1.hash)
self.assertEqual(wf._commits[self._commit3.hash].text, Commit.COLLAPSED_TEXT)
self.assertTrue('2' not in wf._commits)
pass
def collapse_metric_commit_test(self):
value = 0.812345
branches = ['master', 'try_smth']
metric_commit3 = Commit('2', '1', 'name1', 'today', 'DVC repro-run ...',
True, value, branch_tips=branches)
wf = Workflow('', '')
wf.add_commit(self._commit1)
wf.add_commit(metric_commit3)
wf.add_commit(self._commit3)
self.assertEqual(len(wf._commits), 3)
wf.collapse_repro_commits()
self.assertEqual(len(wf._commits), 2)
self.assertEqual(wf._commits['3']._target_metric, value)
self.assertEqual(wf._commits['3'].branch_tips, branches)
pass
|
Python
| 0
|
@@ -997,32 +997,57 @@
mmits%5B'1'%5D.text,
+%0A
self._commit1._
@@ -1069,36 +1069,44 @@
+ self._commit1.
+_text_
hash
+()
)%0A self.a
@@ -1130,32 +1130,57 @@
mmits%5B'2'%5D.text,
+%0A
self._commit2._
@@ -1210,20 +1210,28 @@
commit2.
+_text_
hash
+()
)%0A
@@ -1263,32 +1263,57 @@
mmits%5B'3'%5D.text,
+%0A
self._commit3._
@@ -1343,20 +1343,28 @@
commit3.
+_text_
hash
+()
)%0A
@@ -1400,24 +1400,49 @@
s%5B'4'%5D.text,
+%0A
self._commi
@@ -1476,20 +1476,28 @@
commit4.
+_text_
hash
+()
)%0A
@@ -1849,32 +1849,57 @@
mit1.hash%5D.text,
+%0A
self._commit1._
@@ -1921,36 +1921,44 @@
+ self._commit1.
+_text_
hash
+()
)%0A self.a
@@ -2594,16 +2594,41 @@
h%5D.text,
+%0A
self._c
@@ -2666,20 +2666,28 @@
commit1.
+_text_
hash
+()
)%0A
@@ -3485,24 +3485,24 @@
, branches)%0A
-
pass
@@ -3485,24 +3485,25 @@
, branches)%0A pass
+%0A
|
ede603fd2b63f101174d4312ed77f710aaaeec3a
|
comment out test for `test_data_split_nlu`
|
tests/cli/test_rasa_data.py
|
tests/cli/test_rasa_data.py
|
import argparse
import os
from unittest.mock import Mock
import pytest
from collections import namedtuple
from typing import Callable, Text
from _pytest.monkeypatch import MonkeyPatch
from _pytest.pytester import RunResult
from rasa.cli import data
from rasa.importers.importer import TrainingDataImporter
from rasa.validator import Validator
def test_data_split_nlu(run_in_simple_project: Callable[..., RunResult]):
run_in_simple_project(
"data", "split", "nlu", "-u", "data/nlu.yml", "--training-fraction", "0.75"
)
assert os.path.exists("train_test_split")
assert os.path.exists(os.path.join("train_test_split", "test_data.md"))
assert os.path.exists(os.path.join("train_test_split", "training_data.md"))
def test_data_convert_nlu(run_in_simple_project: Callable[..., RunResult]):
run_in_simple_project(
"data",
"convert",
"nlu",
"--data",
"data/nlu.yml",
"--out",
"out_nlu_data.json",
"-f",
"json",
)
assert os.path.exists("out_nlu_data.json")
def test_data_split_help(run: Callable[..., RunResult]):
output = run("data", "split", "nlu", "--help")
help_text = """usage: rasa data split nlu [-h] [-v] [-vv] [--quiet] [-u NLU]
[--training-fraction TRAINING_FRACTION]
[--random-seed RANDOM_SEED] [--out OUT]"""
lines = help_text.split("\n")
for i, line in enumerate(lines):
assert output.outlines[i] == line
def test_data_convert_help(run: Callable[..., RunResult]):
output = run("data", "convert", "nlu", "--help")
help_text = """usage: rasa data convert nlu [-h] [-v] [-vv] [--quiet] --data DATA --out OUT
[-l LANGUAGE] -f {json,md}"""
lines = help_text.split("\n")
for i, line in enumerate(lines):
assert output.outlines[i] == line
def test_data_validate_help(run: Callable[..., RunResult]):
output = run("data", "validate", "--help")
help_text = """usage: rasa data validate [-h] [-v] [-vv] [--quiet]
[--max-history MAX_HISTORY] [--fail-on-warnings]"""
lines = help_text.split("\n")
for i, line in enumerate(lines):
assert output.outlines[i] == line
def _text_is_part_of_output_error(text: Text, output: RunResult) -> bool:
found_info_string = False
for line in output.errlines:
if text in line:
found_info_string = True
return found_info_string
def test_data_validate_stories_with_max_history_zero(monkeypatch: MonkeyPatch):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help="Rasa commands")
data.add_subparser(subparsers, parents=[])
args = parser.parse_args(["data", "validate", "stories", "--max-history", 0])
async def mock_from_importer(importer: TrainingDataImporter) -> Validator:
return Mock()
monkeypatch.setattr("rasa.validator.Validator.from_importer", mock_from_importer)
with pytest.raises(argparse.ArgumentTypeError):
data.validate_files(args)
def test_validate_files_exit_early():
with pytest.raises(SystemExit) as pytest_e:
args = {
"domain": "data/test_domains/duplicate_intents.yml",
"data": None,
"max_history": None,
}
data.validate_files(namedtuple("Args", args.keys())(*args.values()))
assert pytest_e.type == SystemExit
assert pytest_e.value.code == 1
|
Python
| 0
|
@@ -576,24 +576,140 @@
_split%22)%0A
+ # TODO: Comment back in as soon as NLU YAML writer is merged%0A # https://github.com/RasaHQ/rasa/issues/6363%0A #
assert os.p
@@ -768,24 +768,26 @@
ta.md%22))%0A
+ #
assert os.p
|
a69a346e2fd35e531c72b06a2c895d928340c110
|
Fix `includes_today` trait for `MembershipFactory`
|
tests/factories/property.py
|
tests/factories/property.py
|
from datetime import datetime, timedelta, timezone
from functools import partial
from itertools import chain
import factory
from pycroft.model.user import Membership, PropertyGroup
from pycroft.helpers import interval
from .base import BaseFactory
from .user import UserFactory
class MembershipFactory(BaseFactory):
class Meta:
model = Membership
exclude = ('begins_at', 'ends_at')
begins_at = datetime.now(timezone.utc)
ends_at = None
active_during = interval.closedopen(begins_at, ends_at)
user = factory.SubFactory(UserFactory)
# note: group is non-nullable!
group = None
class Params:
includes_today = factory.Trait(
begins_at=datetime.now(timezone.utc) - timedelta(1),
ends_at=datetime.now(timezone.utc) + timedelta(1),
)
def _maybe_append_seq(n, prefix):
"""Append a sequence value to a prefix if non-zero"""
if not n:
return prefix
return "{} {}".format(prefix, n)
class PropertyGroupFactory(BaseFactory):
class Meta:
model = PropertyGroup
exclude = ('granted', 'denied')
granted = frozenset()
denied = frozenset()
name = factory.Sequence(lambda n: "Property group %s" % n)
permission_level = factory.LazyAttribute(lambda _: 0)
@factory.lazy_attribute
def property_grants(self):
return dict(chain(((k, True) for k in self.granted),
((k, False) for k in self.denied)))
class AdminPropertyGroupFactory(PropertyGroupFactory):
name = factory.Sequence(partial(_maybe_append_seq, prefix="Admin-Gruppe"))
granted = frozenset((
'user_show', 'user_change', 'user_mac_change',
'infrastructure_show', 'infrastructure_change',
'facilities_show', 'facilities_change',
'groups_show', 'groups_change_membership', 'groups_change',
))
permission_level = 10
class FinancePropertyGroupFactory(PropertyGroupFactory):
name = factory.Sequence(partial(_maybe_append_seq, prefix="Finanzer-Gruppe"))
granted = frozenset(('finance_show', 'finance_change'))
permission_level = 80
class MemberPropertyGroupFactory(PropertyGroupFactory):
name = factory.Sequence(partial(_maybe_append_seq, prefix="Mitglied-Gruppe"))
granted = frozenset((
'ldap', 'ldap_login_enabled', 'mail', 'member', 'membership_fee',
'network_access', 'userdb', 'userwww'
))
|
Python
| 0
|
@@ -693,18 +693,59 @@
-begins_at=
+active_during=interval.closedopen(%0A
date
@@ -799,16 +799,12 @@
-ends_at=
+
date
@@ -834,32 +834,47 @@
+ timedelta(1),%0A
+ ),%0A
)%0A%0A%0Adef
|
ee098fbb610cbd95f55b33ebb69961cb16f1099d
|
Add more tests for block_checksum and content_checksum
|
tests/frame/test_frame_4.py
|
tests/frame/test_frame_4.py
|
from . helpers import (
roundtrip_LZ4FrameCompressor,
roundtrip_LZ4FrameCompressor_LZ4FrameDecompressor,
)
import os
import pytest
test_data=[
(os.urandom(128 * 1024)),
(os.urandom(256 * 1024)),
(os.urandom(512 * 1024)),
(os.urandom(1024 * 1024)),
]
@pytest.fixture(
params=test_data,
ids=[
'data' + str(i) for i in range(len(test_data))
]
)
def data(request):
return request.param
@pytest.fixture(
params=[
(True),
(False)
]
)
def reset(request):
return request.param
@pytest.fixture(
params=[
(1),
(8)
]
)
def chunks(request):
return request.param
def test_roundtrip_LZ4FrameCompressor(data, chunks, block_size, reset):
roundtrip_LZ4FrameCompressor(
data,
chunks=chunks,
block_size=block_size,
reset=reset
)
def test_roundtrip_LZ4FrameCompressor_LZ4FrameDecompressor(
data, chunks, block_size, reset):
roundtrip_LZ4FrameCompressor_LZ4FrameDecompressor(
data,
chunks=chunks,
block_size=block_size,
reset=reset
)
# class TestLZ4FrameModern(unittest.TestCase):
# def test_decompress_truncated(self):
# input_data = b"2099023098234882923049823094823094898239230982349081231290381209380981203981209381238901283098908123109238098123"
# for chksum in (lz4frame.CONTENTCHECKSUM_DISABLED, lz4frame.CONTENTCHECKSUM_ENABLED):
# for conlen in (0, len(input_data)):
# context = lz4frame.create_compression_context()
# compressed = lz4frame.compress_begin(context, content_checksum=chksum, source_size=conlen)
# compressed += lz4frame.compress_update(context, input_data)
# compressed += lz4frame.compress_end(context)
# for i in range(len(compressed)):
# with self.assertRaisesRegexp(RuntimeError, r'^(LZ4F_getFrameInfo failed with code: ERROR_frameHeader_incomplete|LZ4F_freeDecompressionContext reported unclean decompressor state \(truncated frame\?\): \d+)$'):
# lz4frame.decompress(compressed[:i])
# def test_checksum_failure(self):
# input_data = b"2099023098234882923049823094823094898239230982349081231290381209380981203981209381238901283098908123109238098123"
# compressed = lz4frame.compress(input_data, content_checksum=lz4frame.CONTENTCHECKSUM_ENABLED)
# with self.assertRaisesRegexp(RuntimeError, r'^LZ4F_decompress failed with code: ERROR_contentChecksum_invalid'):
# last = struct.unpack('B', compressed[-1:])[0]
# lz4frame.decompress(compressed[:-1] + struct.pack('B', last ^ 0x42))
# # NB: blockChecksumFlag is not supported by lz4 at the moment, so some
# # random 1-bit modifications of input may actually trigger valid output
# # without errors. And content checksum remains the same!
# def test_decompress_trailer(self):
# input_data = b"2099023098234882923049823094823094898239230982349081231290381209380981203981209381238901283098908123109238098123"
# compressed = lz4frame.compress(input_data)
# with self.assertRaisesRegexp(ValueError, r'^Extra data: 64 trailing bytes'):
# lz4frame.decompress(compressed + b'A'*64)
# # This API does not support frame concatenation!
# with self.assertRaisesRegexp(ValueError, r'^Extra data: \d+ trailing bytes'):
# lz4frame.decompress(compressed + compressed)
# def test_LZ4FrameCompressor_fails(self):
# input_data = b"2099023098234882923049823094823094898239230982349081231290381209380981203981209381238901283098908123109238098123"
# with self.assertRaisesRegexp(RuntimeError, r'compress called after flush'):
# with lz4frame.LZ4FrameCompressor() as compressor:
# compressed = compressor.compress_begin()
# compressed += compressor.compress(input_data)
# compressed += compressor.flush()
# compressed = compressor.compress(input_data)
# if sys.version_info < (2, 7):
# # Poor-man unittest.TestCase.skip for Python 2.6
# del TestLZ4FrameModern
# if __name__ == '__main__':
# unittest.main()
|
Python
| 0
|
@@ -714,32 +714,66 @@
lock_size, reset
+, block_checksum, content_checksum
):%0A roundtrip
@@ -872,32 +872,115 @@
reset=reset
+,%0A block_checksum=block_checksum,%0A content_checksum=content_checksum,
%0A )%0A%0Adef test
@@ -1070,16 +1070,50 @@
e, reset
+, block_checksum, content_checksum
):%0A r
@@ -1249,16 +1249,99 @@
et=reset
+,%0A block_checksum=block_checksum,%0A content_checksum=content_checksum,
%0A )%0A%0A
|
d3831a9ec391fb1dc7f8b7fdd2e762d0636f9aa3
|
should set sentence number instead of id
|
jsrs/ratings/models.py
|
jsrs/ratings/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from jsrs.users.models import User
from jsrs.audio.models import Audio, Sentence
from .r import mdprefml
BOOL_CHOICES = ((True, _('Yes')), (False, _('No')))
@python_2_unicode_compatible
class Ratings(models.Model):
audio_a = models.ForeignKey(Audio, related_name='audio_a_fk', on_delete=models.CASCADE)
audio_b = models.ForeignKey(Audio, related_name='audio_b_fk', on_delete=models.CASCADE)
user = models.ForeignKey(User, on_delete=models.CASCADE)
    a_gt_b = models.BooleanField(verbose_name='A is better than B', choices=BOOL_CHOICES, db_index=True) # True/1 -> a is better; False/0 -> b is better
class Meta:
verbose_name_plural = 'Ratings'
def __str__(self):
return '{}-{}-{}-{}'.format(self.audio_a, self.audio_b, self.user, self.a_gt_b)
from django.db import connection
def get_all_ratings():
cursor = connection.cursor()
cursor.execute('''
SELECT
rl.n,
COUNT(r.a_gt_b) AS f, -- reason for lateral query: need to sum over only TRUE a_gt_b
rl.audio_a_id,
rl.audio_b_id,
rl.subject
FROM
ratings_ratings AS r,
LATERAL (
SELECT
user_id AS subject,
count(a_gt_b) AS n,
audio_a_id,
audio_b_id
FROM ratings_ratings
GROUP BY audio_a_id, audio_b_id, user_id
) AS rl
WHERE
-- r.user_id NOT IN (SELECT id FROM users_user WHERE is_superuser IS TRUE) AND
r.a_gt_b IS TRUE AND
r.user_id=rl.subject AND
r.audio_a_id=rl.audio_a_id AND
r.audio_b_id=rl.audio_b_id
GROUP BY
rl.subject,
rl.n,
rl.audio_a_id,
rl.audio_b_id
ORDER BY
rl.subject,
rl.audio_a_id,
rl.audio_b_id''')
return cursor.fetchall()
# 1.0 Pick two sentences from any two speakers where rating data is missing for one or both.
# 1.1 Judge the pair.
# 1.2 Register the result in the database.
# 1.3 If every data ID has at least one judgement, go to 2.0.
# 1.4 Otherwise, go back to 1.0 until every data ID has a rating.
#
# 2.1 Pick a pair with the same sentence ID from two speakers adjacent in the ability/item estimates.
# 2.2 Judge the pair.
# 2.3 Register the result in the database.
# 2.4 Compute mdpref.
# 2.5 Go back to 2.1.
def get_unrated_pair():
    '''Pick two sentences (same text) from any two speakers where rating data is missing for one or both.'''
cursor = connection.cursor()
cursor.execute('''
SELECT
a1.id,
a2.id
FROM
ratings_ratings AS r
RIGHT JOIN
audio_audio AS a1
ON
r.audio_a_id=a1.id OR
r.audio_b_id=a1.id
JOIN -- get the pair --
audio_audio AS a2
ON
a1.id!=a2.id AND
a1.sentence=a2.sentence
WHERE
r.audio_a_id IS NULL OR
r.audio_b_id IS NULL
GROUP BY
a1.id,
a2.id
ORDER BY
count(a1.id) ASC,
count(a2.id) ASC,
a1.id,
a2.id
LIMIT 1''')
return cursor.fetchall()
def get_random_pair():
    '''Pick two sentences (same text) from any two speakers with the fewest ratings so far.'''
cursor = connection.cursor()
cursor.execute('''
SELECT
a1.id,
a2.id
FROM
ratings_ratings AS r
RIGHT JOIN
audio_audio AS a1
ON
r.audio_a_id=a1.id OR
r.audio_b_id=a1.id
JOIN -- get the pair --
audio_audio AS a2
ON
a1.id!=a2.id AND
a1.sentence=a2.sentence
GROUP BY
a1.id,
a2.id
ORDER BY
count(r.audio_a_id) ASC,
count(r.audio_b_id) ASC,
a1.id,
a2.id
LIMIT 1''')
return cursor.fetchall()
## SELECT
## a.id
## FROM
## ratings_ratings AS r
## RIGHT JOIN
## audio_audio AS a
## ON
## r.audio_a_id=a.id OR
## r.audio_b_id=a.id
## WHERE
## r.audio_a_id IS NULL OR
## r.audio_b_id IS NULL
## GROUP BY
## a.sentence,
## a.group,
## a.id
## ORDER BY
## a.sentence,
## a.group
## LIMIT 2
from itertools import chain
import random
def get_next_rating(user_id):
# TODO use user_id to join with Users table
# ratings = Ratings.objects.values()
audio_files = get_unrated_pair()
mdpref_results = None
mdpref_svg = None
if len(audio_files)==0:
try:
ratings = get_all_ratings()
#print('ratings = {}'.format(ratings))
f = [r[0] for r in ratings]
n = [r[1] for r in ratings]
ij = list(chain.from_iterable(r[2:4] for r in ratings))
#print(ij)
subj = [r[4] for r in ratings]
# f = [] # TODO -> direct SQL query easier???
mdpref_results, mdpref_svg = mdprefml(f, n, ij, subj)
except Exception as e:
print('Exception occured while running mdprefml:', e)
audio_files = get_random_pair()
a_model = Audio.objects.get(id=audio_files[0][0])
b_model = Audio.objects.get(id=audio_files[0][1])
ab = [a_model, b_model]
random.shuffle(ab)
a, b = ab
try:
sentence = Sentence.objects.get(sentence=a_model.id)
except Exception as e:
print('Exception getting sentence id="{}"'.format(a_model.id))
sentence = Sentence(id=a_model.id, text='')
return (a, b, sentence.text, (mdpref_results, mdpref_svg))
def ratings_done(user_id):
return Ratings.objects.filter(user_id=user_id).count()
|
Python
| 0.999999
|
@@ -4780,18 +4780,24 @@
entence(
-id
+sentence
=a_model
|
637ae783dabd268ab03dfdf88592e733675bebbf
|
Make number of seconds to sleep a command line option.
|
thingiverse_crawler.py
|
thingiverse_crawler.py
|
#!/usr/bin/env python
import argparse
import datetime
import os.path
import requests
import re
import time
def utc_mktime(utc_tuple):
"""Returns number of seconds elapsed since epoch
Note that no timezone are taken into consideration.
utc tuple must be: (year, month, day, hour, minute, second)
"""
if len(utc_tuple) == 6:
utc_tuple += (0, 0, 0)
return time.mktime(utc_tuple) - time.mktime((1970, 1, 1, 0, 0, 0, 0, 0, 0))
def datetime_to_timestamp(dt):
"""Converts a datetime object to UTC timestamp"""
return int(utc_mktime(dt.timetuple()))
def parse_thing_ids(text):
pattern = "thing:(\d{5,7})";
matched = re.findall(pattern, text);
return set([int(val) for val in matched]);
def parse_file_ids(text):
pattern = "download:(\d{5,7})";
matched = re.findall(pattern, text);
return [int(val) for val in matched];
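# Illustrative behavior of the two parsers above (note that parse_thing_ids
# deduplicates via set() while parse_file_ids preserves order and duplicates):
#
#   parse_thing_ids("see thing:123456 and thing:123456")  ->  {123456}
#   parse_file_ids("download:111111 download:222222")     ->  [111111, 222222]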
def crawl_thing_ids(N, end_date=None):
""" This method extract N things that were uploaded to thingiverse.com
before end_date. If end_date is None, use today's date.
"""
baseurl = "http://www.thingiverse.com/search/recent/things/page:{}?q=&start_date=&stop_date={}&search_mode=advanced&description=&username=&tags=&license=";
end_date = datetime_to_timestamp(end_date);
thing_ids = [];
for i in range(N/12 + 1):
url = baseurl.format(i, end_date);
r = requests.get(url);
assert(r.status_code==200);
thing_ids += parse_thing_ids(r.text);
if len(thing_ids) > N:
break;
# Sleep a bit to avoid being mistaken as DoS.
time.sleep(0.5);
return thing_ids[:N];
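# Hedged example (hits thingiverse.com over the network, so keep N small when
# testing; the cutoff date below is purely hypothetical):
#
#   ids = crawl_thing_ids(24, end_date=datetime.datetime(2015, 6, 22))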
def crawl_new_things(N):
baseurl = "http://www.thingiverse.com/newest/page:{}";
thing_ids = [];
for i in range(N/12 + 1):
url = baseurl.format(i+1);
r = requests.get(url);
if r.status_code != 200:
print("failed to retrieve page {}".format(i));
thing_ids += parse_thing_ids(r.text);
if len(thing_ids) > N:
break;
# Sleep a bit to avoid being mistaken as DoS.
time.sleep(0.5);
return thing_ids[:N];
def get_download_links(thing_ids):
base_url = "http://www.thingiverse.com/{}:{}";
file_ids = [];
for thing_id in thing_ids:
url = base_url.format("thing", thing_id);
r = requests.get(url);
if r.status_code != 200:
print("failed to retrieve thing {}".format(thing_id));
file_ids.append(parse_file_ids(r.text));
links = [];
for i, thing_id in enumerate(thing_ids):
for file_id in file_ids[i]:
url = base_url.format("download", file_id);
r = requests.head(url);
link = r.headers.get("Location", None);
if link is not None:
__, ext = os.path.splitext(link);
if ext.lower() not in [".stl", ".obj", ".ply", ".off"]:
continue;
links.append([thing_id, file_id, link]);
# Sleep a bit to avoid being mistaken as DoS.
time.sleep(0.5);
return links;
def parse_args():
parser = argparse.ArgumentParser(
description="Crawl data from thingiverse",
epilog="Written by Qingnan Zhou <qnzhou at gmail dot com>");
#parser.add_argument("--end-date", help="e.g. 06/22/2015", default=None);
parser.add_argument("N", type=int,
help="how many files to crawl");
return parser.parse_args();
def main():
args = parse_args();
#if args.end_date is not None:
# month, day, year = args.end_date.split("/");
# args.end_date = datetime.date(year, month, day);
#else:
# args.end_date = datetime.datetime.now().date();
#print("Crawling things uploaded before {}".format(args.end_date));
#thing_ids = crawl_thing_ids(args.N, args.end_date);
thing_ids = crawl_new_things(args.N);
links = get_download_links(thing_ids);
with open("summary.csv", 'w') as fout:
fout.write("thing_id, fild_id, link\n");
for link in links:
fout.write(",".join([str(val) for val in link]) + "\n");
with open("links.txt", 'w') as fout:
fout.write("\n".join([row[2] for row in links]));
if __name__ == "__main__":
main();
|
Python
| 0.000002
|
@@ -1654,16 +1654,31 @@
things(N
+, sleep_seconds
):%0A b
@@ -2101,35 +2101,45 @@
time.sleep(
-0.5
+sleep_seconds
);%0A%0A return t
@@ -2181,24 +2181,39 @@
ks(thing_ids
+, sleep_seconds
):%0A base_
@@ -3084,35 +3084,45 @@
time.sleep(
-0.5
+sleep_seconds
);%0A%0A return l
@@ -3386,24 +3386,121 @@
ault=None);%0A
+ parser.add_argument(%22--sleep%22, type=float,%0A help=%22pause between downloads in s%22);%0A
parser.a
@@ -3979,32 +3979,64 @@
args.end_date);%0A
+ sleep_seconds = args.sleep;%0A
thing_ids =
@@ -4058,16 +4058,31 @@
s(args.N
+, sleep_seconds
);%0A l
@@ -4116,16 +4116,31 @@
hing_ids
+, sleep_seconds
);%0A%0A
|
acce8817eae67dc605ffe628d0d536511d3ea915
|
remove dead code
|
corehq/apps/ota/forms.py
|
corehq/apps/ota/forms.py
|
from django import forms
from django.utils.translation import gettext
from crispy_forms import layout as crispy
# todo proper B3 Handle
from crispy_forms.bootstrap import StrictButton
from crispy_forms.helper import FormHelper
from corehq.apps.hqwebapp import crispy as hqcrispy
class PrimeRestoreCacheForm(forms.Form):
info_text = gettext(
"For projects where mobile users manage a lot of cases (e.g. more than 10,000), "
"this tool can be used to temporarily speed up phone sync times. Once activated, "
"it will ensure that the 'Sync with Server' functionality runs faster on the phone for 24 hours.")
def __init__(self, *args, **kwargs):
super(PrimeRestoreCacheForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_method = 'post'
self.helper.form_action = '.'
self.helper.layout = crispy.Layout(
crispy.HTML("<p>" + self.info_text + "</p>"),
hqcrispy.FormActions(
StrictButton(
"Click here to speed up 'Sync with Server'",
css_class="btn-primary",
type="submit",
),
),
)
class AdvancedPrimeRestoreCacheForm(forms.Form):
check_cache_only = forms.BooleanField(
label='Check cache only',
help_text="Just check the cache, don't actually generate the restore response.",
required=False
)
overwrite_cache = forms.BooleanField(
label='Overwrite existing cache',
help_text=('This will ignore any existing cache and '
're-calculate the restore response for each user'),
required=False
)
all_users = forms.BooleanField(
label='Include all users',
required=False
)
users = forms.CharField(
label='User list',
help_text=('One username or user_id per line '
'(username e.g. mobile_worker_1)'),
widget=forms.Textarea(attrs={'rows': '5', 'cols': '50'}),
required=False
)
def __init__(self, *args, **kwargs):
super(AdvancedPrimeRestoreCacheForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-horizontal'
self.helper.label_class = 'col-lg-2'
self.helper.field_class = 'col-lg-4'
self.helper.form_method = 'post'
self.helper.form_action = '.'
self.helper.layout = crispy.Layout(
crispy.Field('check_cache_only', data_ng_model='check_cache_only'),
crispy.Div(
'version',
'cache_timeout',
'overwrite_cache',
data_ng_hide='check_cache_only'
),
crispy.Field('all_users', data_ng_model='all_users'),
'domain',
crispy.Div('users', data_ng_hide='all_users'),
hqcrispy.FormActions(
StrictButton(
"Submit",
css_class="btn-primary",
type="submit",
),
),
)
def clean_users(self):
user_ids = self.cleaned_data['users'].splitlines()
self.user_ids = [_f for _f in user_ids if _f]
return self.cleaned_data['users']
def clean(self):
cleaned_data = super(AdvancedPrimeRestoreCacheForm, self).clean()
if not self.user_ids and not cleaned_data['all_users']:
raise forms.ValidationError("Please supply user IDs or select the 'All Users' option")
return cleaned_data
|
Python
| 0.999454
|
@@ -1216,2374 +1216,4 @@
)%0A
-%0A%0Aclass AdvancedPrimeRestoreCacheForm(forms.Form):%0A check_cache_only = forms.BooleanField(%0A label='Check cache only',%0A help_text=%22Just check the cache, don't actually generate the restore response.%22,%0A required=False%0A )%0A overwrite_cache = forms.BooleanField(%0A label='Overwrite existing cache',%0A help_text=('This will ignore any existing cache and '%0A 're-calculate the restore response for each user'),%0A required=False%0A )%0A all_users = forms.BooleanField(%0A label='Include all users',%0A required=False%0A )%0A users = forms.CharField(%0A label='User list',%0A help_text=('One username or user_id per line '%0A '(username e.g. mobile_worker_1)'),%0A widget=forms.Textarea(attrs=%7B'rows': '5', 'cols': '50'%7D),%0A required=False%0A )%0A%0A def __init__(self, *args, **kwargs):%0A super(AdvancedPrimeRestoreCacheForm, self).__init__(*args, **kwargs)%0A self.helper = FormHelper()%0A self.helper.form_class = 'form-horizontal'%0A self.helper.label_class = 'col-lg-2'%0A self.helper.field_class = 'col-lg-4'%0A self.helper.form_method = 'post'%0A self.helper.form_action = '.'%0A%0A self.helper.layout = crispy.Layout(%0A crispy.Field('check_cache_only', data_ng_model='check_cache_only'),%0A crispy.Div(%0A 'version',%0A 'cache_timeout',%0A 'overwrite_cache',%0A data_ng_hide='check_cache_only'%0A ),%0A crispy.Field('all_users', data_ng_model='all_users'),%0A 'domain',%0A crispy.Div('users', data_ng_hide='all_users'),%0A hqcrispy.FormActions(%0A StrictButton(%0A %22Submit%22,%0A css_class=%22btn-primary%22,%0A type=%22submit%22,%0A ),%0A ),%0A )%0A%0A def clean_users(self):%0A user_ids = self.cleaned_data%5B'users'%5D.splitlines()%0A self.user_ids = %5B_f for _f in user_ids if _f%5D%0A return self.cleaned_data%5B'users'%5D%0A%0A def clean(self):%0A cleaned_data = super(AdvancedPrimeRestoreCacheForm, self).clean()%0A if not self.user_ids and not cleaned_data%5B'all_users'%5D:%0A raise forms.ValidationError(%22Please supply user IDs or select the 'All Users' option%22)%0A%0A return cleaned_data%0A
|
24439d318668897d8d1aff99df1606e80d45b875
|
add watchdog test
|
tests/test_bmc.py
|
tests/test_bmc.py
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
from nose.tools import eq_, raises
from pyipmi.bmc import *
import pyipmi.msgs.bmc
from pyipmi.msgs import encode_message
from pyipmi.msgs import decode_message
def test_deviceid_object():
m = pyipmi.msgs.bmc.GetDeviceIdRsp()
decode_message(m, '\x00\x12\x84\x05\x67\x51\x55\x12\x34\x56\x44\x55')
d = DeviceId(m)
eq_(d.device_id, 18)
eq_(d.revision, 4)
eq_(d.provides_sdrs, True)
eq_(str(d.fw_revision), '5.67')
eq_(str(d.ipmi_version), '1.5')
eq_(d.manufacturer_id, 5649426)
eq_(d.product_id, 21828)
eq_(d.aux, None)
|
Python
| 0
|
@@ -202,16 +202,468 @@
essage%0A%0A
+def test_watchdog_object():%0A m = pyipmi.msgs.bmc.GetWatchdogTimerRsp()%0A decode_message(m, '%5Cx00%5Cx41%5Cx42%5Cx33%5Cx44%5Cx55%5Cx66%5Cx77%5Cx88')%0A%0A w = Watchdog(m)%0A eq_(w.timer_use, 1)%0A eq_(w.is_running, 1)%0A eq_(w.dont_log, 0)%0A eq_(w.timeout_action, 2)%0A eq_(w.pre_timeout_interrupt, 4)%0A eq_(w.pre_timeout_interval, 0x33)%0A%0A eq_(w.timer_use_expiration_flags, 0x44)%0A eq_(w.initial_countdown, 0x6655)%0A eq_(w.present_countdown, 0x8877)%0A%0A
def test
|
5ae1d9ebcc34d47c858ba63e26121be92771d812
|
temporary fix test_bot login
|
tests/test_bot.py
|
tests/test_bot.py
|
import json
import requests
from instabot import Bot
try:
from unittest.mock import Mock, patch
except ImportError:
from mock import Mock, patch
class TestBot:
def setup(self):
self.USER_ID = 1234567
self.USERNAME = "test_username"
self.PASSWORD = "test_password"
self.FULLNAME = "test_full_name"
self.TOKEN = "abcdef123456"
self.bot = Bot()
self.prepare_api(self.bot)
def prepare_api(self, bot):
bot.api.is_logged_in = True
bot.api.session = requests.Session()
cookies = Mock()
cookies.return_value = {"csrftoken": self.TOKEN, "ds_user_id": self.USER_ID}
bot.api.session.cookies.get_dict = cookies
bot.api.set_user(self.USERNAME, self.PASSWORD)
class TestBotAPI(TestBot):
@patch("instabot.API.load_uuid_and_cookie")
def test_login(self, load_cookie_mock):
self.bot = Bot()
load_cookie_mock.side_effect = Exception()
def mockreturn(*args, **kwargs):
r = Mock()
r.status_code = 200
r.text = '{"status": "ok"}'
return r
def mockreturn_login(*args, **kwargs):
r = Mock()
r.status_code = 200
r.text = json.dumps(
{
"logged_in_user": {
"pk": self.USER_ID,
"username": self.USERNAME,
"full_name": self.FULLNAME,
},
"status": "ok",
}
)
return r
with patch("requests.Session") as Session:
instance = Session.return_value
instance.get.return_value = mockreturn()
instance.post.return_value = mockreturn_login()
instance.cookies = requests.cookies.RequestsCookieJar()
instance.cookies.update(
{"csrftoken": self.TOKEN, "ds_user_id": self.USER_ID}
)
assert self.bot.api.login(username=self.USERNAME, password=self.PASSWORD)
assert self.bot.api.username == self.USERNAME
assert self.bot.user_id == self.USER_ID
assert self.bot.api.is_logged_in
assert self.bot.api.uuid
assert self.bot.api.token
def test_generate_uuid(self):
from uuid import UUID
generated_uuid = self.bot.api.generate_UUID(True)
assert isinstance(UUID(generated_uuid), UUID)
assert UUID(generated_uuid).hex == generated_uuid.replace("-", "")
def test_set_user(self):
test_username = "abcdef"
test_password = "passwordabc"
self.bot.api.set_user(test_username, test_password)
assert self.bot.api.username == test_username
assert self.bot.api.password == test_password
assert hasattr(self.bot.api, "uuid")
def test_reset_counters(self):
keys = [
"liked",
"unliked",
"followed",
"messages",
"unfollowed",
"commented",
"blocked",
"unblocked",
]
for key in keys:
self.bot.total[key] = 1
assert self.bot.total[key] == 1
self.bot.reset_counters()
for key in keys:
assert self.bot.total[key] == 0
|
Python
| 0.997568
|
@@ -2012,16 +2012,33 @@
i.login(
+%0A
username
@@ -2052,16 +2052,32 @@
SERNAME,
+%0A
passwor
@@ -2091,16 +2091,63 @@
PASSWORD
+,%0A use_cookie=False%0A
)%0A%0A
|
e6519d121ab80467fafdab6a2183964d97ef60e8
|
Add test for set_meta command.
|
tests/test_cli.py
|
tests/test_cli.py
|
# -*- coding: utf-8 -*-
import os
from click.testing import CliRunner
from sigal import init
from sigal import serve
def test_init(tmpdir):
config_file = str(tmpdir.join('sigal.conf.py'))
runner = CliRunner()
result = runner.invoke(init, [config_file])
assert result.exit_code == 0
assert result.output.startswith('Sample config file created:')
assert os.path.isfile(config_file)
result = runner.invoke(init, [config_file])
assert result.exit_code == 1
assert result.output == ("Found an existing config file, will abort to "
"keep it safe.\n")
def test_serve(tmpdir):
config_file = str(tmpdir.join('sigal.conf.py'))
runner = CliRunner()
result = runner.invoke(init, [config_file])
assert result.exit_code == 0
result = runner.invoke(serve)
assert result.exit_code == 2
result = runner.invoke(serve, ['-c', config_file])
assert result.exit_code == 1
|
Python
| 0
|
@@ -112,16 +112,43 @@
t serve%0A
+from sigal import set_meta%0A
%0A%0Adef te
@@ -956,28 +956,1059 @@
ssert result.exit_code == 1%0A
+%0Adef test_set_meta(tmpdir):%0A%0A testdir = tmpdir.mkdir(%22test%22)%0A%0A testfile = tmpdir.join(%22test.jpg%22)%0A testfile.write(%22%22)%0A%0A runner = CliRunner()%0A result = runner.invoke(set_meta, %5Bstr(testdir), %22title%22, %22testing%22%5D)%0A%0A assert result.exit_code == 0%0A assert result.output.startswith(%221 metadata key(s) written to%22)%0A assert os.path.isfile(str(testdir.join(%22index.md%22)))%0A assert testdir.join(%22index.md%22).read() == %22Title: testing%5Cn%22%0A%0A # Run again, should give file exists error%0A result = runner.invoke(set_meta, %5Bstr(testdir), %22title%22, %22testing%22%5D)%0A assert result.exit_code == 2%0A%0A result = runner.invoke(set_meta, %5Bstr(testdir.join(%22non-existant.jpg%22)), %22title%22, %22testing%22%5D)%0A assert result.exit_code == 1%0A%0A result = runner.invoke(set_meta, %5Bstr(testfile), %22title%22, %22testing%22%5D)%0A%0A assert result.exit_code == 0%0A assert result.output.startswith(%221 metadata key(s) written to%22)%0A assert os.path.isfile(str(tmpdir.join(%22test.md%22)))%0A assert tmpdir.join(%22test.md%22).read() == %22Title: testing%5Cn%22%0A
|
d2de2d44a46ff521ab8c1d8bbc57d4eeb8d5dc53
|
Fix an error
|
taiga/users/services.py
|
taiga/users/services.py
|
# Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This model contains a domain logic for users application.
"""
from django.db.models.loading import get_model
from django.db.models import Q
from easy_thumbnails.files import get_thumbnailer
from taiga.base import exceptions as exc
from taiga.base.utils.urls import get_absolute_url
from .gravatar import get_gravatar_url
def get_and_validate_user(*, username:str, password:str) -> bool:
"""
    Check if a user with the given username/email exists and whether the
    specified password matches that user's password.
    If the user is valid, the user is returned; otherwise the corresponding
    exception is raised.
"""
user_model = get_model("users", "User")
qs = user_model.objects.filter(Q(username=username) |
Q(email=username))
if len(qs) == 0:
raise exc.WrongArguments("Username or password does not matches user.")
user = qs[0]
if not user.check_password(password):
raise exc.WrongArguments("Username or password does not matches user.")
return user
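# Usage sketch (raises exc.WrongArguments if the user is unknown or the
# password does not match; the credential values below are illustrative):
#
#   user = get_and_validate_user(username="admin", password="s3cret")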
def get_photo_url(photo):
"""Get a photo absolute url and the photo automatically cropped."""
url = get_thumbnailer(photo)['avatar'].url
return get_absolute_url(url)
def get_photo_or_gravatar_url(user):
"""Get the user's photo/gravatar url."""
return get_photo_url(user.photo) if user.photo else get_gravatar_url(user.email)
|
Python
| 0.998142
|
@@ -2100,24 +2100,41 @@
tar url.%22%22%22%0A
+ if user:%0A
return g
@@ -2206,8 +2206,22 @@
.email)%0A
+ return %22%22%0A
|
0f5d0353f9faad9bb34432cd047540b81c6ea643
|
add exception test for invalid authentication
|
tests/test_tpm.py
|
tests/test_tpm.py
|
import requests
import requests_mock
import unittest
import os.path
import tpm
import json
import logging
log = logging.getLogger(__name__)
api_url = 'https://tpm.example.com/index.php/api/v4/'
local_path = 'tests/resources/'
item_limit = 20
def fake_data(url, m):
"""
A stub urlopen() implementation that load json responses from
the filesystem.
"""
# Map path from url to a file
path_parts = url.split('/')[6:]
path = '/'.join(path_parts)
resource_file = os.path.normpath('tests/resources/{}'.format(path))
data_file = open(resource_file)
data = json.load(data_file)
# Must return a json-like object
count = 0
header = {}
while True:
count += 1
if len(data) > item_limit:
returndata = data[:item_limit]
data = data[item_limit:]
pageingurl = url.replace('.json', '/page/{}.json'.format(count))
log.debug("Registering URL: {}".format(pageingurl))
log.debug("Registering data: {}".format(returndata))
log.debug("Data length: {}".format(len(returndata)))
log.debug("Registering header: {}".format(header))
m.get(pageingurl, json=returndata, headers=header.copy())
header = { 'link': '{}; rel="next"'.format(pageingurl)}
else:
log.debug("Registering URL: {}".format(url))
log.debug("Registering data: {}".format(data))
log.debug("Registering header: {}".format(header))
log.debug("Data length: {}".format(len(data)))
m.get(url, json=data, headers=header.copy())
header.clear()
break
class ClientTestCase(unittest.TestCase):
"""Test case for the client methods."""
def setUp(self):
self.client = tpm.TpmApiv4('https://tpm.example.com', username='USER', password='PASS')
def test_user_auth_method(self):
"""Test user based authentication method."""
pass
def test_paging(self):
"""Test paging, if number of items is same as from original data source."""
path_to_mock = 'passwords.json'
request_url = api_url + path_to_mock
request_path = local_path + path_to_mock
resource_file = os.path.normpath(request_path)
data_file = open(resource_file)
data = json.load(data_file)
with requests_mock.Mocker() as m:
fake_data(request_url, m)
response = self.client.list_passwords()
# number of passwords as from original json file.
source_items = len(data)
response_items = len(response)
log.debug("Source Items: {}; Response Items: {}".format(source_items, response_items))
self.assertEqual(source_items, response_items)
def test_logging(self):
"""Test Logging."""
pass
class ExceptionTestCase(unittest.TestCase):
"""Test case for all kind of Exceptions."""
def test_wrong_url_exception(self):
"""Exception if URL does not match REGEXurl."""
wrong_url = 'ftp://tpm.example.com'
with self.assertRaises(tpm.TpmApiv4.ConfigError) as context:
tpm.TpmApiv4(wrong_url, username='USER', password='PASS')
log.debug("context exception: {}".format(context.exception))
self.assertEqual("'Invalid URL: {}'".format(wrong_url), str(context.exception))
|
Python
| 0.000001
|
@@ -2906,24 +2906,467 @@
eptions.%22%22%22%0A
+ def test_wrong_auth_exception(self):%0A %22%22%22Exception if wrong authentication mehtod.%22%22%22%0A with self.assertRaises(tpm.TpmApi.ConfigError) as context:%0A tpm.TpmApiv4('https://tpm.example.com', username='USER', private_key='PASS')%0A log.debug(%22context exception: %7B%7D%22.format(context.exception))%0A self.assertEqual(%22'No authentication specified (user/password or private/public key)'%22, str(context.exception))%0A%0A
def test
|
d43d4638eefe6d08dcb9ad739753bc4c43647c2a
|
fix another lazy test
|
tests/legacy/test_xmlrpc.py
|
tests/legacy/test_xmlrpc.py
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import pretend
import pytest
from werkzeug.exceptions import BadRequest
from warehouse.packaging.models import Project
from warehouse.legacy import xmlrpc
def test_xmlrpc_handler(monkeypatch):
Response = pretend.call_recorder(lambda *a, **k: 'response')
monkeypatch.setattr(xmlrpc, "Response", Response)
interface = pretend.stub(
list_packages=pretend.call_recorder(lambda *a, **k: 'one two'.split())
)
Interface = lambda a, r: interface
monkeypatch.setattr(xmlrpc, "Interface", Interface)
app = pretend.stub()
xml_request = '''<?xml version="1.0"?><methodCall>
<methodName>list_packages</methodName></methodCall>'''
request = pretend.stub(
headers={
'Content-Type': 'text/xml',
'Content-Length': str(len(xml_request)),
},
get_data=lambda **k: xml_request,
)
assert xmlrpc.handle_request(app, request) == 'response'
assert interface.list_packages.calls
response_xml = Response.calls[0].args[0]
assert response_xml == u'''<?xml version='1.0'?>
<methodResponse>
<params>
<param>
<value><array><data>
<value><string>one</string></value>
<value><string>two</string></value>
</data></array></value>
</param>
</params>
</methodResponse>
'''
assert Response.calls[0].kwargs == dict(mimetype='text/xml')
def test_xmlrpc_list_packages():
all_projects = [Project("bar"), Project("foo")]
app = pretend.stub(
models=pretend.stub(
packaging=pretend.stub(
all_projects=pretend.call_recorder(lambda: all_projects),
),
),
)
request = pretend.stub(
headers={'Content-Type': 'text/xml'}
)
interface = xmlrpc.Interface(app, request)
result = interface.list_packages()
assert app.models.packaging.all_projects.calls == [pretend.call()]
assert result == ['bar', 'foo']
def test_xmlrpc_size(monkeypatch):
app = pretend.stub()
request = pretend.stub(
headers={
'Content-Type': 'text/xml',
'Content-Length': str(10 * 1024 * 1024 + 1)
},
)
with pytest.raises(BadRequest):
xmlrpc.handle_request(app, request)
|
Python
| 0.000034
|
@@ -1646,16 +1646,36 @@
es.calls
+ == %5Bpretend.call()%5D
%0A res
|
87ff78dfe54795f9067fa45f832e8bc84b16c894
|
Fix integer division
|
tf_rl/simulate.py
|
tf_rl/simulate.py
|
import math
import time
from IPython.display import clear_output, display, HTML
from itertools import count
from os.path import join, exists
from os import makedirs
def simulate(simulation,
controller= None,
fps=60,
visualize_every=1,
action_every=1,
simulation_resolution=None,
wait=False,
disable_training=False,
save_path=None):
"""Start the simulation. Performs three tasks
- visualizes simulation in iPython notebook
- advances simulator state
- reports state to controller and chooses actions
to be performed.
Parameters
-------
simulation: tr_lr.simulation
simulation that will be simulated ;-)
controller: tr_lr.controller
controller used
fps: int
        frames per second
visualize_every: int
visualize every `visualize_every`-th frame.
action_every: int
take action every `action_every`-th frame
simulation_resolution: float
simulate at most 'simulation_resolution' seconds at a time.
        If None, it is set to 1/FPS (default).
wait: boolean
whether to intentionally slow down the simulation
to appear real time.
disable_training: bool
if true training_step is never called.
save_path: str
        save svg visualization (only tf_rl.utils.svg
supported for the moment)
"""
# prepare path to save simulation images
if save_path is not None:
if not exists(save_path):
makedirs(save_path)
last_image = 0
# calculate simulation times
chunks_per_frame = 1
chunk_length_s = 1.0 / fps
if simulation_resolution is not None:
frame_length_s = 1.0 / fps
chunks_per_frame = int(math.ceil(frame_length_s / simulation_resolution))
chunks_per_frame = max(chunks_per_frame, 1)
chunk_length_s = frame_length_s / chunks_per_frame
# state transition bookkeeping
last_observation = None
last_action = None
simulation_started_time = time.time()
for frame_no in count():
for _ in range(chunks_per_frame):
simulation.step(chunk_length_s)
if frame_no % action_every == 0:
new_observation = simulation.observe()
reward = simulation.collect_reward()
# store last transition
if last_observation is not None:
controller.store(last_observation, last_action, reward, new_observation)
# act
new_action = controller.action(new_observation)
simulation.perform_action(new_action)
#train
if not disable_training:
controller.training_step()
# update current state as last state.
last_action = new_action
last_observation = new_observation
# adding 1 to make it less likely to happen at the same time as
# action taking.
if (frame_no + 1) % visualize_every == 0:
fps_estimate = frame_no / (time.time() - simulation_started_time)
clear_output(wait=True)
svg_html = simulation.to_html(["fps = %.1f" % (fps_estimate,)])
display(svg_html)
if save_path is not None:
img_path = join(save_path, "%d.svg" % (last_image,))
with open(img_path, "w") as f:
svg_html.write_svg(f)
last_image += 1
time_should_have_passed = frame_no / fps
time_passed = (time.time() - simulation_started_time)
if wait and (time_should_have_passed > time_passed):
time.sleep(time_should_have_passed - time_passed)
|
Python
| 0.999999
|
@@ -1,16 +1,49 @@
+from __future__ import division%0A%0A
import math%0Aimpo
|
31a2439c1137068d8532c5f85cc1c8fb913d7ee8
|
Add reconnect to clamscan
|
modules/Antivirus/ClamAVScan.py
|
modules/Antivirus/ClamAVScan.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
try:
import pyclamd
except:
print("pyclamd module not installed...")
pyclamd = None
__author__ = 'Mike Long'
__license__ = "MPL 2.0"
DEFAULTCONF = {
"ENABLED": True,
}
def check(conf=DEFAULTCONF):
if not conf['ENABLED']:
return False
if not pyclamd:
return False
return True
def scan(filelist, conf=DEFAULTCONF):
results = []
try:
clamScanner = pyclamd.ClamdUnixSocket()
clamScanner.ping()
except:
clamScanner = pyclamd.ClamdNetworkSocket()
try:
clamScanner.ping()
except:
raise ValueError("Unable to connect to clamd")
# Scan each file from filelist for virus
for f in filelist:
output = clamScanner.scan_file(f)
if output is None:
continue
if list(output.values())[0][0] == 'ERROR':
with open(f, 'rb') as file_handle:
try:
output = clamScanner.scan_stream(file_handle.read())
except pyclamd.BufferTooLongError:
continue
if output is None:
continue
if list(output.values())[0][0] == 'FOUND':
results.append((f, list(output.values())[0][1]))
elif list(output.values())[0][0] == 'ERROR':
print('ClamAV: ERROR:', list(output.values())[0][1])
# Set metadata tags
metadata = {
'Name': "ClamAV",
'Type': "Antivirus",
'Version': clamScanner.version()
}
return (results, metadata)
|
Python
| 0.000001
|
@@ -623,59 +623,24 @@
def
-scan(filelist, conf=DEFAULTCONF):%0A results = %5B%5D%0A
+_connect_clam():
%0A
@@ -901,16 +901,130 @@
clamd%22)
+%0A return clamScanner%0A%0Adef scan(filelist, conf=DEFAULTCONF):%0A results = %5B%5D%0A%0A clamScanner = _connect_clam()
%0A%0A #
@@ -1419,16 +1419,16 @@
gError:%0A
-
@@ -1439,32 +1439,223 @@
continue
+%0A except Exception as e:%0A print(e)%0A clamScanner = _connect_clam()%0A output = clamScanner.scan_stream(file_handle.read())
%0A%0A if out
|
821e191e05269b9c1cc5f58b3d4cecf5bd20e896
|
Correct Range sample
|
samples/python/com.ibm.streamsx.topology.pysamples/opt/python/streams/spl_sources.py
|
samples/python/com.ibm.streamsx.topology.pysamples/opt/python/streams/spl_sources.py
|
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2015, 2016
from __future__ import absolute_import, division, print_function
# Simple inclusion of Python logic within an SPL application
# as a SPL "Function" operator. A "Function" operator has
# a single input port and single output port, a function
# is called for every input tuple, and results in
# no submission or a single tuple being submitted.
# Import the SPL decorators
from streamsx.spl import spl
# Any function in a Python module (.py file) within the
# toolkit's opt/python/streams directory is converted to a primitive operator
# with a single input and output port. The primitive operator
# is a C++ primitive that embeds the Python runtime.
#
# The function must be decorated with one of these
#
# @spl.pipe - Function is a pipe operator
# @spl.sink - Function is a sink operator
# @spl.ignore - Function is ignored
# Attributes of the input SPL tuple are passed
# as a Python Tuple and thus are available as positional arguments.
# (see examples below)
# Any returned value from a function must be a Tuple.
#
# If nothing is returned then no tuple is submitted
# by the operator for the input tuple.
#
# When a Tuple is returned, its values are assigned
# to the first N attributes of the output tuple,
# that is by position.
# The returned values in the Tuple must be assignable
# to the output tuple attribute types.
#
# If the output port has more than N attributes
# then any remaining attributes are set from the
# input tuple if there is a matching input attribute by
# name and type, otherwise the attribute remains at
# its default value.
#
# If the output port has fewer attributes than N
# then any additional values are ignored.
# Any function whose name starts with spl is not created
# as an operator, such functions are reserved as a mechanism
# to pass information back to the primitive operator generator.
# The description of the function becomes the description
# of the primitive operator model in its operator model.
#------------------------------------------------------------------
# Example functions
#------------------------------------------------------------------
# Defines the SPL namespace for any functions in this module
# Multiple modules can map to the same namespace
def splNamespace():
return "com.ibm.streamsx.topology.pysamples.sources"
@spl.source()
class Range:
def __init__(self, count):
self.count = count
def __iter__(self):
return map(tuple, iter(range(self.count)))
|
Python
| 0.000001
|
@@ -2486,30 +2486,130 @@
-return map(tuple, iter
+# Use zip to convert the single returned value%0A # into a tuple to allow it to be returned to SPL%0A return zip
(ran
@@ -2623,10 +2623,9 @@
.count))
-)
%0A
|
b23a887edd6b55f2386c45c9b93c04431bceba5e
|
remove all__vary_rounds setting (deprecated in Passlib 1.7)
|
coremods/login.py
|
coremods/login.py
|
"""
login.py - Implement core login abstraction.
"""
from pylinkirc import conf, utils, world
from pylinkirc.log import log
try:
from passlib.context import CryptContext
except ImportError:
CryptContext = None
log.warning("Hashed passwords are disabled because passlib is not installed. Please install "
"it (pip3 install passlib) and restart for this feature to work.")
pwd_context = None
if CryptContext:
pwd_context = CryptContext(["sha512_crypt", "sha256_crypt"],
all__vary_rounds=0.1,
sha256_crypt__default_rounds=180000,
sha512_crypt__default_rounds=90000)
def checkLogin(user, password):
"""Checks whether the given user and password is a valid combination."""
accounts = conf.conf['login'].get('accounts')
if not accounts:
# No accounts specified, return.
return False
# Lowercase account names to make them case insensitive. TODO: check for
# duplicates.
user = user.lower()
accounts = {k.lower(): v for k, v in accounts.items()}
try:
account = accounts[user]
except KeyError: # Invalid combination
return False
else:
passhash = account.get('password')
if not passhash:
# No password given, return. XXX: we should allow plugins to override
# this in the future.
return False
# Encryption in account passwords is optional (to not break backwards
# compatibility).
if account.get('encrypted', False):
return verifyHash(password, passhash)
else:
return password == passhash
def verifyHash(password, passhash):
"""Checks whether the password given matches the hash."""
if password:
if not pwd_context:
raise utils.NotAuthorizedError("Cannot log in to an account with a hashed password "
"because passlib is not installed.")
return pwd_context.verify(password, passhash)
return False # No password given!
|
Python
| 0
|
@@ -500,61 +500,8 @@
%22%5D,%0A
- all__vary_rounds=0.1,%0A
|
b79a80d894bdc39c8fa6f76fe50e222567f00df1
|
Update config_default: add elastic search config
|
config_default.py
|
config_default.py
|
# -*- coding: utf-8 -*-
"""
Created on 2015-10-23 08:06:00
@author: Tran Huu Cuong <tranhuucuong91@gmail.com>
"""
import os
# Blog configuration values.
# You may consider using a one-way hash to generate the password, and then
# use the hash again in the login view to perform the comparison. This is just
# for simplicity.
ADMIN_PASSWORD = 'admin@secret'
APP_DIR = os.path.dirname(os.path.realpath(__file__))
PATH_SQLITE_DB=os.path.join(APP_DIR, 'blog.db')
# The playhouse.flask_utils.FlaskDB object accepts database URL configuration.
DATABASE = 'sqliteext:///{}'.format(PATH_SQLITE_DB)
DEBUG = False
# The secret key is used internally by Flask to encrypt session data stored
# in cookies. Make this unique for your app.
SECRET_KEY = 'shhh, secret!'
# This is used by micawber, which will attempt to generate rich media
# embedded objects with maxwidth=800.
SITE_WIDTH = 800
APP_HOST='127.0.0.1'
APP_PORT=5000
|
Python
| 0
|
@@ -915,8 +915,124 @@
RT=5000%0A
+%0AES_HOST = %7B%0A %22host%22: %22172.17.42.1%22,%0A %22port%22: 9200%0A%7D%0A%0AES_INDEX_NAME = 'notebooks'%0AES_TYPE_NAME = 'notebooks'%0A%0A
|
a7c084b4ff3d5529ca54209283d0e1a5984ebea2
|
Fix lint error
|
tldextract/cli.py
|
tldextract/cli.py
|
'''tldextract CLI'''
import logging
import sys
from .tldextract import TLDExtract
from ._version import version as __version__
def main():
'''tldextract CLI main command.'''
import argparse
logging.basicConfig()
parser = argparse.ArgumentParser(
prog='tldextract',
description='Parse hostname from a url or fqdn')
parser.add_argument('--version', action='version', version='%(prog)s ' + __version__)
parser.add_argument('input', metavar='fqdn|url',
type=str, nargs='*', help='fqdn or url')
parser.add_argument('-u', '--update', default=False, action='store_true',
help='force fetch the latest TLD definitions')
parser.add_argument('-c', '--cache_dir',
help='use an alternate TLD definition caching folder')
parser.add_argument('-p', '--private_domains', default=False, action='store_true',
help='Include private domains')
args = parser.parse_args()
tld_extract = TLDExtract(include_psl_private_domains=args.private_domains)
if args.cache_dir:
tld_extract.cache_file = args.cache_file
if args.update:
tld_extract.update(True)
elif not args.input:
parser.print_usage()
sys.exit(1)
return
for i in args.input:
print(' '.join(tld_extract(i))) # pylint: disable=superfluous-parens
|
Python
| 0.000035
|
@@ -124,16 +124,17 @@
sion__%0A%0A
+%0A
def main
|
493fc3dcbb192ac226aa94f11602ced843f27195
|
Add debug text for scraped last race ID
|
granjaRaces/spiders/granjaRaces_spider.py
|
granjaRaces/spiders/granjaRaces_spider.py
|
import scrapy
import re
import os
from scrapy.loader import ItemLoader
from granjaRaces.items import GranjaRacesItem
LOGIN_URL = 'http://www.kartodromogranjaviana.com.br/resultados/resultados_cad.php'
RESULT_URL = 'http://www.kartodromogranjaviana.com.br/resultados/resultados_folha.php'
RESULT_TYPE = 1 # race
# TRIVIA:
# In Jan 2017, the asphalt of the KGV race track was completely rebuilt.
# Thus all previous race and lap data is 'useless' for actual predictions.
# The following ID refers to the first race at KGV after the track rebuild.
MIN_RACE_ID = 36612
DICT_HEADER = {
u'POS' : 'racePosition',
u'NO.' : 'kartNumber',
u'NOME' : 'driverName',
u'CLASSE' : 'driverClass',
u'VOLTAS' : 'numOfLaps',
u'TOTAL TEMPO' : 'raceTime',
u'MELHOR TEMPO' : 'bestLapTime'
}
# DICT_SKIP = {
# u'COMENT\xc1RIOS' : 'comments',
# u'PONTOS' : 'points',
# u'DIFF' : 'diffToLeader',
# u'ESPA\xc7O' : 'diffToPrevious'
# }
class GranjaRaceSpider(scrapy.Spider):
name = 'granjaRaces'
start_urls = ['http://www.kartodromogranjaviana.com.br/resultados/resultados_cad.php']
def start_requests(self):
return [scrapy.FormRequest(
LOGIN_URL,
formdata = {
'email': 'granja@macedo.me',
'opt': 'L'
},
callback = self.after_login
)]
def after_login(self, response):
# check login succeed before going on
if 'Informe o e-mail cadastrado' in response.body:
self.logger.error('Login failed')
return
# $> scrapy crawl granjaRaces -a begin=36620 -a end=36642
firstRaceId = int(getattr(self, 'begin', MIN_RACE_ID))
if firstRaceId < MIN_RACE_ID:
firstRaceId = MIN_RACE_ID
lastRaceId = int(getattr(self, 'end', -1))
if lastRaceId < 0:
raceIdList = response.css('a').re(r'resultados_folha\.php\?tipo=1\&id=(\d+)')
lastRaceId = int(max(raceIdList))
if lastRaceId < firstRaceId:
lastRaceId = firstRaceId
self.logger.info('Scrapping races from %i to %i', firstRaceId, lastRaceId)
# continue scraping with authenticated session...
for raceId in range(firstRaceId, 1 + lastRaceId, 1):
url = '%s?tipo=%i&id=%i' % (RESULT_URL, RESULT_TYPE, raceId)
self.logger.debug('yielding a start url: %s' % url)
yield scrapy.Request(url, callback=self.parse)
def parse(self, response):
# http://www.kartodromogranjaviana.com.br/resultados/resultados_folha.php?tipo=1&id=36612
self.logger.debug('response.url = [' + response.url + ']')
raceId = response.url.split('=')[-1]
if not raceId:
self.logger.error('Invalid URL: ' + response.url)
return
# filter body only with 'GRANJA VIANA'
if 'GRANJA VIANA' not in response.body:
self.logger.warning('Skipping RACE (Not GRANJA VIANA): ' + raceId)
return
        # discard INTERLAGOS races (for now...)
if 'INTERLAGOS' in response.body:
self.logger.warning('Skipping RACE (INTERLAGOS): ' + raceId)
return
        # filter body only with 'INDOOR'
if 'INDOOR' not in response.body:
self.logger.warning('Skipping RACE (Missing INDOOR): ' + raceId)
return
self.logger.info('Scrapping RACE: %s' % raceId)
self.persistToFile(raceId, response)
# get track configuration
# KARTODROMO INTERNACIONAL GRANJA VIANA KGV RACE TRACKS - CIRCUITO 01
headerbig = response.css('div.headerbig::text').extract_first()
if headerbig is None:
self.logger.error('Missing headerbig (%s)' % raceId)
return
if '-' not in headerbig:
self.logger.error('INVALID HEADER (Missing separator): %s' % headerbig)
return
self.logger.debug('headerbig = "%s"' % headerbig)
trackConfig = headerbig.split('-')[1].strip()
self.logger.debug('trackConfig = "%s"' % trackConfig)
# get table header
listHeader = [h.strip().upper() for h in response.css('th.column::text').extract()]
if not listHeader:
self.logger.error('No table header for RACE %s' % raceId)
return
# check header
for h in DICT_HEADER.keys():
if h not in listHeader:
self.logger.error('MISSING HEADER COLUMN (%s): %s' % (raceId, h))
return
# get table data
tableData = response.xpath('//table/tr')[1:]
for line in tableData:
raceEntryData = {}
i = 1
for h in listHeader:
if h in DICT_HEADER.keys():
key = DICT_HEADER[h]
value = line.xpath('td[%i]/text()' % i).extract_first()
raceEntryData[key] = value
i += 1
raceLoader = ItemLoader(item=GranjaRacesItem(), response=response)
raceLoader.add_value('raceId', raceId)
raceLoader.add_value('trackConfig', trackConfig)
for col in raceEntryData.keys():
raceLoader.add_value(col, raceEntryData[col])
if not raceEntryData['racePosition'].isdigit():
raceEntryData['racePosition'] = 99
raceLoader.add_value('id', int(raceEntryData['racePosition']) + 100 * int(raceId))
yield raceLoader.load_item()
def persistToFile(self, raceId, response):
filename = 'raceResults/%s.html' % raceId
with open(filename, 'wb') as file:
file.write(response.body)
self.log('RACE %s saved file %s' % (raceId, filename))
|
Python
| 0.000001
|
@@ -1778,16 +1778,81 @@
dList))%0A
+%09%09%09self.logger.info('Using scarapped END RACE: %25i', lastRaceId)%0A%0A
%09%09if las
|
c22ffd3c2c8feb0dfba2eb6df6fb8cbb49475cee
|
Remove unused `message` arg, Fixes #4824
|
salt/returners/sentry_return.py
|
salt/returners/sentry_return.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Salt returner that reports errors back to Sentry
Pillar needs something like::
raven:
servers:
- http://192.168.1.1
- https://sentry.example.com
public_key: deadbeefdeadbeefdeadbeefdeadbeef
secret_key: beefdeadbeefdeadbeefdeadbeefdead
project: 1
and http://pypi.python.org/pypi/raven installed
'''
import logging
try:
from raven import Client
has_raven = True
except ImportError:
has_raven = False
logger = logging.getLogger(__name__)
def __virtual__():
if not has_raven:
return False
return 'sentry'
def returner(ret):
'''
If an error occurs, log it to sentry
'''
def connect_sentry(message, result):
pillar_data = __salt__['pillar.raw']()
sentry_data = {
'result': result,
'returned': ret,
'pillar': pillar_data,
'grains': __salt__['grains.items']()
}
servers = []
try:
for server in pillar_data['raven']['servers']:
servers.append(server + '/api/store/')
client = Client(
servers=servers,
public_key=pillar_data['raven']['public_key'],
secret_key=pillar_data['raven']['secret_key'],
project=pillar_data['raven']['project'],
)
except KeyError as missing_key:
logger.error("Sentry returner need config '%s' in pillar",
missing_key)
else:
try:
client.captureMessage(ret['comment'], extra=sentry_data)
except Exception as err:
logger.error("Can't send message to sentry: %s", err,
exc_info=True)
requisite_error = 'One or more requisite failed'
try:
if 'success' not in ret:
logger.debug('no success data, report')
connect_sentry(ret['return'], ret)
else:
if not ret['success']:
logger.debug('not a success, report')
connect_sentry(ret['return'], ret)
else:
for state in ret['return']:
if not ret['return'][state]['result'] and \
ret['return'][state]['comment'] != requisite_error:
connect_sentry(state, ret['return'][state])
except Exception as err:
logger.error("Can't run connect_sentry: %s", err, exc_info=True)
|
Python
| 0
|
@@ -727,17 +727,8 @@
try(
-message,
resu
|
e7c462af8382a5eb7f5fee2abfc04f002e36b193
|
Add varint and varlong tests
|
tests/mcp/test_datautils.py
|
tests/mcp/test_datautils.py
|
Python
| 0
|
@@ -0,0 +1,1065 @@
+from spock.mcp import datautils%0Afrom spock.utils import BoundBuffer%0A%0A%0Adef test_unpack_varint():%0A largebuff = BoundBuffer(b'%5Cx80%5Cx94%5Cxeb%5Cxdc%5Cx03')%0A smallbuff = BoundBuffer(b'%5Cx14')%0A assert datautils.unpack_varint(smallbuff) == 20%0A assert datautils.unpack_varint(largebuff) == 1000000000%0A%0A%0Adef test_pack_varint():%0A assert datautils.pack_varint(20) == b'%5Cx14'%0A assert datautils.pack_varint(1000000000) == b'%5Cx80%5Cx94%5Cxeb%5Cxdc%5Cx03'%0A assert datautils.pack_varint(-10000000000) is None%0A assert datautils.pack_varint(10000000000) is None%0A%0A%0Adef test_unpack_varlong():%0A largebuff = BoundBuffer(b'%5Cx80%5Cxc8%5Cxaf%5Cxa0%25')%0A smallbuff = BoundBuffer(b'%5Cx14')%0A assert datautils.unpack_varlong(smallbuff) == 20%0A assert datautils.unpack_varlong(largebuff) == 10000000000%0Apass%0A%0A%0Adef test_pack_varlong():%0A assert datautils.pack_varlong(20) == b'%5Cx14'%0A assert datautils.pack_varlong(10000000000) == b'%5Cx80%5Cxc8%5Cxaf%5Cxa0%25'%0A assert datautils.pack_varlong(10000000000000000000) is None%0A assert datautils.pack_varlong(-10000000000000000000) is None%0A
|
|
ce34c78c627addb20199a79f375073102050d9a8
|
fix following Barry's resolution of PETSc issue #139
|
c/ch5/plotTS.py
|
c/ch5/plotTS.py
|
#!/usr/bin/env python
help =\
'''
Plot a trajectory, or frames if the solution has two spatial dimensions, generated by
running a PETSc TS program. Reads output from
-ts_monitor binary:TDATA -ts_monitor_solution binary:UDATA
Requires access to bin/PetscBinaryIO.py and bin/petsc_conf.py, e.g. sym-links.
'''
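# Hypothetical invocation (file names are assumptions):
#   ./plotTS.py -mx 100 -oroot frame t.dat u.dat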
import PetscBinaryIO
from sys import exit, stdout
from time import sleep
from argparse import ArgumentParser, RawTextHelpFormatter
import numpy as np
import matplotlib.pyplot as plt
parser = ArgumentParser(description=help,
formatter_class=RawTextHelpFormatter)
parser.add_argument('tfile',metavar='TDATA',
help='from -ts_monitor binary:TDATA')
parser.add_argument('ufile',metavar='UDATA',
help='from -ts_monitor_solution binary:UDATA')
parser.add_argument('-mx',metavar='MX', type=int, default=-1,
help='spatial grid with MX points in x direction')
parser.add_argument('-my',metavar='MY', type=int, default=-1,
help='spatial grid with MY points in y direction; =MX by default')
parser.add_argument('-dof',metavar='DOF', type=int, default=1,
help='degrees of freedom of solution; =1 by default')
parser.add_argument('-c',metavar='C', type=int, default=0,
help='component; =0,1,..,dof-1; ignored if dof=1)')
parser.add_argument('-o',metavar='FILE',dest='filename',
help='image file FILE (trajectory case)')
parser.add_argument('-oroot',metavar='ROOT',dest='rootname',
help='frame files ROOT000.png,ROOT001.png,... (movie case)')
args = parser.parse_args()
if args.mx > 0 and args.my < 1:
args.my = args.mx
frames = (args.mx > 0)
tfile = open(args.tfile,'r')
t = np.fromfile(tfile, dtype='>d')
tfile.close()
io = PetscBinaryIO.PetscBinaryIO()
U = np.array(io.readBinaryFile(args.ufile)).transpose()
dims = np.shape(U)
if len(t) != dims[1]:
print 'time dimension mismatch: %d != %d' % (len(t),dims[1])
exit(1)
if frames:
if args.dof == 1:
if dims[0] != args.mx * args.my:
print 'spatial dimension mismatch: %d != %d * %d (and dof=1)' % \
(dims[0],args.mx,args.my)
exit(2)
U = np.reshape(U,(args.my,args.mx,len(t)))
dims = np.shape(U)
print 'solution U is shape=(%d,%d,%d)' % tuple(dims)
else:
if dims[0] != args.mx * args.my * args.dof:
print 'spatial dimension mismatch: %d != %d * %d * %d' % \
(dims[0],args.mx,args.my,args.dof)
exit(3)
U = np.reshape(U,(args.my,args.mx,args.dof,len(t)))
dims = np.shape(U)
print 'solution U is shape=(%d,%d,%d,%d)' % tuple(dims)
print 'time t has length=%d, with mx x my = %d x %d frames' % (dims[-1],dims[1],dims[0])
else:
print 'time t has length=%d, solution Y is shape=(%d,%d)' % \
(len(t),dims[0],dims[1])
if frames:
print 'generating files %s000.png .. %s%03d.png:' % \
(args.rootname,args.rootname,len(t)-1)
if args.dof == 1:
plt.imshow(U[:,:,0])
else:
plt.imshow(U[:,:,args.c,0])
plt.title('t = %g' % t[0])
if args.rootname:
plt.savefig(args.rootname + "%03d.png" % 0)
else:
plt.ion()
plt.show()
for k in range(len(t)-1):
print '.',
stdout.flush()
if args.dof == 1:
plt.imshow(U[:,:,k+1])
else:
plt.imshow(U[:,:,args.c,k+1])
plt.title('t = %g' % t[k+1])
if args.rootname:
plt.savefig(args.rootname + "%03d.png" % (k+1))
else:
plt.pause(0.1)
print '.'
else:
for k in range(dims[0]):
plt.plot(t,U[k],label='y[%d]' % k)
plt.xlabel('t')
plt.legend()
if args.filename:
plt.savefig(args.filename)
else:
plt.show()
|
Python
| 0
|
@@ -1725,119 +1725,94 @@
0)%0A%0A
-tfile = open(args.tfile,'r')%0At = np.fromfile(tfile, dtype='%3Ed')%0Atfile.close()%0A%0Aio = PetscBinaryIO.PetscBinaryIO
+io = PetscBinaryIO.PetscBinaryIO()%0At = np.array(io.readBinaryFile(args.tfile)).flatten
()%0AU
|
5e3f99093dfe7392fcbbc0b39582e4b0d3a64511
|
Update DLA
|
models/dla.py
|
models/dla.py
|
'''DLA in PyTorch.
Reference:
Deep Layer Aggregation. https://arxiv.org/abs/1707.06484
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, in_planes, planes, stride=1):
super(BasicBlock, self).__init__()
self.conv1 = nn.Conv2d(
in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
stride=1, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.shortcut = nn.Sequential()
if stride != 1 or in_planes != self.expansion*planes:
self.shortcut = nn.Sequential(
nn.Conv2d(in_planes, self.expansion*planes,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(self.expansion*planes)
)
def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = self.bn2(self.conv2(out))
out += self.shortcut(x)
out = F.relu(out)
return out
class Root(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=1):
super(Root, self).__init__()
self.conv = nn.Conv2d(
in_channels, out_channels, kernel_size,
stride=1, padding=(kernel_size - 1) // 2, bias=False)
self.bn = nn.BatchNorm2d(out_channels)
def forward(self, xs):
x = torch.cat(xs, 1)
out = F.relu(self.bn(self.conv(x)))
return out
class Tree(nn.Module):
def __init__(self, block, in_channels, out_channels, level=1, stride=1):
super(Tree, self).__init__()
self.level = level
self.root = Root((level+1)*out_channels, out_channels)
if level == 1:
self.left_node = block(in_channels, out_channels, stride=stride)
self.right_node = block(out_channels, out_channels, stride=1)
else:
for i in reversed(range(1, level)):
subtree = Tree(block, in_channels, out_channels,
level=i, stride=stride)
self.__setattr__('level_%d' % i, subtree)
self.left_node = block(out_channels, out_channels, stride=1)
self.right_node = block(out_channels, out_channels, stride=1)
def forward(self, x):
xs = []
for i in reversed(range(1, self.level)):
level_i = self.__getattr__('level_%d' % i)
x = level_i(x)
xs.append(x)
x = self.left_node(x)
xs.append(x)
x = self.right_node(x)
xs.append(x)
out = self.root(xs)
return out
class DLA(nn.Module):
def __init__(self, block=BasicBlock, num_classes=10):
super(DLA, self).__init__()
self.base = nn.Sequential(
nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(16),
nn.ReLU(True)
)
self.layer1 = nn.Sequential(
nn.Conv2d(16, 16, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(16),
nn.ReLU(True)
)
self.layer2 = nn.Sequential(
nn.Conv2d(16, 32, kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(32),
nn.ReLU(True)
)
self.layer3 = Tree(block, 32, 64, level=1, stride=1)
self.layer4 = Tree(block, 64, 128, level=2, stride=2)
self.layer5 = Tree(block, 128, 256, level=2, stride=2)
self.layer6 = Tree(block, 256, 512, level=1, stride=2)
self.linear = nn.Linear(512, num_classes)
def forward(self, x):
out = self.base(x)
out = self.layer1(out)
out = self.layer2(out)
out = self.layer3(out)
out = self.layer4(out)
out = self.layer5(out)
out = self.layer6(out)
out = F.avg_pool2d(out, 4)
out = out.view(out.size(0), -1)
out = self.linear(out)
return out
def test():
net = DLA()
print(net)
x = torch.randn(1, 3, 32, 32)
y = net(x)
print(y.size())
if __name__ == '__main__':
test()
|
Python
| 0.000001
|
@@ -1771,16 +1771,43 @@
= level%0A
+ if level == 1:%0A
@@ -1827,17 +1827,9 @@
oot(
-(level+1)
+2
*out
@@ -1853,39 +1853,16 @@
annels)%0A
- if level == 1:%0A
@@ -2018,16 +2018,83 @@
else:%0A
+ self.root = Root((level+2)*out_channels, out_channels)%0A
@@ -2311,16 +2311,93 @@
ubtree)%0A
+ self.prev_root = block(in_channels, out_channels, stride=stride)%0A
@@ -2554,32 +2554,32 @@
rward(self, x):%0A
-
xs = %5B%5D%0A
@@ -2574,16 +2574,59 @@
xs =
+ %5Bself.prev_root(x)%5D if self.level %3E 1 else
%5B%5D%0A
|
71cdeb48fd0924680a74261fc59950f3b9878426
|
Fix try job status PRESUBMIT check when an issue is private.
|
PRESUBMIT.py
|
PRESUBMIT.py
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for Chromium.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for details on the presubmit API built into gcl.
"""
EXCLUDED_PATHS = (
r"breakpad[\\\/].*",
r"skia[\\\/].*",
r"v8[\\\/].*",
)
def CheckChangeOnUpload(input_api, output_api):
results = []
# What does this code do?
# It loads the default black list (e.g. third_party, experimental, etc) and
  # adds our black list (breakpad, skia and v8 are still not following
  # google style and are not really living in this repository).
# See presubmit_support.py InputApi.FilterSourceFile for the (simple) usage.
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
results.extend(input_api.canned_checks.CheckLongLines(
input_api, output_api, sources))
results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
input_api, output_api, sources))
results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
input_api, output_api, sources))
results.extend(input_api.canned_checks.CheckChangeHasBugField(
input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasTestField(
input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
input_api, output_api, sources))
results.extend(input_api.canned_checks.CheckSvnForCommonMimeTypes(
input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDED_PATHS
sources = lambda x: input_api.FilterSourceFile(x, black_list=black_list)
results.extend(input_api.canned_checks.CheckLongLines(
input_api, output_api, sources))
results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
input_api, output_api, sources))
results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
input_api, output_api, sources))
results.extend(input_api.canned_checks.CheckChangeHasBugField(
input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasTestField(
input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
input_api, output_api, sources))
results.extend(input_api.canned_checks.CheckSvnForCommonMimeTypes(
input_api, output_api))
results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
input_api, output_api, sources))
# Make sure the tree is 'open'.
# TODO(maruel): Run it in a separate thread to parallelize checks?
results.extend(CheckTreeIsOpen(
input_api,
output_api,
'http://chromium-status.appspot.com/status',
'0',
'http://chromium-status.appspot.com/current?format=raw'))
results.extend(CheckTryJobExecution(input_api, output_api))
return results
def CheckTryJobExecution(input_api, output_api):
outputs = []
if not input_api.change.issue or not input_api.change.patchset:
return outputs
url = "http://codereview.chromium.org/%d/get_build_results/%d" % (
input_api.change.issue, input_api.change.patchset)
PLATFORMS = ('win', 'linux', 'mac')
try:
connection = input_api.urllib2.urlopen(url)
# platform|status|url
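    # e.g. "win|success|http://.../build/1" (hypothetical sample line)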
values = [item.split('|', 2) for item in connection.read().splitlines()]
connection.close()
    # Reformat as a dict of platform: [status, url]
values = dict([[v[0], [v[1], v[2]]] for v in values])
for platform in PLATFORMS:
values.setdefault(platform, ['not started', ''])
message = None
non_success = [k.upper() for k,v in values.iteritems() if v[0] != 'success']
if 'failure' in [v[0] for v in values.itervalues()]:
message = 'Try job failures on %s!\n' % ', '.join(non_success)
elif non_success:
message = ('Unfinished (or not even started) try jobs on '
'%s.\n') % ', '.join(non_success)
if message:
message += (
'Is try server wrong or broken? Please notify maruel@chromium.org. '
'Thanks.\n')
outputs.append(output_api.PresubmitPromptWarning(message=message))
except input_api.urllib2.HTTPError, e:
if e.code == 404:
# Fallback to no try job.
# TODO(maruel): Change to a PresubmitPromptWarning once the try server is
# stable enough and it seems to work fine.
outputs.append(output_api.PresubmitNotifyResult(
'You should try the patch first.'))
else:
# Another HTTP error happened, warn the user.
      # TODO(maruel): Change to a PresubmitPromptWarning once it is deemed to work
# fine.
outputs.append(output_api.PresubmitNotifyResult(
'Got %s while looking for try job status.' % str(e)))
return outputs
def CheckTreeIsOpen(input_api, output_api, url, closed, url_text):
"""Similar to the one in presubmit_canned_checks except it shows an helpful
status text instead.
"""
assert(input_api.is_committing)
try:
connection = input_api.urllib2.urlopen(url)
status = connection.read()
connection.close()
if input_api.re.match(closed, status):
long_text = status + '\n' + url
try:
connection = input_api.urllib2.urlopen(url_text)
long_text = connection.read().strip()
connection.close()
except IOError:
pass
return [output_api.PresubmitError("The tree is closed.",
long_text=long_text)]
except IOError:
pass
return []
|
Python
| 0.000007
|
@@ -3568,24 +3568,126 @@
ion.close()%0A
+ if not values:%0A # It returned an empty list. Probably a private review.%0A return outputs%0A
# Reform
|
4e887718e44453f0f0cd65addc0284668b31bbd2
|
Disable session cache
|
src/clarityv2/conf/production.py
|
src/clarityv2/conf/production.py
|
from .base import *
import raven
#
# Standard Django settings.
#
DEBUG = False
ENVIRONMENT = 'production'
ADMINS = (
'Alex', 'khomenkodev17@gmail.com'
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': os.getenv('DB_NAME'),
'USER': os.getenv('DB_USER'),
'PASSWORD': os.getenv('DB_PASSWORD'),
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Make this unique, and don't share it with anybody.
SECRET_KEY = os.getenv('SECRET_KEY')
ALLOWED_HOSTS = ['claritydev.net', '188.166.1.116', '0.0.0.0']
# Redis cache backend
# NOTE: If you do not use a cache backend, do not use a session backend or
# cached template loaders that rely on a backend.
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://127.0.0.1:6379/2", # NOTE: watch out for multiple projects using the same cache!
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"IGNORE_EXCEPTIONS": True,
}
}
}
# Caching sessions.
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
SESSION_CACHE_ALIAS = "default"
# Caching templates.
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', RAW_TEMPLATE_LOADERS),
]
# The file storage engine to use when collecting static files with the
# collectstatic management command.
# STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
# Production logging facility.
LOGGING['loggers'].update({
'': {
'handlers': ['sentry'],
'level': 'ERROR',
'propagate': False,
},
'clarityv2': {
'handlers': ['project'],
'level': 'WARNING',
'propagate': True,
},
'django': {
'handlers': ['django'],
'level': 'WARNING',
'propagate': True,
},
'django.security.DisallowedHost': {
'handlers': ['django'],
'level': 'CRITICAL',
'propagate': False,
},
})
#
# Custom settings
#
# Show active environment in admin.
SHOW_ALERT = False
# We will assume we're running under https
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
CSRF_COOKIE_SECURE = False # TODO enable after SSL is setup
X_FRAME_OPTIONS = 'DENY'
# Only set this when we're behind Nginx as configured in our example-deployment
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
#
# Library settings
#
# Raven
INSTALLED_APPS = INSTALLED_APPS + [
'raven.contrib.django.raven_compat',
]
# RAVEN_CONFIG = {
# 'dsn': 'https://',
# 'release': raven.fetch_git_sha(os.path.dirname(os.pardir)),
# }
LOGGING['handlers'].update({
'sentry': {
'level': 'WARNING',
'class': 'raven.handlers.logging.SentryHandler',
# 'dsn': RAVEN_CONFIG['dsn']
},
})
|
Python
| 0.000001
|
@@ -686,16 +686,18 @@
.0.0'%5D%0A%0A
+#
# Redis
@@ -711,16 +711,18 @@
ackend%0A#
+ #
NOTE: I
@@ -788,16 +788,18 @@
end or%0A#
+ #
cached
@@ -839,16 +839,18 @@
ackend.%0A
+#
CACHES =
@@ -852,20 +852,22 @@
HES = %7B%0A
+#
+
%22default
@@ -867,24 +867,26 @@
default%22: %7B%0A
+#
%22BAC
@@ -925,16 +925,18 @@
Cache%22,%0A
+#
@@ -1038,16 +1038,18 @@
cache!%0A
+#
@@ -1057,24 +1057,25 @@
OPTIONS%22: %7B%0A
+#
@@ -1074,16 +1074,17 @@
+
%22CLIENT_
@@ -1128,16 +1128,18 @@
lient%22,%0A
+#
@@ -1169,24 +1169,25 @@
: True,%0A
+#
%7D%0A %7D%0A
@@ -1182,19 +1182,27 @@
+
%7D%0A
+#
%7D%0A
-%7D%0A%0A
+# %7D%0A#%0A#
# Ca
@@ -1217,16 +1217,18 @@
ssions.%0A
+#
SESSION_
@@ -1277,16 +1277,18 @@
.cache'%0A
+#
SESSION_
|
ca885203ab82026ca21a200c1bee5ad3c0a82cb5
|
Change default interval
|
src/cmsplugin_carousel/models.py
|
src/cmsplugin_carousel/models.py
|
from adminsortable.models import SortableMixin
from cms.models import CMSPlugin
from cms.models.fields import PageField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from filer.fields.image import FilerImageField
class CarouselPlugin(CMSPlugin):
interval = models.PositiveIntegerField(_('Interval'), default=1)
title = models.CharField(_('Title'), max_length=255, default='', blank=True)
def __str__(self):
return self.title or str(self.pk)
def copy_relations(self, oldinstance):
super(CarouselPlugin, self).copy_relations(oldinstance)
for picture in oldinstance.pictures.all().iterator():
picture.pk = None
picture.plugin = self
picture.save()
class CarouselPicture(SortableMixin):
plugin = models.ForeignKey(CarouselPlugin, related_name='pictures')
image = FilerImageField(verbose_name=_('Image'), related_name='+')
alt_tag = models.CharField(_('Alt tag'), max_length=255, blank=True)
text = models.TextField(verbose_name=_('Text over image'), blank=True)
url = models.CharField(verbose_name=_('URL'), blank=True, null=True, max_length=500)
page = PageField(verbose_name=_("Page"), blank=True, null=True)
open_in_tab = models.BooleanField(verbose_name=_('Open in new window'))
ordering = models.PositiveIntegerField(default=0, editable=False, db_index=True)
class Meta:
ordering = ['ordering', ]
def link(self):
if self.page is not None:
return self.page
else:
return self.url
def __str__(self):
return self.alt_tag
|
Python
| 0.000001
|
@@ -350,9 +350,9 @@
ult=
-1
+5
)%0A
|
401b442aef25d17dafb46f6d3ecd96a7ed1b1e13
|
Add back doctest, with execution suppressed
|
itertoolz/core.py
|
itertoolz/core.py
|
import itertools
identity = lambda x: x
def remove(predicate, coll):
""" Return those items of collection for which predicate(item) is true.
>>> from itertoolz import remove
>>> def even(x):
... return x % 2 == 0
>>> remove(even, [1, 2, 3, 4])
[1, 3]
"""
return filter(lambda x: not predicate(x), coll)
def groupby(f, coll):
""" Group a collection by a key function
>>> from itertoolz import groupby
>>> names = ['Alice', 'Bob', 'Charlie', 'Dan', 'Edith', 'Frank']
>>> groupby(len, names)
{3: ['Bob', 'Dan'], 5: ['Alice', 'Edith', 'Frank'], 7: ['Charlie']}
"""
d = dict()
for item in coll:
key = f(item)
if key not in d:
d[key] = []
d[key].append(item)
return d
from Queue import PriorityQueue
def merge_sorted(*iters, **kwargs):
""" Merge and sort a collection of sorted collections
>>> from itertoolz import merge_sorted
>>> list(merge_sorted([1, 3, 5], [2, 4, 6]))
[1, 2, 3, 4, 5, 6]
>>> ''.join(merge_sorted('abc', 'abc', 'abc'))
'aaabbbccc'
"""
key = kwargs.get('key', identity)
iters = map(iter, iters)
pq = PriorityQueue()
def inject_first_element(it):
try:
item = next(it)
pq.put((key(item), item, it))
except StopIteration:
pass
# Initial population
for it in iters:
inject_first_element(it)
# Repeatedly yield and then repopulate from the same iterator
while not pq.empty():
_, item, it = pq.get()
yield item
inject_first_element(it)
def merge_dict(*dicts):
""" Merge a collection of dictionaries
>>> from itertoolz import merge_dict
>>> merge_dict({1: 'one'}, {2: 'two'})
{1: 'one', 2: 'two'}
Later dictionaries have precedence
>>> merge_dict({1: 2, 3: 4}, {3: 3, 4: 4})
{1: 2, 3: 3, 4: 4}
"""
rv = dict()
for d in dicts:
rv.update(d)
return rv
def interleave(seqs, pass_exceptions=()):
""" Interleave a sequence of sequences
>>> from itertoolz import interleave
>>> list(interleave([[1, 2], [3, 4]]))
[1, 3, 2, 4]
>>> ''.join(interleave(('ABC', 'XY')))
'AXBYC'
Both the individual sequences and the sequence of sequences may be infinite
Returns a lazy iterator
"""
iters = itertools.imap(iter, seqs)
while iters:
newiters = []
for itr in iters:
try:
yield next(itr)
newiters.append(itr)
except (StopIteration,) + tuple(pass_exceptions):
pass
iters = newiters
def unique(seq, key=identity):
""" Return only unique elements of a sequence
>>> from itertoolz import unique
>>> tuple(unique((1, 2, 3)))
(1, 2, 3)
>>> tuple(unique((1, 2, 1, 3)))
(1, 2, 3)
Uniqueness can be defined by key keyword
>>> def mod_10(x):
... return x % 10
>>> tuple(unique((5, 10, 15, 18, 20, 38), key=mod_10))
(5, 10, 18)
"""
seen = set()
for item in seq:
tag = key(item)
if tag not in seen:
seen.add(tag)
yield item
def intersection(*seqs):
""" Lazily evaluated intersection of sequences
>>> from itertoolz import intersection
>>> list(intersection([1, 2, 3], [2, 3, 4]))
[2, 3]
"""
return (item for item in seqs[0]
if all(item in seq for seq in seqs[1:]))
def iterable(x):
""" Is x iterable?
>>> from itertoolz import iterable
>>> iterable([1, 2, 3])
True
>>> iterable('abc')
True
>>> iterable(5)
False
"""
try:
iter(x)
return True
except TypeError:
return False
def distinct(seq):
""" All values in sequence are distinct
>>> from itertoolz import distinct
>>> distinct([1, 2, 3])
True
>>> distinct([1, 2, 1])
False
>>> distinct("Hello")
False
>>> distinct("World")
True
"""
return len(seq) == len(set(seq))
def frequencies(seq):
""" Find number of occurrences of each value in seq
"""
identity = lambda x: x
return dict([(k, len(v)) for k, v in groupby(identity, seq).items()])
|
Python
| 0.000001
|
@@ -4108,16 +4108,131 @@
e in seq
+%0A%0A %3E%3E%3E frequencies(%5B'cat', 'cat', 'ox', 'pig', 'pig', 'cat'%5D) #doctest: +SKIP%0A %7B'cat': 3, 'ox': 1, 'pig': 2%7D
%0A %22%22%22
|
4ce4938a03bcd71c3eb545d8caa9798d50bd99e5
|
fix bug when dealing with 'in_app' format
|
itunesiap/core.py
|
itunesiap/core.py
|
import json
import requests
import contextlib
from six import u
from . import exceptions
RECEIPT_PRODUCTION_VALIDATION_URL = "https://buy.itunes.apple.com/verifyReceipt"
RECEIPT_SANDBOX_VALIDATION_URL = "https://sandbox.itunes.apple.com/verifyReceipt"
USE_PRODUCTION = True
USE_SANDBOX = False
def config_from_mode(mode):
if mode not in ('production', 'sandbox', 'review', 'reject'):
raise exceptions.ModeNotAvailable(mode)
production = mode in ('production', 'review')
sandbox = mode in ('sandbox', 'review')
return production, sandbox
def set_verification_mode(mode):
"""Set global verification mode that where allows production or sandbox.
`production`, `sandbox`, `review` or `reject` availble. Or raise
an exception.
`production`: Allows production receipts only. Default.
`sandbox`: Allows sandbox receipts only.
    `review`: Allows production receipts but uses sandbox as a fallback.
`reject`: Reject all receipts.
"""
global USE_PRODUCTION, USE_SANDBOX
USE_PRODUCTION, USE_SANDBOX = config_from_mode(mode)
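# Hypothetical usage sketch (not part of the original module):
#   set_verification_mode('review')  # production first, sandbox as fallback
#   receipt = Request(raw_receipt).verify()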
class Request(object):
"""Validation request with raw receipt. Receipt must be base64 encoded string.
Use `verify` method to try verification and get Receipt or exception.
"""
def __init__(self, receipt, password='', **kwargs):
self.receipt = receipt
self.password = password
self.use_production = kwargs.get('use_production', USE_PRODUCTION)
self.use_sandbox = kwargs.get('use_sandbox', USE_SANDBOX)
self.response = None
self.result = None
def __repr__(self):
valid = None
if self.result:
valid = self.result['status'] == 0
return u'<Request(valid:{0}, data:{1}...)>'.format(valid, self.receipt[:20])
def verify_from(self, url):
"""Try verification from given url."""
        # If the password exists in kwargs, pass it along with the request; otherwise leave it alone
print json.dumps({'receipt-data': self.receipt, 'password': self.password})
if len(self.password) > 1:
self.response = requests.post(url, json.dumps({'receipt-data': self.receipt, 'password': self.password}), verify=False)
else:
self.response = requests.post(url, json.dumps({'receipt-data': self.receipt}), verify=False)
if self.response.status_code != 200:
raise exceptions.ItunesServerNotAvailable(self.response.status_code, self.response.content)
self.result = self._extract_receipt(json.loads(self.response.content))
status = self.result['status']
if status != 0:
raise exceptions.InvalidReceipt(status, receipt=self.result.get('receipt', None))
return self.result
def _extract_receipt(self, receipt_data):
"""There are two formats that itunes iap purchase receipts are
sent back in
"""
in_app_purchase = receipt_data.get('in_app', [])
if len(in_app_purchase) > 0:
receipt_data.update(in_app_purchase[0])
return receipt_data
def validate(self):
return self.verify()
def verify(self):
"""Try verification with settings. Returns a Receipt object if successed.
Or raise an exception. See `self.response` or `self.result` to see details.
"""
receipt = None
if self.use_production:
try:
receipt = self.verify_from(RECEIPT_PRODUCTION_VALIDATION_URL)
except exceptions.InvalidReceipt as e:
pass
if not receipt and self.use_sandbox:
try:
receipt = self.verify_from(RECEIPT_SANDBOX_VALIDATION_URL)
except exceptions.InvalidReceipt as ee:
if not self.use_production:
e = ee
if not receipt:
raise e # raise original error
return Receipt(receipt)
@contextlib.contextmanager
def verification_mode(self, mode):
configs = self.use_production, self.use_sandbox
self.use_production, self.use_sandbox = config_from_mode(mode)
yield
self.use_production, self.use_sandbox = configs
class Receipt(object):
"""Pretty interface for decoded receipt obejct.
"""
def __init__(self, data):
self.data = data
self.receipt = data['receipt']
self.receipt_keys = list(self.receipt.keys())
def __repr__(self):
return u'<Receipt({0}, {1})>'.format(self.status, self.receipt)
@property
def status(self):
return self.data['status']
@property
def latest_receipt(self):
return self.data['latest_receipt']
def __getattr__(self, key):
if key in self.receipt_keys:
return self.receipt[key]
try:
return super(Receipt, self).__getattr__(key)
except AttributeError:
return super(Receipt, self).__getattribute__(key)
|
Python
| 0.000001
|
@@ -2931,16 +2931,27 @@
ipt_data
+%5B'receipt'%5D
.get('in
@@ -3022,16 +3022,27 @@
ipt_data
+%5B'receipt'%5D
.update(
|
25508fef8d2632835bf29e22a39ef1d70b615f62
|
make PooledConnection more robust to other kinds of exceptions
|
connection.py
|
connection.py
|
import threading
from Queue import Queue
from thrift import Thrift
from thrift.transport import TTransport
from thrift.transport import TSocket
from thrift.protocol import TBinaryProtocol
from cassandra import Cassandra
__all__ = ['connect', 'connect_thread_local', 'connect_pooled']
DEFAULT_SERVER = 'localhost:9160'
def create_client_transport(server):
host, port = server.split(":")
socket = TSocket.TSocket(host, int(port))
transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol.TBinaryProtocolAccelerated(transport)
client = Cassandra.Client(protocol)
transport.open()
return client, transport
def connect(server=DEFAULT_SERVER):
"""
Construct a Cassandra connection
Parameters
----------
server : str
Cassandra server with format: "hostname:port"
Default: 'localhost:9160'
Returns
-------
Cassandra client
"""
return create_client_transport(server)[0]
def connect_pooled(servers=None):
"""
Construct a pooled queue of Cassandra connections, given by the servers list
Parameters
----------
servers: [server]
List of Cassandra servers with format: "hostname:port"
Create duplicate server entries if you want multiple connections
to the same server.
Default: 5 * ['localhost:9160']
(5 connections to the server at localhost)
Returns
-------
Cassandra client
"""
if servers is None:
servers = 5 * [DEFAULT_SERVER]
return PooledConnection(servers)
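# Hypothetical usage sketch: duplicate entries yield multiple connections
# to the same server, e.g.
#   client = connect_pooled(['db1:9160', 'db1:9160', 'db2:9160'])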
class PooledConnection(object):
def __init__(self, servers):
self.queue = Queue()
for server in servers:
self.queue.put((server, None, None))
def __getattr__(self, attr):
def client_call(*args, **kwargs):
server, client, transport = self.queue.get()
try:
if client is None:
client, transport = create_client_transport(server)
ret = getattr(client, attr)(*args, **kwargs)
self.queue.put((server, client, transport))
return ret
except Thrift.TException, exc:
# Connection error, try a new server next time
transport.close()
self.queue.put((server, None, None))
raise exc
setattr(self, attr, client_call)
return getattr(self, attr)
def connect_thread_local(servers=None):
"""
Construct a Cassandra connection for each thread
Parameters
----------
servers: [server]
List of Cassandra servers with format: "hostname:port"
Default: ['localhost:9160']
Returns
-------
Cassandra client
"""
if servers is None:
servers = [DEFAULT_SERVER]
return ThreadLocalConnection(servers)
class ThreadLocalConnection(object):
def __init__(self, servers):
self.queue = Queue()
for server in servers:
self.queue.put(server)
self.local = threading.local()
def __getattr__(self, attr):
def client_call(*args, **kwargs):
if getattr(self.local, 'client', None) is None:
server = self.queue.get()
self.queue.put(server)
self.local.client, self.local.transport = create_client_transport(server)
try:
return getattr(self.local.client, attr)(*args, **kwargs)
except Thrift.TException, exc:
# Connection error, try a new server next time
self.local.transport.close()
self.local.client = None
raise exc
setattr(self, attr, client_call)
return getattr(self, attr)
|
Python
| 0
|
@@ -2040,18 +2040,19 @@
ret
- =
+urn
getattr
@@ -2087,95 +2087,8 @@
gs)%0A
- self.queue.put((server, client, transport))%0A return ret%0A
@@ -2239,39 +2239,35 @@
-self.queue.put((server,
+client, transport =
None, N
@@ -2269,18 +2269,16 @@
ne, None
-))
%0A
@@ -2287,32 +2287,113 @@
raise exc
+%0A finally:%0A self.queue.put((server, client, transport))
%0A%0A setatt
|
d8077e7de68d2059ba338b650cfd1904686af754
|
fix problem in thread-local connections where it was reconnecting every function call
|
connection.py
|
connection.py
|
from exceptions import Exception
import threading
from Queue import Queue

from thrift import Thrift
from thrift.transport import TTransport
from thrift.transport import TSocket
from thrift.protocol import TBinaryProtocol

from cassandra import Cassandra

__all__ = ['connect', 'connect_thread_local', 'NoServerAvailable']

DEFAULT_SERVER = 'localhost:9160'


class NoServerAvailable(Exception):
    pass


def create_client_transport(server):
    host, port = server.split(":")
    socket = TSocket.TSocket(host, int(port))
    transport = TTransport.TBufferedTransport(socket)
    protocol = TBinaryProtocol.TBinaryProtocolAccelerated(transport)
    client = Cassandra.Client(protocol)
    transport.open()
    return client, transport


def connect(servers=None):
    """
    Constructs a single Cassandra connection. Initially connects to the first
    server on the list.

    If the connection fails, it will attempt to connect to each server on the
    list in turn until one succeeds. If it is unable to find an active server,
    it will throw a NoServerAvailable exception.

    Parameters
    ----------
    servers : [server]
              List of Cassandra servers with format: "hostname:port"
              Default: ['localhost:9160']

    Returns
    -------
    Cassandra client
    """
    if servers is None:
        servers = [DEFAULT_SERVER]
    return SingleConnection(servers)


def connect_thread_local(servers=None, round_robin=True):
    """
    Constructs a Cassandra connection for each thread. By default, it attempts
    to connect in a round_robin (load-balancing) fashion. Turn it off by
    setting round_robin=False.

    If the connection fails, it will attempt to connect to each server on the
    list in turn until one succeeds. If it is unable to find an active server,
    it will throw a NoServerAvailable exception.

    Parameters
    ----------
    servers : [server]
              List of Cassandra servers with format: "hostname:port"
              Default: ['localhost:9160']
    round_robin : bool
                  Balance the connections. Set to False to connect to each
                  server in turn.

    Returns
    -------
    Cassandra client
    """
    if servers is None:
        servers = [DEFAULT_SERVER]
    return ThreadLocalConnection(servers, round_robin)


class SingleConnection(object):
    def __init__(self, servers):
        self._servers = servers
        self._client = None

    def __getattr__(self, attr):
        def client_call(*args, **kwargs):
            if self._client is None:
                self._find_server()
            try:
                return getattr(self._client, attr)(*args, **kwargs)
            except Thrift.TException as exc:
                # Connection error, try to connect to all the servers
                self._transport.close()
                self._client = None
                self._find_server()
        setattr(self, attr, client_call)
        return getattr(self, attr)

    def _find_server(self):
        for server in self._servers:
            try:
                self._client, self._transport = create_client_transport(server)
                return
            except Thrift.TException as exc:
                continue
        raise NoServerAvailable()


class ThreadLocalConnection(object):
    def __init__(self, servers, round_robin):
        self._servers = servers
        self._queue = Queue()
        for i in xrange(len(servers)):
            self._queue.put(i)
        self._local = threading.local()
        self._round_robin = round_robin

    def __getattr__(self, attr):
        def client_call(*args, **kwargs):
            if getattr(self.local, 'client', None) is None:
                self._find_server()
            try:
                return getattr(self._local.client, attr)(*args, **kwargs)
            except Thrift.TException as exc:
                # Connection error, try to connect to all the servers
                self._local.transport.close()
                self._local.client = None
                self._find_server()
        setattr(self, attr, client_call)
        return getattr(self, attr)

    def _find_server(self):
        servers = self._servers
        if self._round_robin:
            i = self._queue.get()
            self._queue.put(i)
            servers = servers[i:] + servers[:i]
        for server in servers:
            try:
                self._local.client, self._local.transport = create_client_transport(server)
                return
            except Thrift.TException as exc:
                continue
        raise NoServerAvailable()
|
Python
| 0.000001
|
@@ -3665,16 +3665,17 @@
tr(self.
+_
local, '
|
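A minimal usage sketch for the connectors above; the import path and server address are illustrative, and a reachable Cassandra Thrift endpoint is assumed:

client = connect(servers=['localhost:9160'])      # one shared, lazily-opened connection
pooled = connect_thread_local(round_robin=True)   # one connection per calling thread
# Any Thrift Cassandra method proxies through __getattr__ on first use, e.g.
# client.describe_keyspaces() (exact methods depend on the Thrift interface version)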
ade18daea0046d4366e29a8000713c16edcde2c3
|
Replace generate_timeseries method by a more general generate method in ChartController
|
chartflo/factory.py
|
chartflo/factory.py
|
# -*- coding: utf-8 -*-

import pandas as pd
from altair import Chart, X, Y
from blessings import Terminal

from .models import Chart as ChartFlo

COLOR = Terminal()
OK = "[" + COLOR.bold_green("ok") + "] "


class ChartController():
    """
    Charts builder: handles serialization into Vega Lite format
    """

    def serialize_count(self, dataset, xfield, yfield, chart_type="bar",
                        width=800, height=300, color=None, size=None):
        """
        Serialize a chart from a count dataset:
        Ex: {"users":200, "groups":30}
        """
        x = []
        y = []
        xfieldtype = xfield[1]
        yfieldtype = yfield[1]
        for datapoint in dataset:
            x.append(datapoint)
            y.append(dataset[datapoint])
        df = pd.DataFrame({xfield[0]: x, yfield[0]: y})
        xencode, yencode = self._encode_fields(xfieldtype, yfieldtype)
        chart = self._chart_class(df, chart_type).encode(
            x=xencode,
            y=yencode,
            color=color,
            size=size
        ).configure_cell(
            width=width,
            height=height
        )
        return chart

    def serialize_timeseries(self, query, xfield, yfield, time_unit,
                             chart_type="line", width=800,
                             height=300, color=None, size=None):
        """
        Serialize a timeseries chart from a query
        """
        xfieldname = xfield[0]
        xfieldtype = xfield[1]
        dates = []
        vals = []
        yfieldname = yfield[0]
        yfieldtype = yfield[1]
        for row in query:
            # date
            has_date = False
            d = getattr(row, xfieldname)
            if d is not None:
                dstr = d.strftime("%Y-%m-%d %H:%M:%S")
                dates.append(dstr)
                has_date = True
            if has_date is True:
                v = getattr(row, yfieldname)
                vals.append(v)
        df = pd.DataFrame({xfieldname: dates, yfieldname: vals})
        # print(df)
        xencode, yencode = self._encode_fields(
            xfieldtype, yfieldtype, time_unit)
        if chart_type != "tick":
            chart = self._chart_class(df, chart_type).encode(
                x=xencode,
                y=yencode,
                color=color,
                size=size,
            ).configure_cell(
                width=width,
                height=height
            )
        else:
            chart = self._chart_class(df, chart_type).encode(
                x=xencode,
                color=color,
                size=size,
            ).configure_cell(
                width=width,
                height=height
            ).configure_scale(
                bandSize=30
            )
        return chart

    def count(self, query, field=None, func=None):
        """
        Count values for a query doing custom checks on fields
        """
        pack = {}
        if field is not None:
            pack = {field: func}
        return self._count_for_query(query, pack)

    def generate_timeseries(self, slug, name, chart_type,
                            query, x, y, width, height,
                            time_unit, verbose=False):
        """
        Generates a timeseries chart from a query
        """
        global OK, COLOR
        if verbose is True:
            print("Serializing", slug, "chart...")
        chart = ChartController()
        dataset = chart.serialize_timeseries(
            query, x, y, time_unit=time_unit, chart_type=chart_type,
            width=width, height=height
        )
        chart, _ = ChartFlo.objects.get_or_create(slug=slug)
        chart.generate(chart, slug, name, dataset)
        if verbose is True:
            print(OK + "Chart", COLOR.bold(slug), "saved")

    def _chart_class(self, df, chart_type):
        """
        Get the right chart class from a string
        """
        if chart_type == "bar":
            return Chart(df).mark_bar()
        elif chart_type == "circle":
            return Chart(df).mark_circle()
        elif chart_type == "line":
            return Chart(df).mark_line()
        elif chart_type == "point":
            return Chart(df).mark_point()
        elif chart_type == "area":
            return Chart(df).mark_area()
        elif chart_type == "tick":
            return Chart(df).mark_tick()
        elif chart_type == "text":
            return Chart(df).mark_text()
        elif chart_type == "square":
            return Chart(df).mark_square()
        elif chart_type == "rule":
            return Chart(df).mark_rule()
        return None

    def _encode_fields(self, xfieldtype, yfieldtype, time_unit=None):
        """
        Encode the fields in Altair format
        """
        if time_unit is not None:
            xencode = X(xfieldtype, timeUnit=time_unit)
        else:
            xencode = X(xfieldtype)
        yencode = Y(yfieldtype)
        return xencode, yencode

    def _count_for_query(self, query, fieldchecks):
        """
        Do custom checks on fields and returns a count
        """
        counter = 0
        for obj in query:
            commit = True
            if len(fieldchecks.keys()) > 0:
                for fieldname in fieldchecks.keys():
                    fieldval = str(getattr(obj, fieldname))
                    func = fieldchecks[fieldname]
                    if func is not None:
                        if func(fieldval) is False:
                            commit = False
            if commit is True:
                counter += 1
        return counter
|
Python
| 0.000004
|
@@ -2125,32 +2125,60 @@
ype, time_unit)%0A
+ print(%22COL%22, color)%0A
if chart
@@ -3098,27 +3098,16 @@
generate
-_timeseries
(self, s
@@ -3124,30 +3124,38 @@
chart_type,
-%0A
+ query, x, y,%0A
@@ -3163,53 +3163,51 @@
- query, x, y, width, height,%0A
+width, height, time_unit=None, color=None,%0A
@@ -3219,25 +3219,25 @@
-time_unit
+size=None
, verbos
@@ -3276,34 +3276,42 @@
nerates
-a timeseries chart
+or update a chart instance
from a
@@ -3462,24 +3462,62 @@
ontroller()%0A
+ if time_unit is not None:%0A
data
@@ -3562,16 +3562,20 @@
+
query, x
@@ -3627,32 +3627,36 @@
pe,%0A
+
width=width, hei
@@ -3657,33 +3657,253 @@
h, height=height
-%0A
+, size=size, color=color%0A )%0A else:%0A dataset = chart.serialize_count(%0A query, x, y, chart_type=chart_type,%0A width=width, height=height, size=size, color=color%0A
)%0A
|
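A hedged sketch of driving serialize_count above; the field tuples follow the (column name, Altair field shorthand) convention the code reads from xfield[0] and xfield[1]:

controller = ChartController()
chart = controller.serialize_count(
    {"users": 200, "groups": 30},   # the count dataset from the docstring
    xfield=("name", "name:N"),      # nominal x axis
    yfield=("count", "count:Q"),    # quantitative y axis
    chart_type="bar",
)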
a6b084f3888f26aa025634b436f4573c8ffcf177
|
Change is_active to is_public, reflecting updated usage
|
mixmind/models.py
|
mixmind/models.py
|
# -*- coding: utf-8 -*-
from sqlalchemy.orm import relationship, backref
from sqlalchemy import Boolean, DateTime, Column, Integer, String, ForeignKey, Enum, Float, Text, Unicode
import pendulum
from flask_security import UserMixin, RoleMixin

from . import db
from .util import VALID_UNITS


class RolesUsers(db.Model):
    id = Column(Integer(), primary_key=True)
    user_id = Column('user_id', Integer(), ForeignKey('user.id'))
    role_id = Column('role_id', Integer(), ForeignKey('role.id'))


class Role(db.Model, RoleMixin):
    id = Column(Integer(), primary_key=True)
    name = Column(String(80), unique=True)
    description = Column(String(255))


class User(db.Model, UserMixin):
    id = Column(Integer, primary_key=True)
    # flask-security attributes
    email = Column(Unicode(length=127), unique=True)
    first_name = Column(Unicode(length=127))
    last_name = Column(Unicode(length=127))
    nickname = Column(Unicode(length=127))
    password = Column(Unicode(length=127))
    last_login_at = Column(DateTime())
    current_login_at = Column(DateTime())
    last_login_ip = Column(String(63))
    current_login_ip = Column(String(63))
    login_count = Column(Integer())
    active = Column(Boolean())
    confirmed_at = Column(DateTime())
    # other attributes
    current_bar_id = Column(Integer())
    roles = relationship('Role', secondary='roles_users', backref=backref('users', lazy='dynamic'))  # many to many
    orders = relationship('Order', back_populates="user", foreign_keys='Order.user_id')  # primaryjoin="User.id==Order.user_id") # one to many
    orders_served = relationship('Order', back_populates="bartender", foreign_keys='Order.bartender_id')  # primaryjoin="User.id==Order.bartender_id") # one to many (for bartenders)
    works_at = relationship('Bar', secondary='bartenders', backref=backref('bartenders', lazy='dynamic'))  # many to many
    owns = relationship('Bar', back_populates="owner", foreign_keys='Bar.owner_id')  # one to many
    venmo_id = Column(String(63))  # venmo id as a string

    def get_name(self, short=False):
        if short:
            if self.nickname:
                return unicode(self.nickname)
            else:
                return unicode(self.first_name)
        return u'{} {}'.format(self.first_name, self.last_name)

    def get_name_with_email(self):
        return u'{} ({})'.format(self.get_name(short=True), self.email)

    def get_role_names(self):
        return u', '.join([role.name for role in self.roles])

    def get_bar_names(self):
        return u', '.join([bar.cname for bar in self.works_at])


class Order(db.Model):
    id = Column(Integer, primary_key=True)
    bar_id = Column(Integer, ForeignKey('bar.id'))
    user_id = Column(Integer, ForeignKey('user.id'))
    bartender_id = Column(Integer, ForeignKey('user.id'))
    user = relationship('User', back_populates="orders", foreign_keys=[user_id])
    bartender = relationship('User', back_populates="orders_served", foreign_keys=[bartender_id])
    timestamp = Column(DateTime())
    confirmed = Column(DateTime())
    recipe_name = Column(Unicode(length=127))
    recipe_html = Column(Text())

    def where(self):
        bar = Bar.query.filter_by(id=self.bar_id).one_or_none()
        if bar:
            return bar.name

    def time_to_confirm(self):
        if not self.confirmed:
            return "N/A"
        diff = pendulum.instance(self.confirmed) - pendulum.instance(self.timestamp)
        return "{} minutes, {} seconds".format(diff.minutes, diff.remaining_seconds)


class Bar(db.Model):
    id = Column(Integer(), primary_key=True)
    cname = Column(Unicode(length=63), unique=True)  # unique name for finding the bar
    name = Column(Unicode(length=63))
    tagline = Column(Unicode(length=255), default=u"Tips – always appreciated, never required")
    is_active = Column(Boolean(), default=False)
    is_default = Column(Boolean(), default=False)  # the current default bar
    bartender_on_duty = Column(Integer(), ForeignKey('user.id'))
    owner_id = Column(Integer(), ForeignKey('user.id'))
    owner = relationship('User', back_populates="owns", foreign_keys=[owner_id])
    ingredients = relationship('Ingredient')  # one to many
    orders = relationship('Order')  # one to many
    # browse display settings
    markup = Column(Float(), default=1.10)
    prices = Column(Boolean(), default=True)
    stats = Column(Boolean(), default=False)
    examples = Column(Boolean(), default=False)
    convert = Column(Enum(*([''] + VALID_UNITS)), default='oz')
    prep_line = Column(Boolean(), default=False)
    origin = Column(Boolean(), default=False)
    info = Column(Boolean(), default=True)
    variants = Column(Boolean(), default=False)
    summarize = Column(Boolean(), default=True)


class Bartenders(db.Model):
    id = Column(Integer(), primary_key=True)
    user_id = Column(Integer(), ForeignKey('user.id'))
    bar_id = Column(Integer(), ForeignKey('bar.id'))
|
Python
| 0
|
@@ -3826,22 +3826,22 @@
%0A is_
-active
+public
= Colum
@@ -3857,30 +3857,59 @@
(), default=
-False)
+True) # visible to public customers
%0A is_defa
|
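After the rename in the diff above, a public-bar query reads as follows; this is a minimal sketch that reuses the Bar.query pattern Order.where() already uses in the file, and assumes an initialized Flask-SQLAlchemy session:

public_bars = Bar.query.filter_by(is_public=True).all()        # visible to customers
default_bar = Bar.query.filter_by(is_default=True).one_or_none()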
4d2c37fc9f5e73996226888e4e1ffe9b8ca1f190
|
Fix wrong func call
|
tc-coalesce/listener.py
|
tc-coalesce/listener.py
|
import traceback
import sys
import os
import json
import socket
import logging
import redis
import signal
from urlparse import urlparse

from stats import Stats
from coalescer import CoalescingMachine

from mozillapulse.config import PulseConfiguration
from mozillapulse.consumers import GenericConsumer


class StateError(Exception):
    pass


log = None


class Options(object):

    options = {}

    def __init__(self):
        self._parse_env()
        self._parse_args()

    def _parse_env(self):
        try:
            self.options['user'] = os.environ['PULSE_USER']
            self.options['passwd'] = os.environ['PULSE_PASSWD']
        except KeyError:
            traceback.print_exc()
            sys.exit(1)
        try:
            self.options['redis'] = urlparse(os.environ['REDIS_URL'])
        except KeyError:
            traceback.print_exc()
            sys.exit(1)

    def _parse_args(self):
        # TODO: parse args and return them as options
        pass


class TcPulseConsumer(GenericConsumer):

    def __init__(self, exchanges, **kwargs):
        super(TcPulseConsumer, self).__init__(
            PulseConfiguration(**kwargs), exchanges, **kwargs)

    def listen(self, callback=None, on_connect_callback=None):
        while True:
            consumer = self._build_consumer(
                callback=callback,
                on_connect_callback=on_connect_callback
            )
            with consumer:
                self._drain_events_loop()

    def _drain_events_loop(self):
        while True:
            try:
                self.connection.drain_events(timeout=self.timeout)
            except socket.timeout:
                logging.warning("Timeout! Restarting pulse consumer.")
                try:
                    self.disconnect()
                except Exception:
                    logging.warning("Problem with disconnect().")
                break


class TaskEventApp(object):

    # amqp/pulse listener
    listener = None

    # State transitions
    # pending --> running
    #         \-> exception
    exchanges = ['exchange/taskcluster-queue/v1/task-pending',
                 'exchange/taskcluster-queue/v1/task-running',
                 'exchange/taskcluster-queue/v1/task-exception']

    # TODO: move these to args and env options
    # TODO: make perm coalescer service pulse creds
    consumer_args = {
        'applabel': 'releng-tc-coalesce',
        'topic': ['#', '#', '#'],
        'durable': True,
        'user': 'public',
        'password': 'public'
    }

    options = None

    # Coalescing machine
    coalescer = None

    # pending_tasks is a dict where all pending tasks are kept along with the
    # received amqp msg and taskdef. Tasks are added or removed based on msgs
    # from task-pending, task-running, and task-exception
    # { 'task_id':
    #     {'task_msg_body': body,
    #      'coalesce_key': coalesce_key
    #     }
    # }

    def __init__(self, redis_prefix, options, stats, datastore):
        self.pf = redis_prefix
        self.options = options
        self.stats = stats
        self.rds = datastore
        self.coalescer = CoalescingMachine(redis_prefix,
                                           datastore,
                                           stats=stats)
        route_key = "route." + redis_prefix + "#"
        self.consumer_args['topic'] = [route_key] * len(self.exchanges)
        self.consumer_args['user'] = self.options['user']
        self.consumer_args['password'] = self.options['passwd']
        log.info("Binding to queue with route key: %s" % (route_key))
        self.listener = TcPulseConsumer(self.exchanges,
                                        callback=self._route_callback_handler,
                                        **self.consumer_args)

    def run(self):
        while True:
            try:
                self.listener.listen()
            except KeyboardInterrupt:
                # Handle both SIGTERM and SIGINT
                self._graceful_shutdown()
            except:
                traceback.print_exc()

    def _graceful_shutdown(self):
        log.info("Gracefully shutting down")
        log.info("Deleting Pulse queue")
        self.listener.delete_queue()
        sys.exit(1)

    def delete_queue(self):
        self._check_params()
        if not self.connection:
            self.connect()
        queue = self._create_queue()
        try:
            queue(self.connection).delete()
        except ChannelError as e:
            if e.message != 404:
                raise
        except:
            raise

    def _route_callback_handler(self, body, message):
        """
        Route call body and msg to proper callback handler
        """
        taskState = body['status']['state']
        taskId = body['status']['taskId']
        # Extract first coalesce key that matches
        for route in message.headers['CC']:
            route = route[6:]
            if self.pf == route[:len(self.pf)]:
                coalesce_key = route[len(self.pf):]
                break
        if taskState == 'pending':
            self._add_task_callback(taskId, coalesce_key)
        elif taskState == 'running' or taskState == 'exception':
            self.coalescer.insert_task(taskId, coalesce_key)
        else:
            raise StateError
        message.ack()
        self.stats.notch('total_msgs_handled')
        log.debug("taskId: %s (%s)" % (taskId, taskState))


def setup_log():
    # TODO: pass options and check for log level aka debug or not
    global log
    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    console_handler.setFormatter(formatter)
    log.addHandler(console_handler)
    return log


def main():
    setup_log()
    options = Options().options
    log.info("Starting Coalescing Service")
    # TODO: parse args
    # TODO: pass args and options
    # prefix for all redis keys and route key
    redis_prefix = "coalesce.v1."
    # setup redis object
    rds = redis.Redis(host=options['redis'].hostname,
                      port=options['redis'].port,
                      password=options['redis'].password)
    stats = Stats(redis_prefix, datastore=rds)
    app = TaskEventApp(redis_prefix, options, stats, datastore=rds)
    signal.signal(signal.SIGTERM, signal_term_handler)
    app.run()


# graceful shutdown via SIGTERM
def signal_term_handler(signal, frame):
    log.info("Handling signal: term")
    raise KeyboardInterrupt


if __name__ == '__main__':
    main()
|
Python
| 0.998929
|
@@ -5111,25 +5111,28 @@
elf.
-_add_task_callbac
+coalescer.insert_tas
k(ta
@@ -5235,38 +5235,38 @@
self.coalescer.
-insert
+remove
_task(taskId, co
|
64e3f7c56d8c395aebf5bc15fb03264fb9b390bb
|
Update Admin.py
|
Plugins/Admin.py
|
Plugins/Admin.py
|
import discord
from discord.ext import commands
import random
import asyncio
import Dependencies
from datetime import datetime


class Admin():

    def __init__(self, bot):
        self.bot = bot

    # strike command
    @commands.has_role("Mods")
    @commands.command(pass_context=True)
    async def strike(self, ctx, member: str=None, *, reason: str=None):
        '''Gives a strike to a specified person.'''
        if member is None:
            await self.bot.say('Please input a user.')
        elif member is not None and reason is None:
            await self.bot.say('Please input a reason')
        elif member is not None and reason is not None:
            member = ctx.message.mentions[0]
            strike_embed = discord.Embed(title="Strike", description='User: **{0}** \nReason: {1}'.format(member, reason), color=discord.Color.red())
            strike_embed.set_footer(text='Strike')
            await self.bot.send_message(discord.utils.get(ctx.message.server.channels, name="strikes"), '<@&332973960318943233>', embed=strike_embed)
            strike_embed = discord.Embed(title="Strike", description='You have been given a strike on the {0} server. \nReason: {1}'.format(ctx.message.server, reason), color=discord.Color.red())
            strike_embed.set_footer(text='Strike')
            await self.bot.send_message(member, embed=strike_embed)
            logMsg = "{0} has been striked on the {1} server. Reason: {2}".format(member, ctx.message.server, reason)
            log(logMsg)

    @commands.has_permissions(kick_members=True)
    @commands.command(pass_context=True)
    async def kick(self, ctx, member: str=None, *, reason: str=None):
        '''Kicks a specified person.'''
        if member is None:
            await self.bot.say('Please input a user.')
        elif member is not None and reason is None:
            await self.bot.say('Please input a reason')
        elif member is not None and reason is not None:
            member = ctx.message.mentions[0]
            await self.bot.kick(member)
            await self.bot.say('Kicked {0}. Reason: {1}'.format(member, reason))
            log('{0} has been kicked from {1}. Reason: {2}'.format(member, ctx.message.server, reason))

    @commands.has_permissions(ban_members=True)
    @commands.command(pass_context=True)
    async def ban(self, ctx, member: str=None, *, reason: str=None):
        '''Bans a specified person.'''
        if member is None:
            await self.bot.say('Please input a user.')
        elif member is not None and reason is None:
            await self.bot.say('Please input a reason')
        elif member is not None and reason is not None:
            member = ctx.message.mentions[0]
            await self.bot.ban(member)
            await self.bot.say('Banned {0}. Reason: {1}'.format(member, reason))
            log('{0} has been banned from {1}. Reason: {2}'.format(member, ctx.message.server, reason))


def log(message):
    print(datetime.now(), message)


def setup(bot):
    bot.add_cog(Admin(bot))
|
Python
| 0.000001
|
@@ -1187,25 +1187,14 @@
ike
-on the %7B0%7D server
+in %7B0%7D
. %5Cn
|
8bcd0063ce0ede395172409c5bcbe778a54cf92c
|
Fix bug in api related to querying mapobject types
|
tmaps/mapobject/api.py
|
tmaps/mapobject/api.py
|
import os.path as p
import json

from flask.ext.jwt import jwt_required
from flask.ext.jwt import current_identity
from flask import jsonify, request

from sqlalchemy.sql import text

from tmaps.api import api
from tmaps.extensions import db
from tmaps.mapobject import MapobjectOutline, MapobjectType
from tmaps.experiment import Experiment
from tmaps.response import (
    MALFORMED_REQUEST_RESPONSE,
    RESOURCE_NOT_FOUND_RESPONSE,
    NOT_AUTHORIZED_RESPONSE
)


@api.route('/experiments/<experiment_id>/mapobjects/<object_name>', methods=['GET'])
def get_mapobjects_tile(experiment_id, object_name):

    ex = db.session.query(Experiment).get_with_hash(experiment_id)
    if not ex:
        return RESOURCE_NOT_FOUND_RESPONSE
    # TODO: Requests should have an auth token
    # if not ex.belongs_to(current_identity):
    #     return NOT_AUTHORIZED_RESPONSE

    # The coordinates of the requested tile
    x = request.args.get('x')
    y = request.args.get('y')
    z = request.args.get('z')
    zlevel = request.args.get('zlevel')
    t = request.args.get('t')

    # Check arguments for validity and convert to integers
    if any([var is None for var in [x, y, z, zlevel, t]]):
        return MALFORMED_REQUEST_RESPONSE
    else:
        x, y, z, zlevel, t = map(int, [x, y, z, zlevel, t])

    if object_name == 'DEBUG_TILE':
        maxzoom = ex.channels[0].layers[0].maxzoom_level_index
        minx, miny, maxx, maxy = MapobjectOutline.create_tile(x, y, z, maxzoom)
        return jsonify({
            'type': 'Feature',
            'geometry': {
                'type': 'Polygon',
                'coordinates': [[
                    [maxx, maxy], [minx, maxy], [minx, miny], [maxx, miny],
                    [maxx, maxy]
                ]]
            },
            'properties': {
                'x': x,
                'y': y,
                'z': z,
                'type': 'DEBUG_TILE'
            }
        })

    mapobject_type = \
        db.session.query(MapobjectType).\
        filter_by(name=object_name).one()
    query_res = mapobject_type.get_mapobject_outlines_within_tile(
        x, y, z, t, zlevel)

    features = []
    if len(query_res) > 0:
        # Try to estimate how many points there are in total within
        # the polygons of this tile.
        for mapobject_id, geom_geojson_str in query_res:
            feature = {
                "type": "Feature",
                "id": mapobject_id,
                "geometry": json.loads(geom_geojson_str),
                "properties": {
                    "type": object_name
                }
            }
            features.append(feature)

    return jsonify({
        "type": "FeatureCollection",
        "features": features
    })
|
Python
| 0
|
@@ -2070,16 +2070,37 @@
ect_name
+, experiment_id=ex.id
).one()%0A
|
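A hedged client-side sketch of the endpoint above; the host, experiment hash, and object name are placeholders:

import requests

params = {'x': 0, 'y': 0, 'z': 0, 'zlevel': 0, 't': 0}  # all five are required
r = requests.get(
    'http://localhost:5000/experiments/a1b2c3/mapobjects/cells', params=params)
feature_collection = r.json()  # GeoJSON FeatureCollection, as built above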
f96f3f6ac5ca5f9301c2c463b0a3f4f710187f21
|
Use utf-8
|
constantes.py
|
constantes.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from BeautifulSoup import BeautifulSoup
import requests


def get_profs():
    r = requests.get("http://www.heb.be/esi/personnel_fr.htm")
    soup = BeautifulSoup(r.text)
    soup = soup.findAll('ul')[2]
    profs = {}
    for line in soup:
        line = str(line)
        if "profs" in line:
            abbr = line.split("(")[1].split(")")[0]
            prof = line.split(">")[2].split("<")[0]
            profs[abbr] = prof.decode('utf-8')
    return profs


HOURS = [
    '08:15',
    '09:15',
    '10:30',
    '11:30',
    '12:30',
    '13:45',
    '14:45',
    '16:00',
    '17:00',
]

DAYS = {
    0: 'Lundi',
    1: 'Mardi',
    2: 'Mercredi',
    3: 'Jeudi',
    4: 'Vendredi',
}

MONTHS = {
    'janvier'  : '01',
    'février'  : '02',
    'mars'     : '03',
    'avril'    : '04',
    'mai'      : '05',
    'juin'     : '06',
    'juillet'  : '07',
    'aout'     : '08',
    'septembre': '09',
    'octobre'  : '10',
    'novembre' : '11',
    'décembre' : '12',
}

PROFS = {
    'ADT': 'Alain Detaille',
    'ARO': 'Anne Rousseau',
    'ART': 'Anne Rayet',
    'BDL': 'Bénoni Delfosse',
    'BEJ': 'Jonas Beleho',
    'CIH': 'Yashar Cihan',
    'CLG': 'Christine Leignel',
    'CLR': 'Catherine Leruste',
    'CUV': 'Geneviève Cuvelier',
    'DNA': 'David Nabet',
    'DWI': 'Didier Willame',
    'EFO': 'Eric Fontaine',
    'EGR': 'Eric Georges',
    'ELV': 'Eytan Levy',
    'FPL': 'Frédéric Pluquet',
    'GVA': 'Gilles Van Assche',
    'HAL': 'Amine Hallal',
    'JCJ': 'Jean-Claude Jaumain',
    'JDM': 'Jacqueline De Mesmaeker',
    'JDS': 'Jérôme Dossogne',
    'JMA': 'Jean-Marc André',
    'LBC': 'Laurent Beeckmans',
    'MAP': 'Michel Applaincourt',
    'MBA': 'Monica Bastreghi',
    'MCD': 'Marco Codutti',
    'MHI': 'Mohamed Hadjili',
    'MWA': 'Moussa Wahid',
    'MWI': 'Michel Willemse',
    'NPX': 'Nicolas Pettiaux',
    'NVS': 'Nicolas Vansteenkiste',
    'PBT': 'Pierre Bettens',
    'PMA': 'Pantelis Matsos',
    'RPL': 'René-Philippe Legrand',
    'SRV': 'Fréd\éric Servais',
    'YPR': 'Yves Pierseaux',
}
|
Python
| 0
|
@@ -1860,9 +1860,8 @@
Fr%C3%A9d
-%5C
%C3%A9ric
|
ea7f7338773c5e4f2f538740d850990b1107e94e
|
Remove some debug code.
|
console.py
|
console.py
|
"""
@copyright: 2013 Single D Software - All Rights Reserved
@summary: Provides a console API for Light Maestro.
"""
# Standard library imports
import collections
import json
import logging
import os
import re
import threading
import time
# Named logger for this module
_logger = logging.getLogger(__name__)
# Maximum number of channels supported
maxchannels = 96
class SceneAlreadyLoadedError(Exception):
"""Requested scene is loading or already loaded."""
pass
class SceneNotFoundError(Exception):
"""Missing or corrupt scene file."""
pass
class NotSupportedError(Exception):
"""Console does not support this function."""
pass
class CommunicationError(Exception):
"""Communication with the console failed."""
pass
def _alphasort(items):
""" Sort the given list in the way that humans expect."""
convert = lambda t: int(t) if t.isdigit() else t
alphakey = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
return sorted(items, key=alphakey)
class Console():
"""Abstract class from which all other console classes inherit."""
def _getscenefilename(self, sceneid):
return os.path.join(self._scenepath, sceneid)
def getstatus(self):
"""
Provide status information for the connection to the console.
@return: Dictionary containing status information
"""
status = {'condition': 'operational'}
status['interface'] = self.__class__.__name__.lower()
status['fading'] = self._target is not None
if self._sceneid is not None:
status['scene'] = self._sceneid
return status
def getchannels(self):
"""
Provide all DMX channel values.
@return: Dictionary containing all channel numbers and values
"""
return {'channels': self._channels}
def loadchannels(self, data, sceneid=None):
with self._lock:
self._target = data.get('channels', {})
self._fadetime = time.time() + data.get('fade', 0.0)
self._sceneid = sceneid
def getscenes(self):
try:
return {'scenes': _alphasort(os.listdir(self._scenepath))}
except OSError:
raise CommunicationError
def getscene(self, sceneid):
try:
with open(self._getscenefilename(sceneid)) as f:
return json.load(f)
except IOError:
raise SceneNotFoundError
except ValueError:
raise CommunicationError
def loadscene(self, sceneid):
if self._sceneid == sceneid:
raise SceneAlreadyLoadedError
scene = self.getscene(sceneid)
self.loadchannels(scene, sceneid)
_logger.debug('Loading scene {0}'.format(sceneid))
def savescene(self, sceneid, scene=None):
if scene is None:
scene = self.getchannels()
try:
print(scene)
with open(self._getscenefilename(sceneid), 'w') as f:
json.dump(scene, f, indent=4)
except IOError:
raise CommunicationError
def deletescene(self, sceneid):
try:
os.remove(self._getscenefilename(sceneid))
except FileNotFoundError:
return
except OSError:
raise CommunicationError
def _setchannels(self, channels):
self._channels.update(channels)
_logger.debug(list(self._channels.values())[:16])
def _fader(self):
fadedelay = 0.1
while True:
time.sleep(fadedelay)
if self._target:
with self._lock:
remainingfade = self._fadetime - time.time()
if remainingfade > fadedelay:
fadechannels = {}
for c, v in self._target.items():
delta = (self._target[c] - self._channels[c]) * fadedelay / remainingfade
fadechannels[c] = self._channels[c] + delta
self._setchannels(fadechannels)
else:
self._setchannels(self._target)
self._target = None
def __init__(self, parameter='scenes'):
"""Initialize the console object."""
self._channels = collections.OrderedDict((str(c+1), 0.0) for c in range(maxchannels))
self._target = None
self._fadetime = time.time()
self._sceneid = None
self._lock = threading.Lock()
self._scenepath = parameter
try:
self.loadscene('Default')
except SceneNotFoundError:
_logger.warning('Unable to load default scene, all channels set to zero')
# Start the scene transition task
threading.Thread(target=self._fader).start()
|
Python
| 0.000003
|
@@ -3389,66 +3389,8 @@
els)
-%0A _logger.debug(list(self._channels.values())%5B:16%5D)
%0A%0A
|
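A minimal sketch of the Console API above; the constructor starts the fader thread and tries to load a 'Default' scene, so a writable 'scenes' directory is assumed and the scene id shown is illustrative:

c = Console(parameter='scenes')
c.loadchannels({'channels': {'1': 100.0, '2': 50.0}, 'fade': 2.0})  # 2 s crossfade
c.savescene('warm_wash')   # persists getchannels() as indented JSON
print(c.getstatus())       # e.g. {'condition': 'operational', 'fading': True, ...}
# Note: the fader daemon loop keeps the process alive until it is killed.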
0726bc2cabd98639214e2cd14c49d30262e75d5e
|
Streamline setup of deCONZ button platform (#70593)
|
homeassistant/components/deconz/button.py
|
homeassistant/components/deconz/button.py
|
"""Support for deCONZ buttons."""
from __future__ import annotations
from dataclasses import dataclass
from pydeconz.models.scene import Scene as PydeconzScene
from homeassistant.components.button import (
DOMAIN,
ButtonEntity,
ButtonEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .deconz_device import DeconzSceneMixin
from .gateway import DeconzGateway, get_gateway_from_config_entry
@dataclass
class DeconzButtonDescriptionMixin:
"""Required values when describing deCONZ button entities."""
suffix: str
button_fn: str
@dataclass
class DeconzButtonDescription(ButtonEntityDescription, DeconzButtonDescriptionMixin):
"""Class describing deCONZ button entities."""
ENTITY_DESCRIPTIONS = {
PydeconzScene: [
DeconzButtonDescription(
key="store",
button_fn="store",
suffix="Store Current Scene",
icon="mdi:inbox-arrow-down",
entity_category=EntityCategory.CONFIG,
)
]
}
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the deCONZ button entity."""
gateway = get_gateway_from_config_entry(hass, config_entry)
gateway.entities[DOMAIN] = set()
@callback
def async_add_scene(scenes: list[PydeconzScene] | None = None) -> None:
"""Add scene button from deCONZ."""
entities = []
if scenes is None:
scenes = list(gateway.api.scenes.values())
for scene in scenes:
known_entities = set(gateway.entities[DOMAIN])
for description in ENTITY_DESCRIPTIONS.get(PydeconzScene, []):
new_entity = DeconzButton(scene, gateway, description)
if new_entity.unique_id not in known_entities:
entities.append(new_entity)
if entities:
async_add_entities(entities)
config_entry.async_on_unload(
async_dispatcher_connect(
hass,
gateway.signal_new_scene,
async_add_scene,
)
)
async_add_scene()
class DeconzButton(DeconzSceneMixin, ButtonEntity):
"""Representation of a deCONZ button entity."""
TYPE = DOMAIN
def __init__(
self,
device: PydeconzScene,
gateway: DeconzGateway,
description: DeconzButtonDescription,
) -> None:
"""Initialize deCONZ number entity."""
self.entity_description: DeconzButtonDescription = description
super().__init__(device, gateway)
self._attr_name = f"{self._attr_name} {description.suffix}"
async def async_press(self) -> None:
"""Store light states into scene."""
async_button_fn = getattr(self._device, self.entity_description.button_fn)
await async_button_fn()
def get_device_identifier(self) -> str:
"""Return a unique identifier for this scene."""
return f"{super().get_device_identifier()}-{self.entity_description.key}"
|
Python
| 0
|
@@ -99,16 +99,60 @@
aclass%0A%0A
+from pydeconz.models.event import EventType%0A
from pyd
@@ -420,78 +420,8 @@
ack%0A
-from homeassistant.helpers.dispatcher import async_dispatcher_connect%0A
from
@@ -1572,49 +1572,35 @@
ene(
-scenes: list%5BPydeconzScene%5D %7C None = None
+_: EventType, scene_id: str
) -%3E
@@ -1662,193 +1662,125 @@
-entities = %5B%5D%0A%0A if scenes is None:%0A scenes = list(gateway.api.scenes.values())%0A%0A for scene in scenes:%0A%0A known_entities = set(gateway.entities%5BDOMAIN%5D
+scene = gateway.api.scenes%5Bscene_id%5D%0A async_add_entities(%0A DeconzButton(scene, gateway, description
)%0A
@@ -1850,18 +1850,16 @@
ene, %5B%5D)
-:%0A
%0A
@@ -1863,380 +1863,206 @@
- new_entity = DeconzButton(scene, gateway, description)%0A if new_entity.unique_id not in known_entities:%0A entities.append(new_entity)%0A%0A if entities:%0A async_add_entities(entities)%0A%0A config_entry.async_on_unload(%0A async_dispatcher_connect(%0A hass,
+)%0A%0A config_entry.async_on_unload(%0A gateway.api.scenes.subscribe(%0A async_add_scene,%0A EventType.ADDED,%0A )
%0A
+)%0A%0A
- gateway.signal_new_
+for scene_id in gateway.api.
scene
-,%0A
+s:%0A
as
@@ -2049,34 +2049,32 @@
scenes:%0A
-
async_add_scene,
@@ -2076,47 +2076,34 @@
cene
-,%0A )%0A )%0A%0A async_add_
+(EventType.ADDED,
scene
-(
+_id
)%0A%0A%0A
|
afdc58945c710f623714e6b07c593489c0cd42be
|
Implement basic list command
|
src/xii/builtin/commands/list/list.py
|
src/xii/builtin/commands/list/list.py
|
from xii import definition, command, error
from xii.need import NeedLibvirt, NeedSSH


class ListCommand(command.Command):
    """List all currently defined components
    """
    name = ['list', 'ls']
    help = "list all currently defined components"

    @classmethod
    def argument_parser(cls):
        parser = command.Command.argument_parser(cls.name[0])
        parser.add_argument("-d", "--definition", default=None,
                            help="Define which xii definition file should be used")
        parser.add_argument("--all", default=False, action="store_true",
                            help="Show all components defined by the xii")
        parser.add_argument("--host", default=None,
                            help="Specify host to connect to. (A libvirt url is required)")
        parser.add_argument("--only", type=str, default=None,
                            help="Show only specified components [nodes,pools,networks]")
        return parser

    def run(self):
        pass
|
Python
| 0.999947
|
@@ -1,16 +1,33 @@
+import datetime%0A%0A
from xii import
@@ -1003,28 +1003,933 @@
def
-run(self):%0A pass
+_get_uptime(self, time):%0A now = datetime.datetime.now()%0A delta = now - datetime.datetime.fromtimestamp(time)%0A%0A if delta.days %3E 1:%0A return %22%7B%7D days%22.format(delta.days)%0A%0A if delta.seconds / 3600 %3E 1:%0A return %22%7B%7D hours%22.format(delta.seconds / 3600)%0A%0A if delta.seconds / 60 %3E 1:%0A return %22%7B%7D minutes%22.format(delta.seconds / 60)%0A return %22%7B%7D seconds%22.format(delta.seconds)%0A%0A def run(self):%0A rows = %5B%5D%0A for c in self.children():%0A meta = c.fetch_metadata()%0A%0A create = %22---%22%0A if meta is not None:%0A created_at = float(meta%5B%22created%22%5D)%0A create = self._get_uptime(created_at)%0A%0A rows.append((c.entity(),%0A c.get_virt_url(),%0A create,%0A c.status()%0A ))%0A self.show_table(%5B%22name%22, %22host%22, %22uptime%22, %22status%22%5D, rows)
%0A
|
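A standalone mirror of the _get_uptime rounding rule added by the diff above, for illustration only; xii targets Python 2, where these divisions are integral:

import datetime

def fmt(delta):
    # mirrors ListCommand._get_uptime from the diff above
    if delta.days > 1:
        return "{} days".format(delta.days)
    if delta.seconds / 3600 > 1:
        return "{} hours".format(delta.seconds / 3600)
    if delta.seconds / 60 > 1:
        return "{} minutes".format(delta.seconds / 60)
    return "{} seconds".format(delta.seconds)

print(fmt(datetime.timedelta(seconds=7300)))  # "2 hours" under integer division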
095ec4c38015f1b1b53cb88ae59fbf6a7596b492
|
update VAF
|
mnist/training.py
|
mnist/training.py
|
# Copyright 2017 Max W. Y. Lam
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import print_function
from __future__ import division

import sys
sys.path.append("../")
import os
import time
import tensorflow as tf
import tensorflow.contrib.layers as layers
from six.moves import range, zip
import numpy as np
import zhusuan as zs
import six
from six.moves import cPickle as pickle

from expt import run_experiment


DATA_PATH = 'mnist.pkl.gz'


def load_data(n_folds):
    def to_one_hot(x, depth):
        ret = np.zeros((x.shape[0], depth))
        ret[np.arange(x.shape[0]), x] = 1
        return ret
    f = gzip.open(DATA_PATH, 'rb')
    if six.PY2:
        train_set, valid_set, test_set = pickle.load(f)
    else:
        train_set, valid_set, test_set = pickle.load(f, encoding='latin1')
    f.close()
    X_train, y_train = train_set[0], train_set[1]
    X_valid, y_valid = valid_set[0], valid_set[1]
    X_test, y_test = test_set[0], test_set[1]
    X_train = np.vstack([X_train, X_valid]).astype('float32')
    y_train = np.vstack([y_train, y_valid])
    return [X_train, to_one_hot(y_train, 10), X_test, to_one_hot(y_test, 10)]


if __name__ == '__main__':
    if 'cpu' in sys.argv:
        os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
    model_names = ['VAFNN', 'BNN']
    train_test_set = load_data(5)
    D, P = train_test_set[0][0].shape[1], train_test_set[0][1].shape[1]
    # Fair Model Comparison - Same Architecture & Optimization Rule
    training_settings = {
        'task': 'classification',
        'plot_err': True,
        'lb_samples': 20,
        'll_samples': 100,
        'n_basis': 50,
        'n_hiddens': [100],
        'batch_size': 10,
        'learn_rate': 1e-3,
        'max_epochs': 10000,
        'early_stop': 10,
        'check_freq': 5,
    }
    eval_mses, eval_lls = run_experiment(
        model_names, 'MNIST', load_data(5), **training_settings)
|
Python
| 0
|
@@ -888,16 +888,28 @@
ort six%0A
+import gzip%0A
from six
|
393d3d0382eb5208a039347b851597986a6469fe
|
Make output default
|
srttools/core/inspect_observations.py
|
srttools/core/inspect_observations.py
|
"""From a given list of directories, read the relevant information and link observations to calibrators."""
from __future__ import (absolute_import, division,
print_function)
import os
import glob
import warnings
import numpy as np
from astropy.table import Table, Column
from .io import read_data
from .calibration import read_calibrator_config
import sys
def standard_string(s):
if sys.version_info >= (3, 0, 0):
# for Python 3
if isinstance(s, bytes):
s = s.decode('ascii') # or s = str(s)[2:-1]
else:
# for Python 2
if isinstance(s, unicode):
s = str(s)
return s
def inspect_directories(directories):
info = Table()
names = ["Dir", "Sample File", "Source", "Receiver", "Backend",
"Time", "Frequency", "Bandwidth"]
dtype = ['S200', 'S200', 'S200', 'S200', 'S200',
np.double, np.float, np.float]
for n, d in zip(names, dtype):
if n not in info.keys():
info.add_column(Column(name=n, dtype=d))
for d in directories:
fits_files = glob.glob(os.path.join(d, '*.fits'))
for f in fits_files:
try:
print("Reading {}".format(f), end="\r")
data = read_data(f)
backend = data.meta['backend']
receiver = data.meta['receiver']
frequency = data["Ch0"].meta['frequency']
bandwidth = data["Ch0"].meta['bandwidth']
source = data.meta['SOURCE']
time = data[0]['time']
info.add_row([d, f, source, receiver, backend,
time, frequency, bandwidth])
break
except:
continue
return(info)
def split_observation_table(info, max_calibrator_delay=0.4, max_source_delay=0.2):
grouped_table = info.group_by(["Receiver", "Backend"])
indices = grouped_table.groups.indices
groups = {}
for i, ind in enumerate(zip(indices[:-1], indices[1:])):
start_row = grouped_table[ind[0]]
print("Group {}, Backend = {}, Receiver = {}".format(i, standard_string(start_row["Backend"]),
standard_string(start_row["Receiver"])))
s = split_by_source(grouped_table[ind[0]:ind[1]],
max_calibrator_delay=max_calibrator_delay,
max_source_delay=max_source_delay)
receiver = start_row["Receiver"]
backend = start_row["Backend"]
label = standard_string(receiver) + ',' + standard_string(backend)
groups[label] = s
return groups
def split_by_source(info, max_calibrator_delay=0.4, max_source_delay=0.2):
cal_config = read_calibrator_config()
calibrators = cal_config.keys()
sources = list(set(info["Source"]))
# Find observation blocks of a given source
retval = {}
for s in sources:
if standard_string(s) in calibrators:
continue
condition = info["Source"] == s
s = standard_string(s)
retval[s] = {}
filtered_table = info[condition]
start_idxs = []
end_idxs = []
for i, f in enumerate(filtered_table):
if i == 0:
start_idxs.append(0)
continue
if filtered_table[i]["Time"] - filtered_table[i-1]["Time"] > max_source_delay:
start_idxs.append(i)
end_idxs.append(i)
end_idxs.append(len(filtered_table))
contiguous = list(zip(start_idxs, end_idxs))
for i, cont in enumerate(contiguous):
retval[s]["Obs{}".format(i)] = {}
print("---------------")
print("{}, observation {}\n".format(s, i + 1))
ft = filtered_table[cont[0]:cont[1]]
observation_start = ft[0]["Time"]
observation_end = ft[-1]["Time"]
print("Source observations:")
retval[s]["Obs{}".format(i)]["Src"] = []
for c in range(cont[0], cont[1]):
print(standard_string(filtered_table[c]["Dir"]))
retval[s]["Obs{}".format(i)]["Src"].append(standard_string(filtered_table[c]["Dir"]))
print("")
print("Calibrator observations:")
retval[s]["Obs{}".format(i)]["Cal"] = []
condition1 = np.abs(info["Time"] - observation_start) < max_calibrator_delay
condition2 = np.abs(info["Time"] - observation_end) < max_calibrator_delay
condition = condition1 & condition2
for row in info[condition]:
if standard_string(row["Source"]) in calibrators:
print(standard_string(row["Dir"]))
retval[s]["Obs{}".format(i)]["Cal"].append(standard_string(row["Dir"]))
print("")
print("---------------\n")
return retval
def main_inspector(args=None):
import argparse
description = ('From a given list of directories, read the relevant information'
' and link observations to calibrators. A single file is read for'
' each directory.')
parser = argparse.ArgumentParser(description=description)
parser.add_argument("directories", nargs='+',
help="Directories to inspect",
default=None, type=str)
parser.add_argument("-g", "--group-by", default=None, type=str, nargs="+")
parser.add_argument("-s", "--split-by-source", default=False,
action='store_true',
help=('Split output so that it contains a list of observations '
'and calibrators for each source'))
args = parser.parse_args(args)
info = inspect_directories(args.directories)
info.write('table.csv')
if args.group_by is not None:
rearranged_info = info.group_by(args.group_by)
rearranged_info.write('rearranged_table.csv')
if args.split_by_source:
split_by_source(info)
|
Python
| 0.000147
|
@@ -5503,274 +5503,8 @@
%22+%22)
-%0A parser.add_argument(%22-s%22, %22--split-by-source%22, default=False,%0A action='store_true',%0A help=('Split output so that it contains a list of observations '%0A 'and calibrators for each source'))
%0A%0A
@@ -5605,32 +5605,66 @@
ite('table.csv')
+%0A split_observation_table(info)
%0A%0A if args.gr
@@ -5797,63 +5797,4 @@
')%0A%0A
- if args.split_by_source:%0A split_by_source(info)%0A
|
7a8aa79f191ed633babc1134238017c164b306f3
|
Add optional rtsp_port for Foscam (#22786)
|
homeassistant/components/foscam/camera.py
|
homeassistant/components/foscam/camera.py
|
"""
This component provides basic support for Foscam IP cameras.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/camera.foscam/
"""
import logging
import voluptuous as vol
from homeassistant.components.camera import (
Camera, PLATFORM_SCHEMA, SUPPORT_STREAM)
from homeassistant.const import (
CONF_NAME, CONF_USERNAME, CONF_PASSWORD, CONF_PORT)
from homeassistant.helpers import config_validation as cv
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ['libpyfoscam==1.0']
CONF_IP = 'ip'
DEFAULT_NAME = 'Foscam Camera'
DEFAULT_PORT = 88
FOSCAM_COMM_ERROR = -8
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_IP): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a Foscam IP Camera."""
add_entities([FoscamCam(config)])
class FoscamCam(Camera):
"""An implementation of a Foscam IP camera."""
def __init__(self, device_info):
"""Initialize a Foscam camera."""
from libpyfoscam import FoscamCamera
super(FoscamCam, self).__init__()
ip_address = device_info.get(CONF_IP)
port = device_info.get(CONF_PORT)
self._username = device_info.get(CONF_USERNAME)
self._password = device_info.get(CONF_PASSWORD)
self._name = device_info.get(CONF_NAME)
self._motion_status = False
self._foscam_session = FoscamCamera(
ip_address, port, self._username, self._password, verbose=False)
self._rtsp_port = None
result, response = self._foscam_session.get_port_info()
if result == 0:
self._rtsp_port = response.get('rtspPort') or \
response.get('mediaPort')
def camera_image(self):
"""Return a still image response from the camera."""
# Send the request to snap a picture and return raw jpg data
# Handle exception if host is not reachable or url failed
result, response = self._foscam_session.snap_picture_2()
if result == FOSCAM_COMM_ERROR:
return None
return response
@property
def supported_features(self):
"""Return supported features."""
if self._rtsp_port:
return SUPPORT_STREAM
return 0
@property
def stream_source(self):
"""Return the stream source."""
if self._rtsp_port:
return 'rtsp://{}:{}@{}:{}/videoMain'.format(
self._username,
self._password,
self._foscam_session.host,
self._rtsp_port)
return None
@property
def motion_detection_enabled(self):
"""Camera Motion Detection Status."""
return self._motion_status
def enable_motion_detection(self):
"""Enable motion detection in camera."""
try:
ret = self._foscam_session.enable_motion_detection()
self._motion_status = ret == FOSCAM_COMM_ERROR
except TypeError:
_LOGGER.debug("Communication problem")
self._motion_status = False
def disable_motion_detection(self):
"""Disable motion detection."""
try:
ret = self._foscam_session.disable_motion_detection()
self._motion_status = ret == FOSCAM_COMM_ERROR
except TypeError:
_LOGGER.debug("Communication problem")
self._motion_status = False
@property
def name(self):
"""Return the name of this camera."""
return self._name
|
Python
| 0
|
@@ -562,16 +562,45 @@
P = 'ip'
+%0ACONF_RTSP_PORT = 'rtsp_port'
%0A%0ADEFAUL
@@ -958,16 +958,58 @@
v.port,%0A
+ vol.Optional(CONF_RTSP_PORT): cv.port%0A
%7D)%0A%0A%0Adef
@@ -1830,21 +1830,84 @@
_port =
-None%0A
+device_info.get(CONF_RTSP_PORT)%0A if not self._rtsp_port:%0A
@@ -1962,32 +1962,36 @@
_info()%0A
+
+
if result == 0:%0A
@@ -1990,16 +1990,20 @@
t == 0:%0A
+
@@ -2054,16 +2054,20 @@
') or %5C%0A
+
|
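After the change above, the platform accepts an optional rtsp_port; a hedged sketch of the resulting device_info mapping consumed by FoscamCam (all values are placeholders):

device_info = {
    'ip': '192.168.1.10',
    'username': 'admin',
    'password': 'secret',
    'name': 'Porch camera',
    'port': 88,
    'rtsp_port': 554,   # new optional key; when set, skips the get_port_info() lookup
}
cam = FoscamCam(device_info)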
43a39b03469bf6d99ea61ef505170fb9fc1437f1
|
fix flakes
|
temba/channels/tasks.py
|
temba/channels/tasks.py
|
from __future__ import unicode_literals

import requests
import logging
import time

from datetime import timedelta
from django.conf import settings
from django.utils import timezone
from djcelery_transactions import task
from enum import Enum
from redis_cache import get_redis_connection
from temba.msgs.models import SEND_MSG_TASK
from temba.utils import dict_to_struct
from temba.utils.queues import pop_task
from temba.utils.mage import MageClient
from .models import Channel, Alert, ChannelLog, ChannelCount, AUTH_TOKEN

logger = logging.getLogger(__name__)


class MageStreamAction(Enum):
    activate = 1
    refresh = 2
    deactivate = 3


@task(track_started=True, name='sync_channel_task')
def sync_channel_task(gcm_id, channel_id=None):  # pragma: no cover
    channel = Channel.objects.filter(pk=channel_id).first()
    Channel.sync_channel(gcm_id, channel)


@task(track_started=True, name='send_msg_task')
def send_msg_task():
    """
    Pops the next message off of our msg queue to send.
    """
    # pop off the next task
    msg_tasks = pop_task(SEND_MSG_TASK)

    # it is possible we have no message to send, if so, just return
    if not msg_tasks:
        return

    if not isinstance(msg_tasks, list):
        msg_tasks = [msg_tasks]

    r = get_redis_connection()

    # acquire a lock on our contact to make sure two sets of msgs aren't being sent at the same time
    with r.lock('send_contact_%d' % msg.contact, timeout=300):
        # send each of our msgs
        for (i, msg_task) in enumerate(msg_tasks):
            try:
                msg = dict_to_struct('MockMsg', msg_task,
                                     datetime_fields=['modified_on', 'sent_on', 'created_on', 'queued_on', 'next_attempt'])
                Channel.send_message(msg)

                # if there are more messages to send for this contact, sleep a second before moving on
                if i+1 < len(msg_tasks):
                    time.sleep(1)
            except:
                logger.error('Error sending msg', exc_info=True)


@task(track_started=True, name='check_channels_task')
def check_channels_task():
    """
    Run every 30 minutes. Checks if any channels that are active have not been seen in that
    time. Triggers alert in that case
    """
    r = get_redis_connection()

    # only do this if we aren't already checking campaigns
    key = 'check_channels'
    if not r.get(key):
        with r.lock(key, timeout=300):
            Alert.check_alerts()


@task(track_started=True, name='send_alert_task')
def send_alert_task(alert_id, resolved):
    alert = Alert.objects.get(pk=alert_id)
    alert.send_email(resolved)


@task(track_started=True, name='trim_channel_log_task')
def trim_channel_log_task():
    """
    Runs daily and clears any channel log items older than 48 hours.
    """
    two_days_ago = timezone.now() - timedelta(hours=48)
    ChannelLog.objects.filter(created_on__lte=two_days_ago).delete()


@task(track_started=True, name='notify_mage_task')
def notify_mage_task(channel_uuid, action):
    """
    Notifies Mage of a change to a Twitter channel. Having this in a djcelery_transactions task ensures that the channel
    db object is updated before Mage tries to fetch it
    """
    mage = MageClient(settings.MAGE_API_URL, settings.MAGE_AUTH_TOKEN)

    if action == MageStreamAction.activate:
        mage.activate_twitter_stream(channel_uuid)
    elif action == MageStreamAction.refresh:
        mage.refresh_twitter_stream(channel_uuid)
    elif action == MageStreamAction.deactivate:
        mage.deactivate_twitter_stream(channel_uuid)
    else:
        raise ValueError('Invalid action: %s' % action)


@task(track_started=True, name="squash_channelcounts")
def squash_channelcounts():
    r = get_redis_connection()

    key = 'squash_channelcounts'
    if not r.get(key):
        with r.lock(key, timeout=900):
            ChannelCount.squash_counts()


@task(track_started=True, name="fb_channel_subscribe")
def fb_channel_subscribe(channel_id):
    channel = Channel.objects.filter(id=channel_id, is_active=True).first()

    if channel:
        page_access_token = channel.config_json()[AUTH_TOKEN]

        # subscribe to messaging events for this channel
        response = requests.post('https://graph.facebook.com/v2.6/me/subscribed_apps',
                                 params=dict(access_token=page_access_token))

        if response.status_code != 200 or not response.json()['success']:
            print "Unable to subscribe for delivery of events: %s" % response.content
|
Python
| 0.000001
|
@@ -1428,16 +1428,25 @@
d' %25 msg
+_tasks%5B0%5D
.contact
@@ -1913,9 +1913,11 @@
if i
-+
+ +
1 %3C
|
ec23d68af3cacefe39fd9e9f21f4cdfebe8f02e5
|
update mime type when sending email
|
contact.py
|
contact.py
|
from __future__ import (
    absolute_import,
    print_function,
)

from collections import defaultdict
from flask import render_template
import json
import requests
from subprocess import (
    Popen,
    PIPE,
)

from config import (
    DOMAIN_NAME,
    TELSTRA_CONSUMER_KEY,
    TELSTRA_CONSUMER_SECRET,
    YO_API_KEY,
)
from constants import (
    CONTACT_TYPE_EMAIL,
    CONTACT_TYPE_SMS,
    CONTACT_TYPE_YO,
)
from dbhelper import get_redis


def send_alerts(alerts):
    # organizes the alerts by contact info then sends one alert per contact info
    klasses_by_contact = defaultdict(list)
    for alert in alerts:
        klasses_by_contact[(alert.contact, alert.contact_type)].append(alert.klass)
    for contact, klasses in klasses_by_contact.iteritems():
        contact, contact_type = contact
        if contact_type == CONTACT_TYPE_EMAIL:
            alert_by_email(contact, klasses)
        elif contact_type == CONTACT_TYPE_SMS:
            alert_by_sms(contact, klasses)
        elif contact_type == CONTACT_TYPE_YO:
            alert_by_yo(contact, klasses)


def create_alert_link(klass_ids):
    return DOMAIN_NAME + '/alert?classids=' + ','.join(map(str, klass_ids))


def alert_by_email(email, klasses):
    email_body = 'Subject: A spot has opened up in a class!\n' + render_template('email.html')
    pipe = Popen(['sendmail', '-f', 'alert@%s' % DOMAIN_NAME, '-t', email], stdin=PIPE).stdin
    pipe.write(email_body)
    pipe.close()


def alert_by_sms(phone_number, klasses):
    send_sms(phone_number,
             "A spot has opened up in a class: %s" % create_alert_link(k.klass_id for k in klasses))


def alert_by_yo(username, klasses):
    send_yo(username,
            create_alert_link(k.klass_id for k in klasses),
            'A spot has opened up in a class!')


def send_yo(username, link=None, text=None):
    requests.post("http://api.justyo.co/yo/",
                  data={'api_token': YO_API_KEY,
                        'username': username,
                        'link': link,
                        'text': text})
    # TODO: make sure this returns the right http code


def get_telstra_api_access_token():
    access_token = get_redis().get('telstra_api_access_token')
    if access_token is not None:
        return access_token

    r = requests.post('https://api.telstra.com/v1/oauth/token',
                      data={
                          'client_id': TELSTRA_CONSUMER_KEY,
                          'client_secret': TELSTRA_CONSUMER_SECRET,
                          'scope': 'SMS',
                          'grant_type': 'client_credentials'
                      }).json()
    # TODO: make sure this returns the right http code

    # cache the access token in redis, making it expire slightly earlier than it does on the Telstra server
    get_redis().setex('telstra_api_access_token', int(r['expires_in']) - 60, r['access_token'])
    return r['access_token']


def send_sms(phone_number, message):
    access_token = get_telstra_api_access_token()
    r = requests.post('https://api.telstra.com/v1/sms/messages',
                      headers={'Authorization': 'Bearer %s' % access_token},
                      data=json.dumps({
                          'to': phone_number,
                          'body': message
                      }))
    # TODO: make sure this returns the right http code
|
Python
| 0.000001
|
@@ -98,16 +98,100 @@
ultdict%0A
+from email.mime.multipart import MIMEMultipart%0Afrom email.mime.text import MIMEText%0A
from fla
@@ -1318,29 +1318,59 @@
-email_body =
+msg = MIMEMultipart('alternative')%0A msg%5B
'Subject
: A
@@ -1365,18 +1365,22 @@
'Subject
-:
+'%5D = '
A spot h
@@ -1407,43 +1407,283 @@
ass!
-%5Cn' + render_template('email.html')
+'%0A msg%5B'From'%5D = 'alert@' + DOMAIN_NAME%0A msg%5B'To'%5D = email%0A%0A text = %22this is the text version of the email%22%0A html = render_template('email.html')%0A%0A part1 = MIMEText(text, 'plain')%0A part2 = MIMEText(html, 'html')%0A%0A msg.attach(part1)%0A msg.attach(part2)%0A
%0A
@@ -1792,18 +1792,23 @@
ite(
-email_body
+msg.as_string()
)%0A
|
85537e3f8557a76b8b2ad89edc41848c29622c24
|
Update the paint tool shape with the viewer image changes
|
skimage/viewer/plugins/labelplugin.py
|
skimage/viewer/plugins/labelplugin.py
|
import numpy as np

from .base import Plugin
from ..widgets import ComboBox, Slider
from ..canvastools import PaintTool

__all__ = ['LabelPainter']


rad2deg = 180 / np.pi


class LabelPainter(Plugin):
    name = 'LabelPainter'

    def __init__(self, max_radius=20, **kwargs):
        super(LabelPainter, self).__init__(**kwargs)

        # These widgets adjust plugin properties instead of an image filter.
        self._radius_widget = Slider('radius', low=1, high=max_radius,
                                     value=5, value_type='int', ptype='plugin')

        labels = [str(i) for i in range(6)]
        labels[0] = 'Erase'
        self._label_widget = ComboBox('label', labels, ptype='plugin')

        self.add_widget(self._radius_widget)
        self.add_widget(self._label_widget)

        print(self.help())

    def help(self):
        helpstr = ("Label painter",
                   "Hold left-mouse button and paint on canvas.")
        return '\n'.join(helpstr)

    def attach(self, image_viewer):
        super(LabelPainter, self).attach(image_viewer)

        image = image_viewer.original_image
        self.paint_tool = PaintTool(self.image_viewer.ax, image.shape,
                                    on_enter=self.on_enter)
        self.paint_tool.radius = self.radius
        self.paint_tool.label = self._label_widget.index = 1
        self.artists.append(self.paint_tool)

    def on_enter(self, overlay):
        pass

    @property
    def radius(self):
        return self._radius_widget.val

    @radius.setter
    def radius(self, val):
        self.paint_tool.radius = val

    @property
    def label(self):
        return self._label_widget.val

    @label.setter
    def label(self, val):
        self.paint_tool.label = val
|
Python
| 0
|
@@ -1391,16 +1391,141 @@
_tool)%0A%0A
+ def _on_new_image(self, image):%0A %22%22%22Update plugin for new images.%22%22%22%0A self.paint_tool.shape = image.shape%0A%0A
def
|
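Typical wiring for the plugin above, as a sketch; this assumes the legacy skimage.viewer Qt stack is installed and that plugins attach to a viewer with the += operator, as in the skimage.viewer examples:

from skimage import data
from skimage.viewer import ImageViewer

viewer = ImageViewer(data.coins())
viewer += LabelPainter(max_radius=10)
viewer.show()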
dc53a39b0ffcaf8da10b99df5161aa1d539f27c2
|
Fix available property in the base supervisor entity (#69966)
|
homeassistant/components/hassio/entity.py
|
homeassistant/components/hassio/entity.py
|
"""Base for Hass.io entities."""
from __future__ import annotations
from typing import Any
from homeassistant.const import ATTR_NAME
from homeassistant.helpers.entity import DeviceInfo, EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import DOMAIN, HassioDataUpdateCoordinator
from .const import (
ATTR_SLUG,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
)
class HassioAddonEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
"""Base entity for a Hass.io add-on."""
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
entity_description: EntityDescription,
addon: dict[str, Any],
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._addon_slug = addon[ATTR_SLUG]
self._attr_name = f"{addon[ATTR_NAME]}: {entity_description.name}"
self._attr_unique_id = f"{addon[ATTR_SLUG]}_{entity_description.key}"
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, addon[ATTR_SLUG])})
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and DATA_KEY_ADDONS in self.coordinator.data
and self.entity_description.key
in self.coordinator.data[DATA_KEY_ADDONS].get(self._addon_slug, {})
)
class HassioOSEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
"""Base Entity for Hass.io OS."""
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_name = f"Home Assistant Operating System: {entity_description.name}"
self._attr_unique_id = f"home_assistant_os_{entity_description.key}"
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, "OS")})
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and DATA_KEY_OS in self.coordinator.data
and self.entity_description.key in self.coordinator.data[DATA_KEY_OS]
)
class HassioSupervisorEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
"""Base Entity for Supervisor."""
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_name = f"Home Assistant Supervisor: {entity_description.name}"
self._attr_unique_id = f"home_assistant_supervisor_{entity_description.key}"
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, "supervisor")})
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and DATA_KEY_OS in self.coordinator.data
and self.entity_description.key
in self.coordinator.data[DATA_KEY_SUPERVISOR]
)
class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
"""Base Entity for Core."""
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_name = f"Home Assistant Core: {entity_description.name}"
self._attr_unique_id = f"home_assistant_core_{entity_description.key}"
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, "core")})
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and DATA_KEY_CORE in self.coordinator.data
and self.entity_description.key in self.coordinator.data[DATA_KEY_CORE]
)
|
Python
| 0
|
@@ -3200,34 +3200,42 @@
and DATA_KEY_
-O
S
+UPERVISOR
in self.coordin
|
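The lone hunk fixes a copy-paste slip: HassioSupervisorEntity.available tested DATA_KEY_OS for membership but then indexed DATA_KEY_SUPERVISOR, so the entity could raise KeyError instead of reporting unavailable. Applying the diff, the property reads:

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return (
            super().available
            and DATA_KEY_SUPERVISOR in self.coordinator.data
            and self.entity_description.key
            in self.coordinator.data[DATA_KEY_SUPERVISOR]
        )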
8a6b100e671b4f22dee6b0399eb8a4bc8bf1a97e
|
update longdesc string
|
mriqc/info.py
|
mriqc/info.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
MRIQC
"""
__versionbase__ = '0.8.6'
__versionrev__ = 'a4'
__version__ = __versionbase__ + __versionrev__
__author__ = 'Oscar Esteban'
__email__ = 'code@oscaresteban.es'
__maintainer__ = 'Oscar Esteban'
__copyright__ = ('Copyright 2016, Center for Reproducible Neuroscience, '
'Stanford University')
__credits__ = 'Oscar Esteban'
__license__ = '3-clause BSD'
__status__ = 'Prototype'
__description__ = 'NR-IQMs (no-reference Image Quality Metrics) for MRI'
__longdesc__ = """
MRIQC provides a series of image processing workflows to extract and compute a series of
NR (no-reference), IQMs (image quality metrics) to be used in QAPs (quality assessment
protocols) for MRI (magnetic resonance imaging).
This open-source neuroimaging data processing tool is being developed as a part of the
MRI image analysis and reproducibility platform offered by the CRN. This pipeline derives
from, and is heavily influenced by, the PCP Quality Assessment Protocol.
This tool extracts a series of IQMs from structural and functional MRI data. It is also
scheduled to add diffusion MRI to the target imaging families.
"""
URL = 'http://mriqc.readthedocs.org/'
DOWNLOAD_URL = ('https://pypi.python.org/packages/source/m/mriqc/'
'mriqc-{}.tar.gz'.format(__version__))
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Image Recognition',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
]
REQUIRES = [
'numpy',
'future',
'lockfile',
'six',
'matplotlib',
'nibabel',
'niworkflows>=0.0.3a5',
'pandas',
'dipy',
'jinja2',
'seaborn',
'pyPdf2',
'PyYAML',
'nitime',
'nilearn',
'sklearn',
'scikit-learn'
]
LINKS_REQUIRES = [
'git+https://github.com/oesteban/nipype.git@master#egg=nipype',
'git+https://github.com/oesteban/rst2pdf.git@futurize/stage2#egg=rst2pdf'
]
TESTS_REQUIRES = [
'mock',
'codecov',
'nose',
'doctest-ignore-unicode'
]
EXTRA_REQUIRES = {
'doc': ['sphinx'],
'tests': TESTS_REQUIRES,
'duecredit': ['duecredit']
}
# Enable a handle to install all extra dependencies at once
EXTRA_REQUIRES['all'] = [val for _, val in list(EXTRA_REQUIRES.items())]
|
Python
| 0.000004
|
@@ -649,16 +649,17 @@
__ = %22%22%22
+%5C
%0AMRIQC p
@@ -739,16 +739,18 @@
eries of
+ %5C
%0ANR (no-
@@ -828,16 +828,18 @@
sessment
+ %5C
%0Aprotoco
@@ -966,16 +966,18 @@
t of the
+ %5C
%0AMRI ima
@@ -1058,16 +1058,18 @@
derives
+ %5C
%0Afrom, a
@@ -1221,16 +1221,18 @@
is also
+ %5C
%0Aschedul
|
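Each hunk above appends a backslash continuation inside the triple-quoted __longdesc__, which joins the physical lines into one logical line. A minimal, runnable sketch of the effect (the text is abbreviated here for illustration):

# A backslash immediately before a newline removes the newline from the
# string literal, so the triple-quoted block below is one line plus a
# trailing '\n'.
s = """\
MRIQC provides a series of image processing workflows \
for MRI.
"""
assert s == "MRIQC provides a series of image processing workflows for MRI.\n"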
fb2c9469f6d026e77e0f8c20a12f4373e68f9ba2
|
update dependency xgboost to v1 (#543)
|
training/xgboost/structured/base/setup.py
|
training/xgboost/structured/base/setup.py
|
#!/usr/bin/env python
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from setuptools import find_packages
from setuptools import setup
# While this is an xgboost sample, we will still require tensorflow and
# scikit-learn to be installed, since the sample uses certain functionalities
# available in those libraries:
# tensorflow: mainly to copy files seamlessly to GCS
#   scikit-learn: the helper functions it provides, e.g. splitting datasets
REQUIRED_PACKAGES = [
'tensorflow==1.15.4',
'scikit-learn==0.20.2',
'pandas==0.24.2',
'xgboost==0.81',
'cloudml-hypertune',
]
setup(
name='trainer',
version='0.1',
install_requires=REQUIRED_PACKAGES,
packages=find_packages(),
include_package_data=True,
description='AI Platform | Training | xgboost | Base'
)
|
Python
| 0
|
@@ -1171,12 +1171,13 @@
st==
-0.81
+1.5.0
',%0A
|
9aff0b8d5989bf11242ac30b718c23242631668e
|
call enable in RotaryEncoder.__init__, fixed a few typos
|
libraries/RotaryEncoder/rotary_encoder.py
|
libraries/RotaryEncoder/rotary_encoder.py
|
import os
from bbio.platform import sysfs
from bbio import addToCleanup, cape_manager, OCP_PATH, delay
class RotaryEncoder(object):
_eqep_dirs = [
'%s/48300000.epwmss/48300180.eqep' % OCP_PATH,
'%s/48302000.epwmss/48302180.eqep' % OCP_PATH,
'%s/48304000.epwmss/48304180.eqep' % OCP_PATH
]
EQEP0 = 0
EQEP1 = 1
EQEP2 = 2
EQEP2b = 3
def __init__(self, eqep_num):
assert 0 <= eqep_num <= 3 , "eqep_num must be between 0 and 3"
if eqep_num == 3:
overlay = 'bone_eqep2b'
eqep_num = 2
else:
overlay = 'bone_eqep%i' % eqep_num
assert os.path.exists("/lib/firmware/bone_eqep2b-00A0.dtbo"), \
"eQEP driver not present, update to a newer image to use the eQEP library"
cape_manager.load(overlay, auto_unload=False)
delay(250) # Give driver time to load
self.base_dir = self._eqep_dirs[eqep_num]
addToCleanup(self.disable)
def enable(self,m):
enable_file = "%s/enabled" % self.base_dir
return sysfs.kernelFilenameIO(enable_file, 1)
def disable(self):
enable_file = "%s/enabled" % self.base_dir
return sysfs.kernelFilenameIO(enable_file, 0)
def setAbsolute(self):
'''
Set mode as Absolute
'''
set_mode = "%s/mode" % self.base_dir
return sysfs.kernelFilenameIO(set_mode, 0)
def setRelative(self):
'''
Set mode as Relative
'''
set_mode = "%s/mode" % self.base_dir
return sysfs.kernelFilenameIO(enable_file, 1)
def getMode(self):
mode_file = "%s/enabled" % self.base_dir
return sysfs.kernelFilenameIO(mode_file)
def getPosition(self):
'''
Get the current position of the encoder
'''
position_file = "%s/position" % self.base_dir
return sysfs.kernelFilenameIO(position_file)
def setFrequency(self,freq):
'''
Set the frequency in Hz at which the driver reports new positions.
'''
period_file = "%s/period" % self.base_dir
return sysfs.kernelFilenameIO(period_file,1000000000/freq)
def setPosition(self,val):
'''
Give a new value to the current position
'''
position_file = "%s/position" % self.base_dir
return sysfs.kernelFilenameIO(position_file,val)
def zero(self):
'''
Set the current position to 0
'''
return self.setPosition(0)
|
Python
| 0.995994
|
@@ -893,16 +893,35 @@
p_num%5D%0D%0A
+ self.enable()%0D%0A
addT
@@ -971,10 +971,8 @@
self
-,m
):%0D%0A
@@ -1266,39 +1266,40 @@
e%0D%0A '''%0D%0A
-set_mod
+mode_fil
e = %22%25s/mode%22 %25
@@ -1347,23 +1347,24 @@
enameIO(
-set_mod
+mode_fil
e, 0)%0D%0A
@@ -1446,15 +1446,16 @@
-set_mod
+mode_fil
e =
@@ -1507,37 +1507,35 @@
ernelFilenameIO(
-enabl
+mod
e_file, 1)%0D%0A
@@ -1570,39 +1570,36 @@
mode_file = %22%25s/
-enabled
+mode
%22 %25 self.base_di
|
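Taken together, the hunks make __init__ call self.enable() as soon as base_dir is known, drop the stray m parameter from enable(), and rename the mismatched set_mode / enable_file / "%s/enabled" references in setAbsolute(), setRelative(), and getMode() to a consistent mode_file pointing at the sysfs mode attribute. Reconstructed from the diff, in the file's own two-space style:

  def __init__(self, eqep_num):
    # ... overlay loading as before ...
    self.base_dir = self._eqep_dirs[eqep_num]
    self.enable()
    addToCleanup(self.disable)

  def enable(self):
    enable_file = "%s/enabled" % self.base_dir
    return sysfs.kernelFilenameIO(enable_file, 1)

  def setAbsolute(self):
    mode_file = "%s/mode" % self.base_dir
    return sysfs.kernelFilenameIO(mode_file, 0)

  def setRelative(self):
    mode_file = "%s/mode" % self.base_dir
    return sysfs.kernelFilenameIO(mode_file, 1)

  def getMode(self):
    mode_file = "%s/mode" % self.base_dir
    return sysfs.kernelFilenameIO(mode_file)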
06a1b635b02e001e798fa57e70a56ad17f9df7d0
|
fix country cleanup migrate script 5
|
portality/migrate/p1p2/country_cleanup.py
|
portality/migrate/p1p2/country_cleanup.py
|
import sys
from datetime import datetime
from portality import models
from portality import xwalk
def main(argv=sys.argv):
start = datetime.now()
journal_iterator = models.Journal.all_in_doaj()
counter = 0
for j in journal_iterator:
counter += 1
oldcountry = j.bibjson().country
j.bibjson().country = xwalk.get_country_code(j.bibjson().country)
newcountry = j.bibjson().country
print j.bibjson().title.decode('utf-8'), ',', j.bibjson().get_one_identifier(j.bibjson().P_ISSN), j.bibjson().get_one_identifier(j.bibjson().E_ISSN), ',', 'Old country:', oldcountry.decode('utf-8'), ',', 'New country:', newcountry.decode('utf-8')
j.prep()
j.save()
end = datetime.now()
print "Updated Journals", counter
print start, end
print 'Time taken:', end-start
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -463,18 +463,18 @@
).title.
-d
e
+n
code('ut
@@ -613,34 +613,34 @@
y:', oldcountry.
-d
e
+n
code('utf-8'), '
@@ -670,18 +670,18 @@
country.
-d
e
+n
code('ut
|
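The three hunks turn .decode('utf-8') into .encode('utf-8') on the printed fields. In this Python 2 script the title and country values are already unicode, so calling .decode on them first coerces through ASCII and fails on non-ASCII journals, while .encode('utf-8') yields bytes that print safely. The patched print statement, reconstructed:

        print j.bibjson().title.encode('utf-8'), ',', j.bibjson().get_one_identifier(j.bibjson().P_ISSN), j.bibjson().get_one_identifier(j.bibjson().E_ISSN), ',', 'Old country:', oldcountry.encode('utf-8'), ',', 'New country:', newcountry.encode('utf-8')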
042446c8394794255471d784e041c1d9c4ef0752
|
Update example config
|
calplus/conf/providers.py
|
calplus/conf/providers.py
|
"""Provider Configuration"""
from oslo_config import cfg
# Openstack Authenticate Configuration.
openstack_group = cfg.OptGroup('openstack1',
title='OpenStack Hosts')
openstack_opts = [
cfg.StrOpt('driver_name',
default='OpenStackHUST'),
cfg.StrOpt('type_driver',
default='openstack'),
cfg.StrOpt('os_auth_url',
default='localhost'),
cfg.StrOpt('os_project_name',
default='admin'),
cfg.StrOpt('os_username',
default='admin'),
cfg.StrOpt('os_password',
default='ADMIN_PASS'),
cfg.StrOpt('os_project_domain_name',
default='default'),
cfg.StrOpt('os_user_domain_name',
default='default'),
cfg.IntOpt('os_identity_api_version',
default='3'),
cfg.IntOpt('os_image_api_version',
default='2'),
cfg.StrOpt('tenant_id',
default=''),
cfg.StrOpt('os_novaclient_version',
default='2.1'),
cfg.DictOpt('limit',
default={
"subnet": 10,
"network": 10,
"floatingip": 50,
"subnetpool": -1,
"security_group_rule": 100,
"security_group": 10,
"router": 10,
"rbac_policy": -1,
"port": 50
})
]
# Amazon Authenticate Configuration.
amazon_group = cfg.OptGroup('amazon1',
title='Amazon Hosts')
amazon_opts = [
cfg.StrOpt('driver_name',
default='AmazonHUSTACC'),
cfg.StrOpt('type_driver',
default='amazon'),
cfg.StrOpt('aws_access_key_id',
default='localhost'),
cfg.StrOpt('aws_secret_access_key',
default='admin'),
cfg.StrOpt('region_name',
default='localhost'),
cfg.StrOpt('endpoint_url',
default='http://localhost:35357/v3/'),
cfg.DictOpt('limit',
default={
"subnet": 10,
"vpc": 5,
"floatingip": 50,
"subnetpool": -1,
"security_group_rule": 100,
"security_group": 10,
"router": 10,
"rbac_policy": -1,
"port": 50
})
]
#Provider Configuration
provider_group = cfg.OptGroup('providers',
title='Supported Providers')
enable_drivers = cfg.ListOpt(
'enable_drivers',
default=[
openstack_group.name,
amazon_group.name
],
help='List of available Driver Hosts'
)
driver_mapper = cfg.DictOpt('driver_mapper',
default={
'openstack': 'OpenstackDriver',
'amazon': 'AmazonDriver',
},
help="""
Dict with key is provider, and value is
Driver class.
""")
provider_opts = [
driver_mapper,
enable_drivers
]
def register_opts(conf):
conf.register_group(provider_group)
conf.register_opts(provider_opts, group=provider_group)
conf.register_group(openstack_group)
conf.register_opts(openstack_opts, group=openstack_group)
conf.register_group(amazon_group)
conf.register_opts(amazon_opts, group=amazon_group)
def list_opts():
return {
provider_group: provider_opts,
openstack_group: openstack_opts,
amazon_group: amazon_opts,
}
|
Python
| 0.000001
|
@@ -133,17 +133,16 @@
penstack
-1
',%0A
@@ -1517,17 +1517,16 @@
('amazon
-1
',%0A
@@ -1772,33 +1772,41 @@
default='
-localhost
+AWS_ACCESS_KEY_ID
'),%0A cfg.
@@ -1853,37 +1853,53 @@
default='
-admin
+AWS_SECRET_ACCESS_KEY
'),%0A cfg.StrO
@@ -1936,33 +1936,33 @@
default='
-localhost
+us-east-1
'),%0A cfg.
@@ -2029,17 +2029,12 @@
ost:
-35357/v3/
+8788
'),%0A
|
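Decoded, the hunks rename the option groups from openstack1/amazon1 to openstack/amazon and replace the OpenStack defaults that had been copy-pasted into the Amazon section with placeholder credentials, a real region, and a different endpoint port. The patched Amazon block begins roughly:

amazon_group = cfg.OptGroup('amazon',
                            title='Amazon Hosts')
amazon_opts = [
    cfg.StrOpt('driver_name',
               default='AmazonHUSTACC'),
    cfg.StrOpt('type_driver',
               default='amazon'),
    cfg.StrOpt('aws_access_key_id',
               default='AWS_ACCESS_KEY_ID'),
    cfg.StrOpt('aws_secret_access_key',
               default='AWS_SECRET_ACCESS_KEY'),
    cfg.StrOpt('region_name',
               default='us-east-1'),
    cfg.StrOpt('endpoint_url',
               default='http://localhost:8788'),
    # the 'limit' DictOpt is unchanged
]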
ccaca70aa28bdd3e4f2a9c6e46d76e3ff8653f88
|
Fix public page hashids issue
|
crestify/views/public.py
|
crestify/views/public.py
|
from crestify import app, hashids
from crestify.models import Bookmark
from flask import render_template
@app.route('/public/<string:bookmark_id>', methods=['GET'])
def bookmark_public(bookmark_id):
bookmark_id = hashids.decode(bookmark_id)[0]
query = Bookmark.query.get(bookmark_id)
return render_template("public/bookmark_share.html", bookmark=query)
|
Python
| 0
|
@@ -227,16 +227,20 @@
.decode(
+str(
bookmark
@@ -243,16 +243,17 @@
mark_id)
+)
%5B0%5D%0A
|
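The fix wraps the route parameter in str() before handing it to hashids.decode(); the likely failure is that hashids rejects the unicode value Flask supplies under Python 2, returning an empty tuple so that [0] raises IndexError. Patched view, reconstructed from the diff:

@app.route('/public/<string:bookmark_id>', methods=['GET'])
def bookmark_public(bookmark_id):
    bookmark_id = hashids.decode(str(bookmark_id))[0]
    query = Bookmark.query.get(bookmark_id)
    return render_template("public/bookmark_share.html", bookmark=query)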
3b1418902183ed276400a3d52899394bde59130f
|
Make rfxtrx sensor not crash when unknown sensor is discovered
|
homeassistant/components/sensor/rfxtrx.py
|
homeassistant/components/sensor/rfxtrx.py
|
"""
Support for RFXtrx sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.rfxtrx/
"""
import logging
import voluptuous as vol
import homeassistant.components.rfxtrx as rfxtrx
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
from homeassistant.components.rfxtrx import (
ATTR_AUTOMATIC_ADD, ATTR_NAME,
CONF_DEVICES, ATTR_DATA_TYPE, DATA_TYPES)
DEPENDENCIES = ['rfxtrx']
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = vol.Schema({
vol.Required("platform"): rfxtrx.DOMAIN,
vol.Optional(CONF_DEVICES, default={}): vol.All(dict, rfxtrx.valid_sensor),
vol.Optional(ATTR_AUTOMATIC_ADD, default=False): cv.boolean,
}, extra=vol.ALLOW_EXTRA)
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
"""Setup the RFXtrx platform."""
# pylint: disable=too-many-locals
from RFXtrx import SensorEvent
sensors = []
for packet_id, entity_info in config['devices'].items():
event = rfxtrx.get_rfx_object(packet_id)
device_id = "sensor_" + slugify(event.device.id_string.lower())
if device_id in rfxtrx.RFX_DEVICES:
continue
_LOGGER.info("Add %s rfxtrx.sensor", entity_info[ATTR_NAME])
sub_sensors = {}
data_types = entity_info[ATTR_DATA_TYPE]
if len(data_types) == 0:
for data_type in DATA_TYPES:
if data_type in event.values:
data_types = [data_type]
break
for _data_type in data_types:
new_sensor = RfxtrxSensor(event, entity_info[ATTR_NAME],
_data_type)
sensors.append(new_sensor)
sub_sensors[_data_type] = new_sensor
rfxtrx.RFX_DEVICES[device_id] = sub_sensors
add_devices_callback(sensors)
def sensor_update(event):
"""Callback for sensor updates from the RFXtrx gateway."""
if not isinstance(event, SensorEvent):
return
device_id = "sensor_" + slugify(event.device.id_string.lower())
if device_id in rfxtrx.RFX_DEVICES:
sensors = rfxtrx.RFX_DEVICES[device_id]
for key in sensors:
sensors[key].event = event
return
# Add entity if not exist and the automatic_add is True
if not config[ATTR_AUTOMATIC_ADD]:
return
pkt_id = "".join("{0:02x}".format(x) for x in event.data)
_LOGGER.info("Automatic add rfxtrx.sensor: %s",
device_id)
data_type = "Unknown"
for _data_type in DATA_TYPES:
if _data_type in event.values:
data_type = _data_type
break
new_sensor = RfxtrxSensor(event, pkt_id, data_type)
sub_sensors = {}
sub_sensors[new_sensor.data_type] = new_sensor
rfxtrx.RFX_DEVICES[device_id] = sub_sensors
add_devices_callback([new_sensor])
if sensor_update not in rfxtrx.RECEIVED_EVT_SUBSCRIBERS:
rfxtrx.RECEIVED_EVT_SUBSCRIBERS.append(sensor_update)
class RfxtrxSensor(Entity):
"""Representation of a RFXtrx sensor."""
def __init__(self, event, name, data_type):
"""Initialize the sensor."""
self.event = event
self._name = name
if data_type not in DATA_TYPES:
data_type = "Unknown"
self.data_type = data_type
self._unit_of_measurement = DATA_TYPES[data_type]
def __str__(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
if self.data_type:
return self.event.values[self.data_type]
return None
@property
def name(self):
"""Get the name of the sensor."""
return self._name
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self.event.values
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return self._unit_of_measurement
|
Python
| 0
|
@@ -1459,16 +1459,50 @@
) == 0:%0A
+ data_type = %22Unknown%22%0A
|
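The hunk pre-seeds data_type with the same "Unknown" fallback already used in sensor_update() and RfxtrxSensor.__init__ before the lookup loop runs over configured devices. Reconstructed:

        if len(data_types) == 0:
            data_type = "Unknown"
            for data_type in DATA_TYPES:
                if data_type in event.values:
                    data_types = [data_type]
                    break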
3bcc66a91a4c2c2dc51824ea2ba228db3c0d5c8e
|
Add 'info' command for actors
|
calvin/Tools/cscontrol.py
|
calvin/Tools/cscontrol.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import requests
import json
def jsonprint(s):
print(json.dumps(json.loads(s), indent=2))
def control_id(args):
return requests.get(args.node + "/id")
def get_nodes_with_attribute(control_uri, attribute):
req = requests.get(control_uri + "/index/" + attribute)
print("reply: %s" % (req.text))
nodes = json.loads(req.text)
if not nodes or not isinstance(nodes.get("result"), list):
raise Exception("No node with attribute {} found".format(attribute))
return nodes.get("result")
def get_node_info(control_uri, node_id):
req = requests.get(control_uri + "/node/" + node_id)
nodeinfo = json.loads(req.text)
if isinstance(nodeinfo, dict):
return nodeinfo
raise Exception("No node with id {} found".format(node_id))
def get_node_control_uri(control_uri, node_id):
nodeinfo = get_node_info(control_uri, node_id)
return nodeinfo.get("control_uri")
def get_node_with_attribute(control_uri, attribute):
import random
nodes = get_nodes_with_attribute(control_uri, attribute)
node = None
# pick one (with a control uri)
while nodes:
node_id = random.choice(nodes)
nodes.remove(node_id)
other_control_uri = get_node_control_uri(control_uri, node_id)
if other_control_uri:
node = other_control_uri
break
return node
def control_deploy(args):
data = {"name": args.script.name, "script": args.script.read()}
if args.attr:
node = get_node_with_attribute(args.node, args.attr)
else:
node = args.node
return requests.post(node + "/deploy", data=json.dumps(data))
def control_actors(args):
if args.cmd == 'list':
return requests.get(args.node + "/actors")
elif args.cmd == 'delete':
if not args.id:
raise Exception("No actor id given")
return requests.delete(args.node + "/actor/" + args.id)
elif args.cmd == 'migrate':
if not args.id or not args.peer_node:
raise Exception("No actor or peer given")
data = {"peer_node_id": args.peer_node}
return requests.post(args.node + "/actor/" + args.id + "/migrate", data=json.dumps(data))
def control_applications(args):
if args.cmd == 'list':
return requests.get(args.node + "/applications")
elif args.cmd == 'delete':
if not args.id:
raise Exception("No application id given")
return requests.delete(args.node + "/application/" + args.id)
def control_nodes(args):
if args.cmd == 'list':
return requests.get(args.node + "/nodes")
elif args.cmd == 'add':
data = {"peers": args.peerlist}
return requests.post(args.node + "/peer_setup", data=json.dumps(data))
elif args.cmd == 'stop':
return requests.delete(args.node + "/node")
def parse_args():
long_desc = """Send control commands to calvin runtime"""
# top level arguments
argparser = argparse.ArgumentParser(description=long_desc)
argparser.add_argument('node', metavar="<control uri>",
help="control uri of node")
cmdparsers = argparser.add_subparsers(help="command help")
# parser for id cmd
cmd_id = cmdparsers.add_parser('id', help="get id of node", description="Get id of node")
cmd_id.set_defaults(func=control_id)
# parser for nodes cmd
node_commands = ['list', 'add', 'stop']
cmd_nodes = cmdparsers.add_parser('nodes', help='handle node peers')
cmd_nodes.add_argument('cmd', metavar='<command>', choices=node_commands, type=str,
help="one of %s" % ", ".join(node_commands))
cmd_nodes.add_argument('peerlist', metavar='<peer>', nargs='*', default=[],
help="list of peers of the form calvinip://<address>:<port>")
cmd_nodes.set_defaults(func=control_nodes)
# parser for deploy
cmd_deploy = cmdparsers.add_parser('deploy', help="deploy script to node")
cmd_deploy.add_argument("script", metavar="<calvin script>", type=file,
help="script to be deployed")
cmd_deploy.add_argument('-a', '--attr', metavar="<attribute>", type=str, dest="attr",
help="Will deploy script to a random node with the given attribute")
cmd_deploy.set_defaults(func=control_deploy)
# parsers for actor commands
actor_commands = ['list', 'delete', 'migrate']
cmd_actor = cmdparsers.add_parser('actor', help="handle actors on node")
cmd_actor.add_argument('cmd', metavar="<command>", choices=actor_commands, type=str,
help="one of %s" % (", ".join(actor_commands)))
cmd_actor.add_argument('id', metavar="<actor id>", type=str, nargs='?', default=None,
help="id of actor")
cmd_actor.add_argument('peer_node', metavar="<peer node id>", type=str, nargs='?', default=None,
help="id of destination peer")
cmd_actor.set_defaults(func=control_actors)
# parser for applications
app_commands = ['list', 'delete']
cmd_apps = cmdparsers.add_parser('applications', help="handle applications deployed on node")
cmd_apps.add_argument("cmd", metavar="<command>", choices=app_commands, type=str,
help="one of %s" % (", ".join(app_commands)))
cmd_apps.add_argument("id", metavar="<app id>", type=str, nargs='?')
cmd_apps.set_defaults(func=control_applications)
return argparser.parse_args()
def main():
args = parse_args()
jsonprint(args.func(args).text)
if __name__ == '__main__':
main()
|
Python
| 0.000093
|
@@ -2376,24 +2376,185 @@
%22/actors%22)%0A
+ if args.cmd == 'info':%0A if not args.id:%0A raise Exception(%22No actor id given%22)%0A return requests.get(args.node + %22/actor/%22 + args.id)%0A
elif arg
@@ -5173,32 +5173,40 @@
tor_commands = %5B
+'info',
'list', 'delete'
|
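The two hunks add 'info' to actor_commands and give control_actors() a matching branch that GETs the /actor/<id> endpoint. Reconstructed from the diff:

actor_commands = ['info', 'list', 'delete', 'migrate']

def control_actors(args):
    if args.cmd == 'list':
        return requests.get(args.node + "/actors")
    if args.cmd == 'info':
        if not args.id:
            raise Exception("No actor id given")
        return requests.get(args.node + "/actor/" + args.id)
    # 'delete' and 'migrate' branches follow unchanged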
bfdf4bffdb30e6f9651c96afb711d2a871b9ff87
|
fix output to shell
|
create_recipes.py
|
create_recipes.py
|
import argparse
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument("package_list", help="List of packages for which" +
" recipies will be created")
args = parser.parse_args()
package_names = [package.strip() for package in
open(args.package_list, 'r').readlines()]
log_dir = "./logs/"
recipes_dir = "./recipes/"
recipe_log_file = open(log_dir + 'recipe_log', 'w')
successes = []
failures = []
for package in package_names:
msg = "Creating Conda recipe for %s\n" % (package)
recipe_log_file.write(msg)
print(msg)
err = subprocess.call(['conda', 'skeleton', 'pypi', package,
'--output-dir', recipes_dir],
stdout=recipe_log_file, stderr=recipe_log_file)
if err is 0:
successes.append(package)
else:
failures.append(package)
recipe_log_file.close()
successful_recipes_file = open(log_dir + 'successful_recipes', 'w')
failed_recipes_file = open(log_dir + 'failed_recipes', 'w')
successful_recipes_file.write('\n'.join(successes))
failed_recipes_file.write('\n'.join(failures))
successful_recipes_file.close()
failed_recipes_file.close()
|
Python
| 0.000023
|
@@ -540,39 +540,8 @@
ge)%0A
- recipe_log_file.write(msg)%0A
@@ -776,43 +776,180 @@
-successes.append(package)%0A else:
+msg = %22Succesfully created conda recipe for %25s%5Cn%22 %25 (package)%0A successes.append(package)%0A else:%0A msg = %22Failed to create conda recipe for %25s%5Cn%22 %25 (package)
%0A
@@ -977,16 +977,31 @@
package)
+%0A print(msg)
%0A%0Arecipe
|
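Decoded, the patch stops duplicating the "Creating..." message into the log file (the subprocess output already lands there) and prints a per-package success or failure line to the shell after each conda skeleton call. The patched loop, reconstructed (the "Succesfully" spelling is the commit's own):

for package in package_names:
    msg = "Creating Conda recipe for %s\n" % (package)
    print(msg)
    err = subprocess.call(['conda', 'skeleton', 'pypi', package,
                           '--output-dir', recipes_dir],
                          stdout=recipe_log_file, stderr=recipe_log_file)
    if err is 0:  # kept verbatim from the source; '== 0' would be idiomatic
        msg = "Succesfully created conda recipe for %s\n" % (package)
        successes.append(package)
    else:
        msg = "Failed to create conda recipe for %s\n" % (package)
        failures.append(package)
    print(msg)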
21000dfd4bf63ceae0e8c6ac343624fbf5c5bea2
|
read tags before people
|
cat/test_cat.py
|
cat/test_cat.py
|
from cat.code import GenerateSite
import unittest
import json
import os
import sys
def read_json(file):
with open(file) as fh:
return json.loads(fh.read())
#return fh.read()
class TestDemo(unittest.TestCase):
def test_generate(self):
GenerateSite().generate_site()
assert True
# This fails on travis, we probably need better reporting to see what is the actual difference
# as I cannot see it. Unless it is only the file_date
files = [
'html/v/yougottalovefrontend-2016/vitaly-friedman-cutting-edge-responsive-web-design.json',
'html/p/zohar-babin.json',
]
for result_file in files:
expected_file = 'samples/' + os.path.basename(result_file)
#sys.stderr.write(result_file)
#sys.stderr.write("\n")
#sys.stderr.write(expected_file)
#sys.stderr.write("\n")
# read both files
result = read_json(result_file)
expected = read_json(expected_file)
if 'file_date' in expected:
del(expected['file_date'])
del(result['file_date'])
if result != expected:
print("While testing {}\n".format(result_file))
print("Expected: {}".format(expected))
print("Received: {}".format(result))
assert result == expected
def test_videos(self):
gs = GenerateSite()
gs.read_videos()
report = gs.check_videos()
sys.stderr.write(report)
assert report == ''
def test_people(self):
gs = GenerateSite()
gs.read_people()
report = gs.check_people()
sys.stderr.write(report)
assert report == ''
# vim: expandtab
|
Python
| 0
|
@@ -1631,32 +1631,55 @@
GenerateSite()%0A
+ gs.read_tags()%0A
gs.read_
|
b220af1b5219c59735bd1f35493b0a659c627738
|
Fix cookie handling for tornado
|
social/strategies/tornado_strategy.py
|
social/strategies/tornado_strategy.py
|
import json
from tornado.template import Loader, Template
from social.utils import build_absolute_uri
from social.strategies.base import BaseStrategy, BaseTemplateStrategy
class TornadoTemplateStrategy(BaseTemplateStrategy):
def render_template(self, tpl, context):
path, tpl = tpl.rsplit('/', 1)
return Loader(path).load(tpl).generate(**context)
def render_string(self, html, context):
return Template(html).generate(**context)
class TornadoStrategy(BaseStrategy):
DEFAULT_TEMPLATE_STRATEGY = TornadoTemplateStrategy
def __init__(self, storage, request_handler, tpl=None):
self.request_handler = request_handler
self.request = self.request_handler.request
super(TornadoStrategy, self).__init__(storage, tpl)
def get_setting(self, name):
return self.request_handler.settings[name]
def request_data(self, merge=True):
# Multiple valued arguments not supported yet
return dict((key, val[0])
for key, val in self.request.arguments.iteritems())
def request_host(self):
return self.request.host
def redirect(self, url):
return self.request_handler.redirect(url)
def html(self, content):
self.request_handler.write(content)
def session_get(self, name, default=None):
return self.request_handler.get_secure_cookie(name) or default
def session_set(self, name, value):
self.request_handler.set_secure_cookie(name, str(value))
def session_pop(self, name):
value = self.request_handler.get_secure_cookie(name)
self.request_handler.set_secure_cookie(name, '')
return value
def session_setdefault(self, name, value):
pass
def build_absolute_uri(self, path=None):
return build_absolute_uri('{0}://{1}'.format(self.request.protocol,
self.request.host),
path)
def partial_to_session(self, next, backend, request=None, *args, **kwargs):
return json.dumps(super(TornadoStrategy, self).partial_to_session(
next, backend, request=request, *args, **kwargs
))
def partial_from_session(self, session):
if session:
return super(TornadoStrategy, self).partial_to_session(
json.loads(session)
)
|
Python
| 0.000001
|
@@ -1320,38 +1320,39 @@
=None):%0A
-return
+value =
self.request_ha
@@ -1384,11 +1384,87 @@
ame)
- or
+%0A if value:%0A return json.loads(value.decode())%0A return
def
@@ -1566,17 +1566,33 @@
me,
-str(value
+json.dumps(value).encode(
))%0A%0A
@@ -1649,41 +1649,19 @@
elf.
-request_handler.get_secure_cookie
+session_get
(nam
@@ -1688,34 +1688,29 @@
est_handler.
-set_secure
+clear
_cookie(name
@@ -1713,12 +1713,8 @@
name
-, ''
)%0A
|
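Reconstructed from the hunks, the patched session helpers round-trip values through JSON instead of str() (get_secure_cookie returns bytes, hence the decode), and session_pop now reuses session_get and clears the cookie rather than overwriting it with an empty string:

    def session_get(self, name, default=None):
        value = self.request_handler.get_secure_cookie(name)
        if value:
            return json.loads(value.decode())
        return default

    def session_set(self, name, value):
        self.request_handler.set_secure_cookie(name, json.dumps(value).encode())

    def session_pop(self, name):
        value = self.session_get(name)
        self.request_handler.clear_cookie(name)
        return value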
c5db8af5faca762e574a5b3b6117a0253e59cd05
|
use new urls module
|
couchexport/urls.py
|
couchexport/urls.py
|
from django.conf.urls.defaults import *
urlpatterns = patterns('',
url(r'^model/$', 'couchexport.views.export_data', name='model_download_excel'),
url(r'^async/$', 'couchexport.views.export_data_async', name='export_data_async'),
url(r'^saved/(?P<export_id>[\w-]+)/$', 'couchexport.views.download_saved_export',
name='couchexport_download_saved_export'),
)
|
Python
| 0.000001
|
@@ -18,17 +18,8 @@
urls
-.defaults
imp
|
b68da6c5b64009dbd2d53206be4c8d98ed1b0a45
|
Add print option to exercise_oaipmh.py
|
librisxl-tools/scripts/exercise_oaipmh.py
|
librisxl-tools/scripts/exercise_oaipmh.py
|
import requests
from lxml import etree
import time
PMH = "{http://www.openarchives.org/OAI/2.0/}"
def parse_oaipmh(start_url, name, passwd):
start_time = time.time()
resumption_token = None
record_count = 0
while True:
url = make_next_url(start_url, resumption_token)
res = requests.get(url, auth=(name, passwd), stream=True, timeout=3600)
record_root = etree.parse(res.raw)
record_count += len(record_root.findall("{0}ListRecords/{0}record".format(PMH)))
resumption_token = record_root.findtext("{0}ListRecords/{0}resumptionToken".format(PMH))
elapsed = time.time() - start_time
print "Record count: %s. Got resumption token: %s. Elapsed time: %s. Records/second: %s" % (record_count, resumption_token, elapsed, record_count / elapsed)
if not resumption_token:
break
def make_next_url(base_url, resumption_token=None):
params = "?verb=ListRecords&resumptionToken=%s" % resumption_token if resumption_token else "?verb=ListRecords&metadataPrefix=marcxml"
return base_url + params
if __name__ == '__main__':
from sys import argv
args = argv[1:]
start_url = (args.pop(0) if len(args) == 3
else "http://data.libris.kb.se/hold/oaipmh")
name, passwd = args[:2]
parse_oaipmh(start_url, name, passwd)
|
Python
| 0.000005
|
@@ -32,16 +32,46 @@
t etree%0A
+from StringIO import StringIO%0A
import t
@@ -164,16 +164,32 @@
, passwd
+, do_print=False
):%0A s
@@ -414,16 +414,173 @@
t=3600)%0A
+ if do_print:%0A data = res.raw.read()%0A print data%0A source = StringIO(data)%0A else:%0A source = res.raw%0A
@@ -605,23 +605,22 @@
e.parse(
-res.raw
+source
)%0A
@@ -1356,139 +1356,335 @@
1:%5D%0A
+%0A
-start_url = (args.pop(0) if len(args) == 3%0A else %22http://data.libris.kb.se/hold/oaipmh%22)%0A name, passwd = args%5B:2%5D
+if '-p' in args:%0A args.remove('-p')%0A do_print = True%0A else:%0A do_print = False%0A%0A if not args:%0A print %22Usage: %25s OAI_PMH_URL %5BNAME, PASSWORD%5D %5B-p%5D%22 %25 argv%5B0%5D%0A exit()%0A start_url = args.pop(0)%0A if args:%0A name, passwd = args%5B:2%5D%0A else:%0A name, passwd = None, None%0A
%0A
@@ -1720,10 +1720,20 @@
, passwd
+, do_print
)%0A
|
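Reconstructed from the diff, parse_oaipmh() gains a do_print=False keyword: when set, the response is read into a StringIO buffer, echoed to stdout, and parsed from that buffer instead of res.raw. The rewritten entry point (Python 2, as in the original) becomes:

if __name__ == '__main__':
    from sys import argv
    args = argv[1:]

    if '-p' in args:
        args.remove('-p')
        do_print = True
    else:
        do_print = False

    if not args:
        print "Usage: %s OAI_PMH_URL [NAME, PASSWORD] [-p]" % argv[0]
        exit()
    start_url = args.pop(0)
    if args:
        name, passwd = args[:2]
    else:
        name, passwd = None, None

    parse_oaipmh(start_url, name, passwd, do_print)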