commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
0c6316d76f57dbd64bc055be0fa30d4cdf3e5c0b
|
Move getspec() function into TestCase class
|
test.py
|
test.py
|
from __future__ import print_function
import sys, os, unittest, subprocess, json
from runac import util
import runac
DIR = os.path.dirname(__file__)
TEST_DIR = os.path.join(DIR, 'tests')
def getspec(src):
with open(src) as f:
h = f.readline()
if h.startswith('# test: '):
return json.loads(h[8:])
else:
return {}
def compile(fn, bin):
try:
runac.compile(fn, bin)
return None
except util.Error as e:
return e.show()
except util.ParseError as e:
return e.show()
class RunaTest(unittest.TestCase):
def __init__(self, fn):
unittest.TestCase.__init__(self)
self.fn = fn
def runTest(self):
base = self.fn.rsplit('.rns', 1)[0]
bin = base + '.test'
spec = getspec(self.fn)
type = spec.get('type', 'test')
if type == 'show':
out = '\n'.join(runac.show(self.fn, None)) + '\n'
else:
out = compile(self.fn, bin)
if out and sys.version_info[0] > 2:
out = out.encode('utf-8')
if not out:
cmd = [bin] + spec.get('args', [])
opts = {'stdout': subprocess.PIPE, 'stderr': subprocess.PIPE}
proc = subprocess.Popen(cmd, **opts)
res = [proc.wait(), proc.stdout.read(), proc.stderr.read()]
proc.stdout.close()
proc.stderr.close()
elif type == 'show':
res = [0, out, bytes()]
else:
res = [0, bytes(), out]
expected = [spec.get('ret', 0), bytes(), bytes()]
for i, ext in enumerate(('.out', '.err')):
if os.path.exists(base + ext):
with open(base + ext, 'rb') as f:
expected[i + 1] = f.read()
if self is None:
return res == expected
elif res[1]:
self.assertEqual(expected[0], res[0])
self.assertEqual(expected[1], res[1])
self.assertEqual(expected[2], res[2])
elif res[2]:
self.assertEqual(expected[2], res[2])
self.assertEqual(expected[1], res[1])
self.assertEqual(expected[0], res[0])
def suite():
suite = unittest.TestSuite()
for fn in os.listdir(TEST_DIR):
fn = os.path.join(TEST_DIR, fn)
if fn.endswith('.rns'):
suite.addTest(RunaTest(fn))
return suite
IGNORE = [
'Memcheck', 'WARNING:', 'HEAP SUMMARY:', 'LEAK SUMMARY:',
'All heap blocks', 'For counts',
]
def valgrind(bin, spec):
cmd = ['valgrind', '--leak-check=full', bin] + spec.get('args', [])
streams = {'stdout': subprocess.PIPE, 'stderr': subprocess.PIPE}
proc = subprocess.Popen(cmd, **streams)
proc.wait()
err = proc.stderr.read()
blocks, cur = [], []
for ln in err.splitlines():
if not ln.startswith('=='):
continue
ln = ln.split(' ', 1)[1]
if not ln.strip():
if cur:
blocks.append(cur)
cur = []
continue
cur.append(ln)
errors = []
for bl in blocks:
if not any(flag for flag in IGNORE if bl[0].startswith(flag)):
errors.append(bl)
return len(errors)
def leaks():
for fn in sorted(os.listdir('tests')):
if not fn.endswith('.rns'):
continue
test = os.path.join('tests', fn)
bin = test[:-4] + '.test'
compiled = os.path.exists(bin)
if not compiled or os.stat(test).st_mtime >= os.stat(bin).st_mtime:
out = compile(test, bin)
if out is not None:
continue
print('Running %s...' % bin, end=' ')
count = valgrind(bin, getspec(test))
print(' ' * (40 - len(bin)), '%3i' % count)
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == '--leaks':
leaks()
elif len(sys.argv) > 1:
print(run(None, sys.argv[1]))
else:
unittest.main(defaultTest='suite')
|
Python
| 0.000001
|
@@ -186,148 +186,8 @@
')%0A%0A
-def getspec(src):%0A%09with open(src) as f:%0A%09%09h = f.readline()%0A%09%09if h.startswith('# test: '):%0A%09%09%09return json.loads(h%5B8:%5D)%0A%09%09else:%0A%09%09%09return %7B%7D%0A%0A
def
@@ -451,16 +451,168 @@
.fn = fn
+%0A%0A%09def getspec(self):%0A%09%09with open(self.fn) as f:%0A%09%09%09h = f.readline()%0A%09%09%09if h.startswith('# test: '):%0A%09%09%09%09return json.loads(h%5B8:%5D)%0A%09%09%09else:%0A%09%09%09%09return %7B%7D
%0A%09%0A%09def
@@ -702,24 +702,29 @@
%09spec =
+self.
getspec(
self.fn)
@@ -715,23 +715,16 @@
getspec(
-self.fn
)%0A%09%09type
|
d2bb5dd320253b2fdccddd79eeb240aef70e7098
|
Update command.py
|
src/command.py
|
src/command.py
|
# -*- coding: utf-8 -*-
import math
def parse_command(commands, game_stats):
"""
Parse a command from a player and run it.
Parameters
----------
command : command from the player (str).
game_stats : stat of the game (dic).
Return
------
game_stats : game stat after the command execution (dic).
Version
-------
specification v1. Nicolas Van Bossuyt (10/2/2017)
implementation v1. Nicolas Van Bossuyt (10/2/2017)
"""
commands = commands.split(' ')
for cmd in commands:
sub_cmd = cmd.split(':')
ship_name = sub_cmd[0]
ship_action = sub_cmd[1]
if ship_action == 'slower' or ship_action == 'faster':
game_stats = command_change_speed(ship_name, ship_action, game_stats)
elif ship_action == 'left' or ship_action == 'right':
game_stats = command_rotate(ship_name, ship_action, game_stats)
else:
ship_action = ship_action.split('-')
coordinate = (int(ship_action[0]), int(ship_action[1]))
game_stats = command_attack(ship_name, coordinate, game_stats)
return game_stats
def command_change_speed(ship, change, game_stats):
"""
Increase the speed of a ship.
Parameters
----------
ship : name of the ship to Increase the speed (str).
change : the way to change the speed <"slower"|"faster"> (str).
game_stats : stats of the game (dic).
Returns
-------
game_stats : the game after the command execution (dic)
Version
-------
specification : Nicolas Van Bossuyt (v1. 09/02/2017)
implementation : Bayron Mahy (v1. 10/02/2017)
"""
type = game_stats['ship'][ship]['type']
# Make the ship move faster.
if change == 'faster' and gamestats['ship'][ship]['speed'] < gamestats['model_ship'][type]['max_speed']:
game_stats['ship'][ship]['speed']+=1
# make the ship move slower.
elif change == 'slower' and gamestats['ship'][ship]['speed'] > 0:
game_stats['ship'][ship]['speed']-=1
# show a message when is a invalide change.
else:
print 'you cannot make that change on the speed of this ship'
return game_stats
def command_rotate(ship, direction, game_stats):
"""
Rotate the ship.
Parameters
----------
ship : name of the ship to Increase the speed.
direction : the direction to rotate the ship <"left"|"right">(str)
game_stats : stats of the game (dic).
Returns
-------
new_game_stats : the game after the command execution.
Version
h -------
specification v1. Nicolas Van Bossuyt (10/2/2017)
implementation v1. Nicolas Van Bossuyt (10/2/2017)
"""
def rotate_vector_2D(vector, radian):
"""
Rotate a vector in a 2D space by a specified angle in radian.
Parameters ----------
vector : 2D vector ton rotate (tuple(int,int)).
radian : angle appli to the 2D vector (float).
return
------
vector : rotate vector 2d (tuple(int,int)).
Version
-------
specification v1. Nicolas Van Bossuyt (10/2/2017)
implementation v1. Nicolas Van Bossuyt (10/2/2017)
"""
new_vector = (.0,.0)
# Here is were the magic append.
new_vector[0] = vector[0] * math.cos(radian) - vector[1] * math.sin(radian)
new_vector[1] = vector[0] * math.sin(radian) + vector[1] * math.cos(radian)
return new_vector
if direction == 'left':
gamestats['ship'][ship]['direction'] = rotate_vector_2D(gamestats['ship'][ship]['direction'], -math.pi / 4)
elif direction == 'right':
gamestats['ship'][ship]['direction'] = rotate_vector_2D(gamestats['ship'][ship]['direction'], math.pi / 4)
return game_stats
def command_attack(ship, coordinate, game_stats):
"""
Rotate the ship.
Parameters
----------
ship : name of the ship to Increase the speed.
coordinate : coordinate of the tile to attack (tuple(int,int)).
game_stats : stats of the game (dic).
Returns
-------
new_game_stats : the game after the command execution.
"""
board_width=game_stats['board_size'][0]
board_lenght=game_stats['board_size'][1]
damages=game_stats ['ship'][ship]['damages']
ship_abscissa=game_stats['ship'][ship]['position'][0]
ship_orderly=game_stats['ship'][ship]['position'][1]
distance=(coordinate[0]-ship_abscissa ) + (coordinate[1]-ship_orderly )
if distance<=game_stats ['ship'][ship]['range'] :
for element in game_stats['board'][coordinate] :
game_stats['ship'][element]['heal_point']-=damages
if game_stats['ship'][element]['heal_point']<=0:
game_stats['board'][coordinate].remove(element)
return new_game_stats
raise NotImplementedError
|
Python
| 0.000002
|
@@ -2563,17 +2563,16 @@
Version%0A
-h
----
@@ -4151,17 +4151,20 @@
%22%22%22%0A
-%09
+
board_wi
@@ -4195,17 +4195,20 @@
ze'%5D%5B0%5D%0A
-%09
+
board_le
@@ -4242,17 +4242,20 @@
'%5D%5B1%5D%0A%09%0A
-%09
+
damages=
@@ -4291,17 +4291,20 @@
mages'%5D%0A
-%09
+
ship_abs
@@ -4350,17 +4350,20 @@
n'%5D%5B0%5D %0A
-%09
+
ship_ord
@@ -4407,19 +4407,21 @@
on'%5D%5B1%5D%0A
-%09%0A%09
+%0A
distance
@@ -4486,17 +4486,20 @@
rly )%0A%09%0A
-%09
+
if dista
@@ -4543,18 +4543,24 @@
'%5D :%0A%09%09%0A
-%09%09
+
for elem
@@ -4604,19 +4604,28 @@
%5D :%0A%09%09%09%0A
-%09%09%09
+
game_sta
@@ -4671,19 +4671,28 @@
ges%0A%09%09%09%0A
-%09%09%09
+
if game_
@@ -4736,12 +4736,24 @@
=0:%0A
-%09%09%09%09
+
game
@@ -4796,17 +4796,20 @@
lement)%0A
-%09
+
return n
|
91a3a94466736ef6996befa73549e309fb9251f8
|
Remove unused import
|
explorer/urls.py
|
explorer/urls.py
|
from django.conf.urls import patterns, url
from explorer.views import (
QueryView,
CreateQueryView,
PlayQueryView,
DeleteQueryView,
ListQueryView,
ListQueryLogView,
download_query,
view_csv_query,
email_csv_query,
download_csv_from_sql,
schema,
format_sql,
)
urlpatterns = [
url(r'(?P<query_id>\d+)/$', QueryView.as_view(), name='query_detail'),
url(r'(?P<query_id>\d+)/download$', download_query, name='query_download'),
url(r'(?P<query_id>\d+)/csv$', view_csv_query, name='query_csv'),
url(r'(?P<query_id>\d+)/email_csv$', email_csv_query, name='email_csv_query'),
url(r'(?P<pk>\d+)/delete$', DeleteQueryView.as_view(), name='query_delete'),
url(r'new/$', CreateQueryView.as_view(), name='query_create'),
url(r'play/$', PlayQueryView.as_view(), name='explorer_playground'),
url(r'csv$', download_csv_from_sql, name='generate_csv'),
url(r'schema/$', schema, name='explorer_schema'),
url(r'logs/$', ListQueryLogView.as_view(), name='explorer_logs'),
url(r'format/$', format_sql, name='format_sql'),
url(r'^$', ListQueryView.as_view(), name='explorer_index'),
]
|
Python
| 0.000001
|
@@ -25,18 +25,8 @@
port
- patterns,
url
|
eb758ca756dc5716252b0364aadd5ef5a40ef773
|
Streamline test_ExpectationsStore_with_DatabaseStoreBackend
|
tests/data_context/store/test_expectations_store.py
|
tests/data_context/store/test_expectations_store.py
|
import pytest
from great_expectations.core import ExpectationSuite
from great_expectations.data_context.store import (
DatabaseStoreBackend,
ExpectationsStore,
)
from great_expectations.data_context.types.resource_identifiers import (
ExpectationSuiteIdentifier,
)
from great_expectations.exceptions import StoreBackendError
def test_expectations_store():
my_store = ExpectationsStore()
with pytest.raises(TypeError):
my_store.set("not_a_ValidationResultIdentifier")
ns_1 = ExpectationSuiteIdentifier.from_tuple(tuple("a.b.c.warning"))
my_store.set(ns_1, ExpectationSuite(expectation_suite_name="a.b.c.warning"))
assert my_store.get(ns_1) == ExpectationSuite(
expectation_suite_name="a.b.c.warning"
)
ns_2 = ExpectationSuiteIdentifier.from_tuple(tuple("a.b.c.failure"))
my_store.set(ns_2, ExpectationSuite(expectation_suite_name="a.b.c.failure"))
assert my_store.get(ns_2) == ExpectationSuite(
expectation_suite_name="a.b.c.failure"
)
assert set(my_store.list_keys()) == {
ns_1,
ns_2,
}
def test_ExpectationsStore_with_DatabaseStoreBackend_postgres(caplog, sa):
connection_kwargs = {
"drivername": "postgresql",
"username": "postgres",
"password": "",
"host": "localhost",
"port": "5432",
"database": "test_ci",
}
# First, demonstrate that we pick up default configuration
my_store = ExpectationsStore(
store_backend={
"class_name": "DatabaseStoreBackend",
"credentials": connection_kwargs,
}
)
with pytest.raises(TypeError):
my_store.get("not_a_ExpectationSuiteIdentifier")
# first suite to add to db
default_suite = ExpectationSuite(
expectation_suite_name="a.b.c",
meta={"test_meta_key": "test_meta_value"},
expectations=[],
)
ns_1 = ExpectationSuiteIdentifier.from_tuple(tuple("a.b.c"))
# initial set and check if first suite exists
my_store.set(ns_1, default_suite)
assert my_store.get(ns_1) == ExpectationSuite(
expectation_suite_name="a.b.c",
meta={"test_meta_key": "test_meta_value"},
expectations=[],
)
# update suite and check if new value exists
updated_suite = ExpectationSuite(
expectation_suite_name="a.b.c",
meta={"test_meta_key": "test_new_meta_value"},
expectations=[],
)
my_store.set(ns_1, updated_suite)
assert my_store.get(ns_1) == ExpectationSuite(
expectation_suite_name="a.b.c",
meta={"test_meta_key": "test_new_meta_value"},
expectations=[],
)
def test_ExpectationsStore_with_DatabaseStoreBackend_sqlite(sa):
# Use sqlite so we don't require postgres for this test.
connection_kwargs = {"drivername": "sqlite"}
# First, demonstrate that we pick up default configuration
my_store = ExpectationsStore(
store_backend={
"class_name": "DatabaseStoreBackend",
"credentials": connection_kwargs,
}
)
with pytest.raises(TypeError):
my_store.get("not_a_ExpectationSuiteIdentifier")
ns_1 = ExpectationSuiteIdentifier.from_tuple(tuple("a.b.c.warning"))
my_store.set(ns_1, ExpectationSuite(expectation_suite_name="a.b.c.warning"))
assert my_store.get(ns_1) == ExpectationSuite(
expectation_suite_name="a.b.c.warning"
)
ns_2 = ExpectationSuiteIdentifier.from_tuple(tuple("a.b.c.failure"))
my_store.set(ns_2, ExpectationSuite(expectation_suite_name="a.b.c.failure"))
assert my_store.get(ns_2) == ExpectationSuite(
expectation_suite_name="a.b.c.failure"
)
assert set(my_store.list_keys()) == {
ns_1,
ns_2,
}
|
Python
| 0
|
@@ -1145,237 +1145,122 @@
kend
-_postgres(caplog, sa):%0A connection_kwargs = %7B%0A %22drivername%22: %22postgresql%22,%0A %22username%22: %22postgres%22,%0A %22password%22: %22%22,%0A %22host%22: %22localhost%22,%0A %22port%22: %225432%22,%0A %22database%22: %22test_ci%22,%0A
+(sa):%0A # Use sqlite so we don't require postgres for this test.%0A connection_kwargs = %7B%22drivername%22: %22sqlite%22
%7D%0A%0A
@@ -1682,32 +1682,40 @@
uite_name=%22a.b.c
+.warning
%22,%0A meta=
@@ -1845,16 +1845,24 @@
e(%22a.b.c
+.warning
%22))%0A
@@ -2025,32 +2025,40 @@
uite_name=%22a.b.c
+.warning
%22,%0A meta=
@@ -2243,32 +2243,40 @@
uite_name=%22a.b.c
+.warning
%22,%0A meta=
@@ -2474,16 +2474,24 @@
e=%22a.b.c
+.warning
%22,%0A
@@ -2576,771 +2576,8 @@
)%0A%0A
-%0Adef test_ExpectationsStore_with_DatabaseStoreBackend_sqlite(sa):%0A # Use sqlite so we don't require postgres for this test.%0A connection_kwargs = %7B%22drivername%22: %22sqlite%22%7D%0A%0A # First, demonstrate that we pick up default configuration%0A my_store = ExpectationsStore(%0A store_backend=%7B%0A %22class_name%22: %22DatabaseStoreBackend%22,%0A %22credentials%22: connection_kwargs,%0A %7D%0A )%0A%0A with pytest.raises(TypeError):%0A my_store.get(%22not_a_ExpectationSuiteIdentifier%22)%0A%0A ns_1 = ExpectationSuiteIdentifier.from_tuple(tuple(%22a.b.c.warning%22))%0A my_store.set(ns_1, ExpectationSuite(expectation_suite_name=%22a.b.c.warning%22))%0A assert my_store.get(ns_1) == ExpectationSuite(%0A expectation_suite_name=%22a.b.c.warning%22%0A )%0A%0A
@@ -2645,17 +2645,16 @@
lure%22))%0A
-%0A
my_s
|
eff3a021cbd816c83537f13ec37b203aaeb5d79e
|
testing encoding of std. err file in parse_to_json_file
|
cgum/program.py
|
cgum/program.py
|
from cgum.basic import *
from cgum.utility import FNULL
import cgum.statement as statement
import cgum.expression as expression
import cgum.preprocessor as preprocessor
import cgum.typs as typs
from subprocess import Popen, CalledProcessError
import os.path
import json
import tempfile
# TODO: Probe
class Asm(Node):
CODE = "260800"
LABEL = "Asm"
def __init__(self, pos, length, label, children):
assert label is None
super().__init__(pos, length, label, children)
class Label(Node):
CODE = "270100"
LABEL = "Label"
def __init__(self, pos, length, label, children):
assert label is None
assert len(children) == 2
assert isinstance(children[0], GenericString)
super().__init__(pos, length, label, children)
def name(self):
return self.__children[0].to_s()
def statement(self):
return self.__children[1]
class FunctionParameter(Node):
CODE = "220100"
LABEL = "ParameterType"
def __init__(self, pos, length, label, children):
assert label is None
assert len(children) <= 2
# Find the optional type and name of this parameter
tmp = children.copy()
self.__typ = \
tmp.pop(0) if (tmp and isinstance(tmp[0], typs.FullType)) else None
self.__name = tmp.pop(0) if tmp else None
assert self.__typ is None or isinstance(self.__typ, typs.FullType)
assert self.__name is None or isinstance(self.__name, GenericString)
super().__init__(pos, length, label, children)
def is_incomplete(self):
return self.name() is None
def typ(self):
return self.__typ.to_s() if self.__typ else None
def name(self):
return self.__name.to_s() if self.__name else None
class FunctionParameters(Node):
CODE = "200000"
LABEL = "ParamList"
def __init__(self, pos, length, label, children):
assert label is None
assert all([isinstance(c, FunctionParameter) for c in children])
super().__init__(pos, length, label, children)
def parameters(self):
return self.__children
class FunctionDefinition(Node):
CODE = "380000"
LABEL = "Definition"
@staticmethod
def from_json(jsn):
return FunctionDefinition(jsn['pos'], name, params, block, storage, dots)
def __init__(self, pos, length, label, children):
assert len(children) >= 3 and len(children) <= 5
tmp = children.copy()
self.__storage = \
tmp.pop(0) if isinstance(tmp[0], typs.Storage) else None
self.__parameters = tmp.pop(0)
self.__dots = \
tmp.pop(0) if isinstance(tmp[0], typs.DotsParameter) else None
self.__name = tmp.pop(0)
self.__block = tmp.pop(0)
assert isinstance(self.__parameters, FunctionParameters)
assert self.__dots is None or \
isinstance(self.__dots, typs.DotsParameter)
assert self.__storage is None or \
isinstance(self.__storage, typs.Storage)
assert isinstance(self.__name, GenericString)
assert isinstance(self.__block, statement.Block)
super().__init__(pos, length, label, children)
def name(self):
return self.__name
def parameters(self):
return self.__parameters
def block(self):
return self.__block
def storage(self):
return self.__storage
def dots(self):
return self.__dots
def is_variadic(self):
return not (self.dots() is None)
# Used to mark the end of the program!
class FinalDef(Token):
CODE = "450800"
LABEL = "FinalDef"
# Represents the root AST node for a program
# For now we just get all the "components" of a program and worry about what
# kind of components they might be later.
#
# Throw away the FinalDef
class Program(Node):
CODE = "460000"
LABEL = "Program"
# Generates an AST for a given source code file, using GumTree and CGum
@staticmethod
def from_source_file(fn):
tmp_f = tempfile.NamedTemporaryFile()
Program.parse_to_json_file(fn, tmp_f)
return Program.from_json_file(tmp_f.name)
# Parses a JSON CGum AST, stored in a file at a specified location, into an
# equivalent, Python representation
@staticmethod
def from_json_file(fn):
#print("Attempting to read CGum AST from a JSON file: %s" % fn)
assert os.path.isfile(fn), "file not found"
with open(fn, 'r') as f:
program = Node.from_json(json.load(f)['root'])
#print("Finished converting CGum AST from JSON into Python")
program.renumber()
return program
def __init__(self, pos, length, label, children):
assert label is None
assert len(children) >= 1
assert isinstance(children[-1], FinalDef)
children.pop()
super().__init__(pos, length, label, children)
@staticmethod
def parse_to_json_file(src_fn, jsn_f):
with tempfile.TemporaryFile() as f_err:
cmd = "gumtree parse \"%s\"" % src_fn
p = Popen(cmd, shell=True, stdin=FNULL, stdout=jsn_f, stderr=f_err)
# read the contents of the standard error
f_err.seek(0)
err = f_err.read()
# ensure the exit status was zero
if p.wait() != 0:
raise Exception("ERROR [PyCGum/parse_to_json_file]: unexpected exit code - %s" % error)
# run-time exceptions can occur whilst still returning an exit status
# of zero
elif err.startswith("java.lang.RuntimeException:"):
raise Exception("ERROR [PyCGum/parse_to_json_file]: %s" % err)
|
Python
| 0.999931
|
@@ -5234,16 +5234,20 @@
err =
+str(
f_err.re
@@ -5250,16 +5250,41 @@
r.read()
+)%0A%0A print(err)
%0A%0A
|
3b4dd9a59de9a37a4167f64fec2f3896479f56c9
|
Simplify option formatting.
|
qipipe/registration/ants/similarity_metrics.py
|
qipipe/registration/ants/similarity_metrics.py
|
class SimilarityMetric(object):
_FMT = "{name}[{fixed}, {moving}, {opts}]"
def __init__(self, name, *opts):
self.name = name
self.opts = opts
def format(self, fixed, moving, weight=1):
"""
Formats the ANTS similiarity metric argument.
:param reference: the fixed reference file
:param moving: the moving file to register
:param weight: the weight to assign this metric (default 1)
:rtype: str
"""
opt_arg = ', '.join([weight] + self.opts)
return SimilarityMetric._FMT.format(name=self.name, fixed=fixed, moving=moving, opts=opt_arg)
def __str__(self):
return self.name
class PR(SimilarityMetric):
"""
The probability mapping metric.
"""
def __init__(self):
super(PR, self).__init__('PR')
class CC(SimilarityMetric):
"""
The cross-correlation metric.
"""
def __init__(self, radius=4):
super(CC, self).__init__('CC', radius)
class MI(SimilarityMetric):
"""
The mutual information metric.
"""
def __init__(self, bins=32):
super(MI, self).__init__('MI', bins)
class MSQ(SimilarityMetric):
"""
The mean-squared metric.
"""
def __init__(self):
super(MSQ, self).__init__('MSQ', 0)
class JTB(SimilarityMetric):
"""
The B-spline metric.
"""
def __init__(self, radius=32):
super(JTB, self).__init__('JTB', radius)
|
Python
| 0.000001
|
@@ -488,16 +488,71 @@
opt
+s = %5Bweight%5D%0A opts.extend(self.opts)%0A opt
_arg = '
@@ -564,24 +564,8 @@
oin(
-%5Bweight%5D + self.
opts
|
0d666e9d3a1f998d92fb9c6cc1ffdb9e9414e74e
|
reduce the size of the "smile" text
|
photobooth.py
|
photobooth.py
|
#!/usr/bin/env python3
import pygame
import subprocess
import os
import configparser
import sys
import time
import calendar
from itertools import chain
config = {
"photostorage": "Pictures",
"chdkptp": "chdkptp",
"fullscreen": "-w" not in sys.argv, # -w for windowed
"bgcolor": (0, 0, 0),
"textcolor": (255, 255, 255),
"displayPhotoFor": 5,
"photosPerSet": 4,
"countdown": 4,
"countdownSpeed": 1
}
def configure():
cf = configparser.SafeConfigParser()
cf.read(['photobooth.cfg', os.path.expanduser('~/.photobooth.cfg')])
if cf.has_section('photobooth'):
for key in config:
if cf.has_option('photobooth', key):
if type(config[key]) == int:
config[key] = cf.getint('photobooth', key)
else:
config[key] = cf.get('photobooth', key)
if key in ["bgcolor", "textcolor"]:
config[key] = config[key].split(",")
for color in range(len(config[key])):
config[key][color] = int(config[key][color].strip())
config[key] = tuple(config[key])
print(config)
def waitForInput(stream, waitFor=">"):
char = stream.read(1).decode('utf-8')
while char != waitFor:
print(char, end="")
sys.stdout.flush()
char = stream.read(1).decode('utf-8')
print(char)
## The following code was stolen from http://www.pygame.org/wiki/TextWrapping
def truncline(text, font, maxwidth):
real = len(text)
stext = text
l = font.size(text)[0]
cut = 0
a = 0
done = 1
while l > maxwidth:
a = a+1
n = text.rsplit(None, a)[0]
if stext == n:
cut += 1
stext = n[:-cut]
else:
stext = n
l = font.size(stext)[0]
real = len(stext)
done = 0
return real, done, stext
def wrapline(text, font, maxwidth):
done = 0
wrapped = []
while not done:
nl, done, stext = truncline(text, font, maxwidth)
wrapped.append(stext.strip())
text = text[nl:]
return wrapped
def renderText(textstr, game, fontSize=1000):
font = pygame.font.Font(None, fontSize)
lines = wrapline(textstr, font, game['size'][0]-100)
height = font.size(lines[0])[1]
for line in range(0, len(lines)):
text = font.render(lines[line], 1, config['textcolor'])
textpos = text.get_rect()
textpos.centerx = game['background'].get_rect().centerx
textpos.centery = height*0.5 + height*line
game['background'].blit(text, textpos)
game['screen'].blit(game['background'], (0, 0))
pygame.display.flip()
game['clock'].tick()
def takePhotoSet(chdkptp, game):
stripnumber = calendar.timegm(time.gmtime())
filename = "%s/%s" % (config['photostorage'], stripnumber)
# Enter rsint mode
chdkptp.stdin.write(("rsint %s-last\n" % filename).encode())
waitForInput(chdkptp.stdout)
photonumber = 0
while photonumber < config['photosPerSet']:
doCountdown(game)
takePhoto(chdkptp)
os.rename("%s-last.jpg" % filename, "%s-%s.jpg" % (filename, photonumber))
displayPhoto("%s-%s.jpg" % (filename, photonumber), game)
photonumber += 1
chdkptp.stdin.write(b"q\n")
waitForInput(chdkptp.stdout)
def doCountdown(game):
countdown = config['countdown']
while countdown >= 0:
game['background'].fill(config['bgcolor'])
if countdown > 0:
renderText(str(countdown), game, fontSize=1500)
elif countdown == 0:
renderText("smile", game)
countdown -= 1
time.sleep(config['countdownSpeed'])
def takePhoto(chdkptp):
chdkptp.stdin.write(b"s\n")
waitForInput(chdkptp.stdout)
def displayPhoto(filename, game):
img = pygame.image.load(filename)
imgposition = img.get_rect()
imgposition.centerx = game['background'].get_rect().centerx
imgposition.centery = game['background'].get_rect().centery
game['background'].blit(img, imgposition)
game['screen'].blit(game['background'], (0, 0))
pygame.display.flip()
game['clock'].tick()
time.sleep(config['displayPhotoFor'])
def waitForTrigger(game):
triggered = False
game['background'].fill(config['bgcolor'])
renderText("Press the #BigRedButton to begin", game, fontSize=200)
while not triggered:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
if event.type == pygame.KEYDOWN:
if event.key == 32: # spacebar
triggered = True
elif event.key in [113, 27]: # q, esc
sys.exit()
time.sleep(0.05)
print("triggered!!")
def main():
configure()
pygame.init()
displayinfo = pygame.display.Info() # Find some info out about the display
if config['fullscreen']:
size = displayinfo.current_w, displayinfo.current_h
else:
size = 1800, 800
screen = pygame.display.set_mode(size)
if config['fullscreen']:
pygame.display.toggle_fullscreen()
c = pygame.time.Clock()
background = pygame.Surface(screen.get_size())
chdkptp = subprocess.Popen(config['chdkptp'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
bufsize=0)
waitForInput(chdkptp.stdout)
chdkptp.stdin.write(b"c\n")
print("Connecting to camera....")
waitForInput(chdkptp.stdout)
chdkptp.stdin.write(b"rec\n")
print("Entering shooting mode...")
waitForInput(chdkptp.stdout)
print("Annnd into the loop we go!")
game = {
"screen": screen,
"background": background,
"clock": c,
"size": size
}
while True:
waitForTrigger(game)
takePhotoSet(chdkptp, game)
if __name__ == "__main__":
main()
|
Python
| 0.999999
|
@@ -3381,16 +3381,17 @@
tdout)%0A%0A
+%0A
def doCo
@@ -3662,23 +3662,38 @@
t(%22smile
+!
%22, game
+, fontSize=200
)%0A
|
01ea3c8481aeba52dc17d8324ddbec8b6a633f35
|
Add a method that inserts comma to given number.
|
numberutils.py
|
numberutils.py
|
##-*- coding: utf-8 -*-
#!/usr/bin/python
"""
Number to Hangul string util.
"""
__author__ = 'SeomGi, Han'
__credits__ = ['SeomGi, Han']
__copyright__ = 'Copyright 2015, Python Utils Project'
__license__ = 'MIT'
__version__ = '0.0.1'
__maintainer__ = 'SeomGi, Han'
__email__ = 'iandmyhand@gmail.com'
__status__ = 'Production'
HANGUL_NUMBER = [
'', '일', '이', '삼', '사', '오', '육', '칠', '팔', '구', '십'
]
HANGUL_NUMBER_UNIT = [
'', '십', '백', '천'
]
HANGUL_NUMBER_BIG_UNIT = [
'', '만', '억', '조', '경', '해'
]
class NumberUtils:
_int_value = 0
_string_value = ''
_len_string_value = 0
def __init__(self, int_value=0):
self._int_value = int_value
self._string_value = str(self._int_value)
self._len_string_value = len(self._string_value)
def convert_to_hangul_string(self):
"""
usage:
numberutils.NumberUtils(220000112490).convert_to_hangul_string() == '이천이백억일십일만이천사백구십'
"""
result = ''
if self._string_value and self._string_value.isdigit():
index = 0
while index < self._len_string_value:
single_result = ''
hangul_number = HANGUL_NUMBER[int(self._string_value[index])]
if hangul_number:
unit_index = ((self._len_string_value - index) % 4) - 1
single_result += hangul_number + HANGUL_NUMBER_UNIT[unit_index]
if (self._len_string_value - index - 1) % 4 == 0:
big_unit_index = (self._len_string_value - index - 1) // 4
if len(HANGUL_NUMBER_BIG_UNIT) > big_unit_index:
single_result += HANGUL_NUMBER_BIG_UNIT[big_unit_index]
result += single_result
index += 1
return result
|
Python
| 0.000075
|
@@ -43,36 +43,35 @@
%22%22%22%0A
-Number to Hangul string util
+Utilities related to Number
.%0A%22%22
@@ -225,13 +225,13 @@
= '
+1.
0.0
-.1
'%0A__
@@ -779,16 +779,135 @@
value)%0A%0A
+ def __del__(self):%0A self._int_value = 0%0A self._string_value = ''%0A self._len_string_value = 0%0A%0A
def
@@ -1042,18 +1042,23 @@
tring()
-==
+returns
'%EC%9D%B4%EC%B2%9C%EC%9D%B4%EB%B0%B1%EC%96%B5%EC%9D%BC
@@ -1932,12 +1932,737 @@
turn result%0A
+%0A def insert_comma(self):%0A %22%22%22%0A usage:%0A numberutils.NumberUtils(2200030112490).insert_comma() returns '2,200,030,112,490'%0A %22%22%22%0A%0A result = ''%0A if self._string_value and self._string_value.isdigit():%0A index = 0%0A while index %3C self._len_string_value:%0A single_result = ''%0A if ((self._len_string_value - index - 1) %25 3 == 0) and (index %3C self._len_string_value - 1):%0A single_result = self._string_value%5Bindex%5D + ','%0A else:%0A single_result = self._string_value%5Bindex%5D %0A result += single_result%0A index += 1%0A%0A return result%0A
|
bc6bacf6bd5fccf2e09dd3c07f6104e1f845351b
|
Revert "Added solution to assignment 4"
|
bootcamp/lesson4.py
|
bootcamp/lesson4.py
|
import datetime
import math
import requests
from core import test_helper
# Question 1
# ----------
# Using the datetime module return a datetime object with the year of 2015, the month of June, and the day of 1
def playing_with_dt():
return datetime.datetime(year=2015, month=06, day=01)
# Question 2
# ----------
# Using the math module return pi
def playing_with_math():
return math.pi
# Question 3
# ----------
# The following URL is public data set of demographic statistics by zip code in the city of New York
# url: https://data.cityofnewyork.us/api/views/kku6-nxdu/rows.json?accessType=DOWNLOAD
#
# Make a request to that address and inspect the contents. Return the number of unique demographic attributes in the
# data set as well as the percentage of ETHNICITY UNKNOWN formatted as a string with 2 significant figures.
# The return object should be a tuple data type
def explore_data():
demo_attributes = []
url = 'http://data.cityofnewyork.us/api/views/kku6-nxdu/rows.json?accessType=DOWNLOAD'
r = requests.get(url=url)
json = r.json()
meta = json['meta']
view = meta['view']
columns = view['columns']
for column in columns:
if column['name'] == 'PERCENT ETHNICITY UNKNOWN':
avg = column['cachedContents']['average']
if column['dataTypeName'] == 'number':
demo_attributes.append(column['name'])
num_attributes = len(set(demo_attributes))
avg_formatted = '{:.4f}%'.format(float(avg))
t = (avg_formatted, num_attributes)
return t
def main():
print "\nRunning playing_with_dt_one function..."
test_helper(playing_with_dt(), datetime.datetime(2015, 06, 01))
print "\nRunning playing_with_dt_one function..."
test_helper(playing_with_math(), math.pi)
print "\nRunning explore_data function..."
test_helper(explore_data(), ('0.0039%', 46))
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -239,61 +239,34 @@
-return datetime.datetime(year=2015, month=06, day=01)
+# Write code here%0A pass
%0A%0A%0A#
@@ -357,22 +357,34 @@
-return math.pi
+# Write code here%0A pass
%0A%0A%0A#
@@ -899,637 +899,34 @@
-demo_attributes = %5B%5D%0A url = 'http://data.cityofnewyork.us/api/views/kku6-nxdu/rows.json?accessType=DOWNLOAD'%0A%0A r = requests.get(url=url)%0A json = r.json()%0A meta = json%5B'meta'%5D%0A view = meta%5B'view'%5D%0A columns = view%5B'columns'%5D%0A%0A for column in columns:%0A if column%5B'name'%5D == 'PERCENT ETHNICITY UNKNOWN':%0A avg = column%5B'cachedContents'%5D%5B'average'%5D%0A if column%5B'dataTypeName'%5D == 'number':%0A demo_attributes.append(column%5B'name'%5D)%0A%0A num_attributes = len(set(demo_attributes))%0A avg_formatted = '%7B:.4f%7D%25'.format(float(avg))%0A t = (avg_formatted, num_attributes)%0A%0A return t
+# Write code here%0A pass
%0A%0A%0Ad
|
46eeffee39ea9ae7edacc1fe63b0686f6f2a68ad
|
Simplify logger initization logic
|
wolfbot.py
|
wolfbot.py
|
#!/usr/bin/env python3.2
# Copyright (c) 2011 Jimmy Cao
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from oyoyo.client import IRCClient
from oyoyo.parse import parse_nick
import logging
import botconfig
import time
import traceback
import modules.common
import sys
class UTCFormatter(logging.Formatter):
converter = time.gmtime
def main():
if not botconfig.DEBUG_MODE:
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler("errors.log")
fh.setLevel(logging.WARNING)
logger.addHandler(fh)
if botconfig.VERBOSE_MODE:
hdlr = logging.StreamHandler(sys.stdout)
hdlr.setLevel(logging.DEBUG)
logger.addHandler(hdlr)
formatter = UTCFormatter('[%(asctime)s] %(message)s', '%d/%b/%Y %H:%M:%S')
for handler in logger.handlers:
handler.setFormatter(formatter)
else:
logging.basicConfig(level=logging.DEBUG)
formatter = UTCFormatter('[%(asctime)s] %(message)s', '%H:%M:%S')
for handler in logging.getLogger().handlers:
handler.setFormatter(formatter)
cli = IRCClient(
{"privmsg":modules.common.on_privmsg,
"notice":lambda a, b, c, d: modules.common.on_privmsg(a, b, c, d, True),
"":modules.common.__unhandled__},
host=botconfig.HOST,
port=botconfig.PORT,
authname=botconfig.USERNAME,
password=botconfig.PASS,
nickname=botconfig.NICK,
sasl_auth=botconfig.SASL_AUTHENTICATION,
use_ssl=botconfig.USE_SSL,
connect_cb=modules.common.connect_callback
)
cli.mainLoop()
if __name__ == "__main__":
try:
main()
except Exception:
logging.error(traceback.format_exc())
|
Python
| 0.001181
|
@@ -1361,12 +1361,8 @@
if
-not
botc
@@ -1375,24 +1375,120 @@
DEBUG_MODE:%0A
+ logging.basicConfig(level=logging.DEBUG)%0A logger = logging.getLogger()%0A else:%0A
logg
@@ -1830,20 +1830,16 @@
r(hdlr)%0A
-
form
@@ -1901,36 +1901,32 @@
b/%25Y %25H:%25M:%25S')%0A
-
for handler
@@ -1949,242 +1949,8 @@
rs:%0A
- handler.setFormatter(formatter)%0A else:%0A logging.basicConfig(level=logging.DEBUG)%0A formatter = UTCFormatter('%5B%25(asctime)s%5D %25(message)s', '%25H:%25M:%25S')%0A for handler in logging.getLogger().handlers:%0A
|
d81ac294add6d2f14f283556ca1b0b2addd5f1ee
|
Update main.py
|
fx_collect/main.py
|
fx_collect/main.py
|
class MainAggregator(object):
def __init__(self):
"""
The MainAggregator class is just here to setup the initial
databases and then start each subprocess.
I'm going to change the name because its shit.
Later updates will have database health monitoring or
something, not sure, I am now just writing aimlessly because
I don't know what else to put here.
More fun in collection.py
"""
self.broker = 'fxcm'
self.db_handler = DatabaseHandler(self.broker)
self.br_handler = FXCMBrokerHandler()
self.subscriptions = {}
self.datebases = self.db_handler.get_databases()
self.time_frames = self.br_handler.supported_time_frames
self._subscriptions_manager()
def _setup_database(self, instruments):
for instrument in instruments:
if instrument.replace('/','') not in self.datebases:
# Setup each instrument
self.db_handler.create(
instrument, self.time_frames
)
self.datebases = self.db_handler.get_databases()
def _start_subprocess(self, instruments):
# There are lots of offers to track at FXCM.
# Its best to only track what you intend to trade.
# Lots of connections will cause login timeouts and
# high RAM usage : (
# Some defaults below have been set.
instruments = ['EUR/USD', 'USD/JPY', 'GBP/USD',
'AUD/USD', 'USD/CHF', 'NZD/USD',
'USD/CAD', 'USDOLLAR', 'UK100']
#instruments = ['EUR/USD']
#instruments = ['GBP/USD','UK100']
for i in instruments:
if i not in self.subscriptions:
broker = self.broker
instrument = i
s = subprocess.Popen(
['python3', 'collection.py', broker, instrument]
)
self.subscriptions[i] = s
time.sleep(5) # Watch out for login timeouts
def _subprocess_manager(self):
instruments = self.br_handler.get_offers()
self._setup_database(instruments)
self._start_subprocess(instruments)
self.br_handler.session.logout()
try:
while True:
time.sleep(60)
except KeyboardInterrupt:
self._kill()
def _kill(self):
print('Kill Command sent to all SubProcesses')
for s in self.subscriptions:
self.subscriptions[s].kill()
sys.exit(0)
print("Starting Collection, press CTRL+C to stop")
MainAggregator()
|
Python
| 0.000001
|
@@ -1,12 +1,139 @@
+from database import DatabaseHandler%0Afrom broker import FXCMBrokerHandler%0A%0Aimport subprocess%0Aimport time%0Aimport sys%0Aimport re%0A%0A
class MainAg
|
864d551ca7aaf661ecfe54cca8c69e0f9daf1c46
|
fix license
|
ezcf/__init__.py
|
ezcf/__init__.py
|
__author__ = "laike9m (laike9m@gmail.com)"
__title__ = 'ezcf'
__version__ = '0.0.1'
# __build__ = None
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015 laike9m'
import sys
from .api import ConfigFinder
sys.meta_path.append(ConfigFinder())
|
Python
| 0.000001
|
@@ -81,27 +81,8 @@
.1'%0A
-# __build__ = None%0A
__li
@@ -96,18 +96,11 @@
= '
-Apache 2.0
+MIT
'%0A__
|
ae8e98a4e609bee0e73175bdc50859dd0bed62cb
|
Fix lint errors.
|
recipe-server/normandy/base/api/serializers.py
|
recipe-server/normandy/base/api/serializers.py
|
from django.contrib.auth.models import User
from rest_framework import serializers
class UserSerializer(serializers.ModelSerializer):
id = serializers.IntegerField()
first_name = serializers.CharField()
last_name = serializers.CharField()
email = serializers.CharField()
class Meta:
model = User
fields = [
'id',
'first_name',
'last_name',
'email',
]
class ServiceInfoSerializer(serializers.ModelSerializer):
user = UserSerializer();
peer_approval_enforced = serializers.BooleanField()
class Meta:
model = User
fields = [
'user',
'peer_approval_enforced',
]
|
Python
| 0.000006
|
@@ -531,9 +531,8 @@
er()
-;
%0A
|
53db7465c8bb5f7ea0a9647b69555df36ade4191
|
Use a trigger and lambda instead of schedule for the delay stage.
|
moa/stage/delay.py
|
moa/stage/delay.py
|
__all__ = ('Delay', )
import random
import time
from kivy.clock import Clock
from kivy.properties import (OptionProperty, BoundedNumericProperty,
ReferenceListProperty)
from moa.stage import MoaStage
class Delay(MoaStage):
def pause(self, *largs, **kwargs):
if super(Delay, self).pause(*largs, **kwargs):
self.delay = max(0, self.delay - (time.clock() - self.start_time))
Clock.unschedule(self.step_stage)
return True
return False
def unpause(self, *largs, **kwargs):
if super(Delay, self).unpause(*largs, **kwargs):
Clock.schedule_once(self.step_stage, self.delay)
return True
return False
def stop(self, *largs, **kwargs):
if super(Delay, self).stop(*largs, **kwargs):
Clock.unschedule(self.step_stage)
return True
return False
def step_stage(self, *largs, **kwargs):
if not super(Delay, self).step_stage(*largs, **kwargs):
return False
if self.delay_type == 'random':
self.delay = random.uniform(self.min, self.max)
Clock.schedule_once(self.step_stage, self.delay)
return True
min = BoundedNumericProperty(0., min=0.)
max = BoundedNumericProperty(1., min=0.)
range = ReferenceListProperty(min, max)
delay = BoundedNumericProperty(0.5, min=0.)
delay_type = OptionProperty('constant', options=['constant', 'random'])
|
Python
| 0
|
@@ -226,16 +226,271 @@
tage):%0A%0A
+ _delay_step_trigger = None%0A%0A def __init__(self, **kwargs):%0A super(Delay, self).__init__(**kwargs)%0A self._delay_step_trigger = Clock.create_trigger(lambda dt:%0A self.step_stage())%0A%0A
def
@@ -512,32 +512,32 @@
rgs, **kwargs):%0A
-
if super
@@ -670,40 +670,40 @@
-Clock.unschedule(self.step_stage
+self._delay_step_trigger.cancel(
)%0A
@@ -860,55 +860,91 @@
-Clock.schedule_once(self.step_stage, self.delay
+self._delay_step_trigger.timeout = self.delay%0A self._delay_step_trigger(
)%0A
@@ -1095,40 +1095,40 @@
-Clock.unschedule(self.step_stage
+self._delay_step_trigger.cancel(
)%0A
@@ -1405,24 +1405,25 @@
lf.max)%0A
+%0A
Clock.sc
@@ -1418,55 +1418,87 @@
-Clock.schedule_once(self.step_stage,
+self._delay_step_trigger.timeout = self.delay%0A
self.
+_
delay
+_step_trigger(
)%0A
|
bdacc7646c087d8fd87feb20c6af1d23d5cb1feb
|
clean up track/models
|
common/djangoapps/track/models.py
|
common/djangoapps/track/models.py
|
from django.db import models
from django.db import models
class TrackingLog(models.Model):
dtcreated = models.DateTimeField('creation date', auto_now_add=True)
username = models.CharField(max_length=32, blank=True)
ip = models.CharField(max_length=32, blank=True)
event_source = models.CharField(max_length=32)
event_type = models.CharField(max_length=512, blank=True)
event = models.TextField(blank=True)
agent = models.CharField(max_length=256, blank=True)
page = models.CharField(max_length=512, blank=True, null=True)
time = models.DateTimeField('event time')
host = models.CharField(max_length=64, blank=True)
def __unicode__(self):
s = "[%s] %s@%s: %s | %s | %s | %s" % (self.time, self.username, self.ip, self.event_source,
self.event_type, self.page, self.event)
return s
|
Python
| 0.000001
|
@@ -1,34 +1,4 @@
-from django.db import models%0A%0A
from
@@ -57,16 +57,90 @@
Model):%0A
+ %22%22%22Defines the fields that are stored in the tracking log database%22%22%22%0A
dtcr
@@ -735,47 +735,32 @@
-s = %22%5B%25s%5D %25s@%25s: %25s %7C %25s %7C %25s %7C %25s%22 %25 (
+fmt = (%0A u%22%5B%7B
self
@@ -764,18 +764,20 @@
elf.time
-,
+%7D%5D %7B
self.use
@@ -785,102 +785,95 @@
name
-,
+%7D@%7B
self.ip
-, self.event_source,%0A self.event_type,
+%7D: %22%0A u%22%7Bself.event_source%7D%7C %7Bself.event_type%7D %7C %22%0A u%22%7B
self
@@ -877,18 +877,21 @@
elf.page
-,
+%7D %7C %7B
self.eve
@@ -892,16 +892,27 @@
lf.event
+%7D%22%0A
)%0A
@@ -920,10 +920,30 @@
return
-s
+fmt.format(self=self)
%0A
|
de99cf5de9fa96c0a5e0853b04070cd4637c43af
|
Fix bug in csv report generation and add some code refactoring
|
edx_data_research/reporting/edx_base.py
|
edx_data_research/reporting/edx_base.py
|
import csv
import os
from pymongo import MongoClient
class EdX(object):
def __init__(self, args):
self.uri = args.uri
client = MongoClient(self.uri)
self.db = client[args.db_name]
self._collections = None
self.output_directory = args.output_directory
self.row_limit = args.row_limit
self.csv_data = None
self.headers = None
self.anonymize = args.anonymize
def generate_csv(self, csv_data, headers, output_file):
"""
Genersate csv report from generated data and given list of headers
"""
self.csv_data = csv_data
self.headers = headers
number_of_rows = len(self.csv_data) + 1
if number_of_rows <= self.row_limit:
self._write_to_csv(output_file)
else:
if number_of_rows % self.row_limit:
number_of_splits = number_of_rows // self.row_limit + 1
else:
number_of_splits = number_of_rows // self.row_limit
for index in xrange(number_of_splits):
self._write_to_csv(output_file.split('.')[0] + '_' + str(index) + '.csv', index)
def _write_to_csv(self, output_file, index=0):
"""
Helper method to write rows to csv files
"""
output_file_path = os.path.abspath(os.path.join(self.output_directory, output_file))
with open(output_file_path, 'w') as csv_file:
writer = csv.writer(csv_file)
writer.writerow(self.headers)
for row in (item for item in self.csv_data[index * self.row_limit :
(index + 1) * self.row_limit]):
# This loop looks for unicode objects and encodes them to ASCII to avoif Unicode errors,
# for e.g. UnicodeEncodeError: 'ascii' codec can't encode character u'\xf1'
row = [item.encode('ascii', 'ignore') if isinstance(item, unicode)
else item for item in row]
writer.writerow(row)
@property
def collections(self):
return self._collections
@collections.setter
def collections(self, _collections):
self._collections = {collection : self.db[collection] for collection in _collections}
def report_name(self, *args):
return '-'.join(item for item in args) + '.csv'
def anonymize_row(self, yes, no, rest):
row = yes if self.anonymize else yes + no
row.extend(rest)
return row
def anonymize_headers(self, headers):
return self.anonymize_row(['Hash ID'], ['User ID', 'Username'], headers)
def user_map(self, user_id=None, username=None):
if username:
user_map = (self.collections['user_id_map']
.find_one({'username' : username}))
return ((user_map['hash_id'], user_map['id']) if user_map
else (None, None))
user_map = (self.collections['user_id_map']
.find_one({'id' : user_id}))
return ((user_map['hash_id'], user_map['username']) if user_map
else (None, None))
|
Python
| 0
|
@@ -588,25 +588,28 @@
%09%22%22%22%0A
-%09
+
self.csv_dat
@@ -670,19 +670,26 @@
mber_of_
-row
+data_point
s = len(
@@ -702,20 +702,16 @@
sv_data)
- + 1
%0A
@@ -728,15 +728,21 @@
_of_
-row
+data_point
s %3C
-=
sel
@@ -754,24 +754,44 @@
_limit:%0A
-%09
+
+number_of_rows =
self._w
@@ -827,61 +827,63 @@
-else:%0A%09 if number_of_rows %25 self.row_limit:%0A%09%09
+ return number_of_rows == number_of_data_points + 1%0A
@@ -907,35 +907,42 @@
its = number_of_
-row
+data_point
s // self.row_li
@@ -948,22 +948,17 @@
imit
- + 1%0A%09
+%0A
else
@@ -957,60 +957,34 @@
-else:%0A%09%09 number_of_splits = number_of_rows //
+if number_of_data_points %25
sel
@@ -994,26 +994,26 @@
ow_limit
+:
%0A
-%09
for inde
@@ -1008,28 +1008,12 @@
-for index in xrange(
+
numb
@@ -1028,21 +1028,22 @@
lits
-):%0A%09
+ += 1%0A
s
@@ -1034,27 +1034,46 @@
= 1%0A
-
+number_of_rows = sum(%5B
self._write_
@@ -1106,16 +1106,46 @@
.')%5B0%5D +
+%0A
'_' + s
@@ -1171,16 +1171,160 @@
index)%0A
+ for index in xrange(number_of_splits)%5D)%0A return number_of_rows == number_of_data_points + number_of_splits%0A
%09
@@ -1515,16 +1515,59 @@
rectory,
+%0A
output_
@@ -1711,16 +1711,42 @@
eaders)%0A
+ row_count = 1%0A
@@ -1946,16 +1946,35 @@
them to
+%0A #
ASCII to
@@ -1978,17 +1978,17 @@
to avoi
-f
+d
Unicode
@@ -1995,34 +1995,16 @@
errors,
-%0A #
for e.g
@@ -2025,16 +2025,35 @@
eError:
+%0A #
'ascii'
@@ -2262,16 +2262,56 @@
(row)%0A%09%09
+row_count += 1%0A return row_count%0A
%0A @pr
@@ -2503,16 +2503,45 @@
lection%5D
+%0A
for col
|
ed6b086f785c4856ef73484ffc2082a0fba200b8
|
Update accessible classes
|
ezhc/__init__.py
|
ezhc/__init__.py
|
from ._config import load_js_libs
from ._highcharts import Highcharts
from ._highstock import Highstock
from . import sample
from . import build
from ._clock import Clock
__all__ = ['Highcharts',
'Highstock',
'sample',
'build',
'Clock',
]
load_js_libs()
|
Python
| 0
|
@@ -101,16 +101,85 @@
ghstock%0A
+from ._global_options import GlobalOptions%0Afrom ._theme import Theme%0A
from . i
@@ -284,24 +284,72 @@
Highstock',%0A
+ 'GlobalOptions',%0A 'Theme',%0A
'
|
dec3d29f8482cb71f5ea3337622460a38b4f9124
|
Set the default to production
|
galaxy/__init__.py
|
galaxy/__init__.py
|
# (c) 2012-2016, Ansible by Red Hat
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.
import os.path
import sys
import warnings
__version__ = '2.2.0'
__all__ = ['__version__']
def find_commands(management_dir):
# Modified version of function from django/core/management/__init__.py.
command_dir = os.path.join(management_dir, 'commands')
commands = []
try:
for f in os.listdir(command_dir):
if f.startswith('_'):
continue
elif f.endswith('.py') and f[:-3] not in commands:
commands.append(f[:-3])
elif f.endswith('.pyc') and f[:-4] not in commands:
commands.append(f[:-4])
except OSError:
pass
return commands
def prepare_env():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'galaxy.settings.default')
local_site_packages = os.path.join(
os.path.dirname(__file__), 'lib', 'site-packages')
sys.path.insert(0, local_site_packages)
from django.conf import settings
if not settings.DEBUG:
warnings.simplefilter('ignore', DeprecationWarning)
# import django.utils
settings.version = __version__
def manage():
# Prepare the galaxy environment.
prepare_env()
# Now run the command (or display the version).
from django.core.management import execute_from_command_line
if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):
sys.stdout.write('galaxy-%s\n' % __version__)
else:
execute_from_command_line(sys.argv)
|
Python
| 0.000014
|
@@ -1414,23 +1414,26 @@
ettings.
-default
+production
')%0A l
|
20d41f31e40a8d20902fcfea4543fa9c2c4d8cae
|
add dummy import function, so modulefinder can find our tables.
|
Lib/fontTools/ttLib/tables/__init__.py
|
Lib/fontTools/ttLib/tables/__init__.py
|
"""Empty __init__.py file to signal Python this directory is a package.
(It can't be completely empty since WinZip seems to skip empty files.)
"""
|
Python
| 0.000002
|
@@ -1,147 +1,793 @@
-%22%22%22Empty __init__.py file to signal Python this directory is a package.%0A(It can't be completely empty since WinZip seems to skip empty files.)%0A%22%22%22
+def _moduleFinderHint():%0A%09import B_A_S_E_%0A%09import C_F_F_%0A%09import D_S_I_G_%0A%09import DefaultTable%0A%09import G_D_E_F_%0A%09import G_P_O_S_%0A%09import G_S_U_B_%0A%09import J_S_T_F_%0A%09import L_T_S_H_%0A%09import O_S_2f_2%0A%09import T_S_I_B_%0A%09import T_S_I_D_%0A%09import T_S_I_J_%0A%09import T_S_I_P_%0A%09import T_S_I_S_%0A%09import T_S_I_V_%0A%09import T_S_I__0%0A%09import T_S_I__1%0A%09import T_S_I__2%0A%09import T_S_I__3%0A%09import T_S_I__5%0A%09import __init__%0A%09import _c_m_a_p%0A%09import _c_v_t%0A%09import _f_p_g_m%0A%09import _g_a_s_p%0A%09import _g_l_y_f%0A%09import _h_d_m_x%0A%09import _h_e_a_d%0A%09import _h_h_e_a%0A%09import _h_m_t_x%0A%09import _k_e_r_n%0A%09import _l_o_c_a%0A%09import _m_a_x_p%0A%09import _n_a_m_e%0A%09import _p_o_s_t%0A%09import _p_r_e_p%0A%09import _v_h_e_a%0A%09import _v_m_t_x%0A%09import asciiTable%0A%09import otBase%0A%09import otConverters%0A%09import otData%0A%09import otTables%0A%09import ttProgram
%0A
|
4380f7484f7ee7a122bb598e0a8ffa700fdac6a2
|
Allow for runserver_plus
|
i18n_helper/__init__.py
|
i18n_helper/__init__.py
|
from django.utils.functional import lazy
from django.utils.safestring import mark_safe
from django.utils.encoding import is_protected_type
from django.utils.safestring import SafeUnicode
from django.conf import settings
from wraptools import wraps
import django
import copy
import sys
# Default values
DEFAULT_I18N_CLASS = "i18n-helper"
DEFAULT_I18N_STYLE = "display: inline; background-color: #FAF9A7;"
I18N_HELPER_DEBUG = getattr(settings, 'I18N_HELPER_DEBUG', False)
RUNSERVER = sys.argv[1:2] == ['runserver']
# Omit if not running development server
if I18N_HELPER_DEBUG and RUNSERVER:
"""
Translation debugging is set, so override django core functions and methods
as necessary
"""
i18n_helper_block = getattr(settings, 'I18N_HELPER_HTML', None)
if i18n_helper_block is None:
I18N_HELPER_CLASS = getattr(
settings, 'I18N_HELPER_CLASS', None)
# Use default style and class if no class is provided.
if I18N_HELPER_CLASS is None:
I18N_HELPER_STYLE = getattr(
settings, 'I18N_HELPER_STYLE', DEFAULT_I18N_STYLE)
i18n_helper_block = unicode(
"<div class='%s' style='%s'>{0}</div>" %
(DEFAULT_I18N_CLASS, I18N_HELPER_STYLE))
else:
i18n_helper_block = unicode(
"<div class='%s'>{0}</div>" % I18N_HELPER_CLASS)
else:
i18n_helper_block = unicode(i18n_helper_block)
# Wrap all the non-lazy translation functions
@wraps(django.utils.translation.gettext)
@wraps(django.utils.translation.ugettext)
@wraps(django.utils.translation.ngettext)
@wraps(django.utils.translation.ungettext)
@wraps(django.utils.translation.pgettext)
@wraps(django.utils.translation.npgettext)
def wrapper(original_function, *args):
original_result = original_function(*args)
return mark_safe(i18n_helper_block.format(original_result))
# Override all the lazy translation functions
django.utils.translation.ngettext_lazy = lazy(
django.utils.translation.ngettext, unicode)
django.utils.translation.gettext_lazy = lazy(
django.utils.translation.gettext, unicode)
django.utils.translation.ungettext_lazy = lazy(
django.utils.translation.ungettext, unicode)
django.utils.translation.ugettext_lazy = lazy(
django.utils.translation.ugettext, unicode)
django.utils.translation.pgettext_lazy = lazy(
django.utils.translation.pgettext, unicode)
django.utils.translation.npgettext_lazy = lazy(
django.utils.translation.npgettext, unicode)
# Override the conditional_escape to allow form labels to be marked
django.utils.html.conditional_escape = lambda html: html
def custom_resolve(self, context):
"""
Custom function to resolve variable against a given context as usual,
but using the overridden transaltion functions.
"""
if self.lookups is not None:
# We're dealing with a variable that needs to be resolved
value = self._resolve_lookup(context)
else:
# We're dealing with a literal, so it's already been "resolved"
value = self.literal
if self.translate:
if getattr(self, 'message_context', None):
return django.utils.translation.pgettext_lazy(
self.message_context, value)
else:
return django.utils.translation.ugettext_lazy(value)
return value
# Set the custom resolve to the Variable class
django.template.base.Variable.resolve = custom_resolve
# Make a copy of the function before overridding it
original_force_unicode = copy.copy(django.utils.encoding.force_unicode)
def custom_force_unicode(s, encoding='utf-8', strings_only=False,
errors='strict'):
"""
This is a wrapper of django.utils.encoding.force_unicode to
return SafeUnicode objects instead of unicode, respecting
protected_types and cases order to keep performance.
"""
# Handle the common case first, saves 30-40% in performance when s
# is an instance of unicode. This function gets called often in that
# setting.
if isinstance(s, unicode):
return SafeUnicode(s)
if strings_only and is_protected_type(s):
return s
return SafeUnicode(
original_force_unicode(s, encoding, strings_only, errors))
django.utils.encoding.force_unicode = custom_force_unicode
|
Python
| 0.000001
|
@@ -495,11 +495,14 @@
1:2%5D
- ==
+%5B0%5D in
%5B'r
@@ -510,16 +510,34 @@
nserver'
+, 'runserver_plus'
%5D%0A# Omit
|
c24979627a8a2282a297704b735b1445b56dbce6
|
Bump version. [skip ci]
|
mocket/__init__.py
|
mocket/__init__.py
|
try:
# Py2
from mocket import mocketize, Mocket, MocketEntry, Mocketizer
except ImportError:
# Py3
from mocket.mocket import mocketize, Mocket, MocketEntry, Mocketizer
__all__ = (mocketize, Mocket, MocketEntry, Mocketizer)
__version__ = '2.7.1'
|
Python
| 0
|
@@ -257,7 +257,7 @@
2.7.
-1
+2
'%0A
|
974f46f4bfe6a8307a5e4a878c14028e89d0c6d6
|
update pep8
|
newfies/dialer_campaign/admin.py
|
newfies/dialer_campaign/admin.py
|
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2012 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <info@star2billing.com>
#
from django.contrib import admin
from django.contrib import messages
from django.conf.urls.defaults import patterns
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from dialer_campaign.models import Campaign, CampaignSubscriber
from dialer_campaign.function_def import check_dialer_setting, \
dialer_setting_limit
from genericadmin.admin import GenericAdminModelAdmin
class CampaignAdmin(GenericAdminModelAdmin):
"""
Allows the administrator to view and modify certain attributes
of a Campaign.
"""
content_type_whitelist = ('voice_app/voiceapp', 'survey/surveyapp', )
fieldsets = (
(_('Standard options'), {
'fields': ('campaign_code', 'name', 'description', 'callerid',
'user', 'status', 'startingdate', 'expirationdate',
'aleg_gateway', 'content_type', 'object_id',
'extra_data', 'phonebook',
),
}),
(_('Advanced options'), {
'classes': ('collapse',),
'fields': ('frequency', 'callmaxduration', 'maxretry',
'intervalretry', 'calltimeout', 'daily_start_time',
'daily_stop_time', 'monday', 'tuesday', 'wednesday',
'thursday', 'friday', 'saturday', 'sunday')
}),
)
list_display = ('id', 'name', 'content_type', 'campaign_code', 'user',
'startingdate', 'expirationdate', 'frequency',
'callmaxduration', 'maxretry', 'aleg_gateway', 'status',
'update_campaign_status', 'count_contact_of_phonebook',
'campaignsubscriber_detail', 'progress_bar')
list_display_links = ('id', 'name', )
#list_filter = ['user', 'status', 'startingdate', 'created_date']
ordering = ('id', )
filter_horizontal = ('phonebook',)
def get_urls(self):
urls = super(CampaignAdmin, self).get_urls()
my_urls = patterns('',
(r'^$', self.admin_site.admin_view(self.changelist_view)),
(r'^add/$', self.admin_site.admin_view(self.add_view)),
)
return my_urls + urls
def add_view(self, request, extra_context=None):
"""
Override django add_view method for checking the dialer setting limit
**Logic Description**:
* Before adding campaign, check dialer setting limit if applicable
to the user, if matched then the user will be redirected to
the campaign list
"""
# Check dialer setting limit
# check Max Number of running campaigns
if check_dialer_setting(request, check_for="campaign"):
msg = _("you have too many campaigns. Max allowed %(limit)s") \
% {'limit':
dialer_setting_limit(request, limit_for="campaign")}
messages.error(request, msg)
return HttpResponseRedirect(
reverse("admin:dialer_campaign_campaign_changelist"))
ctx = {}
return super(CampaignAdmin, self).add_view(request, extra_context=ctx)
admin.site.register(Campaign, CampaignAdmin)
class CampaignSubscriberAdmin(admin.ModelAdmin):
"""Allows the administrator to view and modify certain attributes
of a CampaignSubscriber."""
list_display = ('id', 'contact', 'campaign',
'last_attempt', 'count_attempt', 'duplicate_contact',
'contact_name', 'status', 'created_date')
list_filter = ['campaign', 'status', 'created_date', 'last_attempt']
ordering = ('id', )
admin.site.register(CampaignSubscriber, CampaignSubscriberAdmin)
|
Python
| 0
|
@@ -779,50 +779,8 @@
ing,
- %5C%0A
dia
@@ -2468,15 +2468,58 @@
-(r'%5E$',
+ (r'%5E$',%0A
sel
@@ -2574,24 +2574,39 @@
+
+
(r'%5Eadd/$',
@@ -2604,16 +2604,44 @@
%5Eadd/$',
+%0A
self.ad
@@ -2669,32 +2669,51 @@
elf.add_view)),%0A
+
)%0A
@@ -3519,20 +3519,16 @@
direct(%0A
-
|
35e82ceae764048aab3a92c92e4fcfebb624e4fd
|
use the new delete endpoint in client testing
|
test.py
|
test.py
|
import logging
import subprocess
import sys
import os
from nose.tools import raises
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
from luminoso_api import LuminosoClient
from luminoso_api.errors import LuminosoAPIError, LuminosoError
ROOT_CLIENT = None
PROJECT = None
USERNAME = None
PROJECT_NAME = os.environ['USER'] + '-test'
ROOT_URL = 'http://localhost:5000/v3'
def fileno_monkeypatch(self):
return sys.__stdout__.fileno()
import StringIO
StringIO.StringIO.fileno = fileno_monkeypatch
def setup():
"""
Make sure we're working with a fresh database. Build a client for
interacting with that database and save it as a global.
"""
global ROOT_CLIENT, PROJECT, USERNAME
user_info_str = subprocess.check_output('tellme lumi-test', shell=True)
user_info = eval(user_info_str)
USERNAME = user_info['username']
ROOT_CLIENT = LuminosoClient.connect(ROOT_URL,
username=USERNAME,
password=user_info['password'])
# check to see if the project exists; also create the client we'll use
projects = ROOT_CLIENT.get(USERNAME + '/projects')
projlist = [proj['name'] for proj in projects]
PROJECT = ROOT_CLIENT.change_path(USERNAME + '/projects/' + PROJECT_NAME)
if PROJECT_NAME in projlist:
logger.warn('The test database existed already. We have to clean it up.')
PROJECT.delete()
# create the project
logger.info("Creating project: "+PROJECT_NAME)
logger.info("Existing projects: %r" % projlist)
ROOT_CLIENT.post(USERNAME + '/projects', project=PROJECT_NAME)
PROJECT.get()
def test_paths():
"""
Without interacting with the network, make sure our path logic works.
"""
client1 = ROOT_CLIENT.change_path('foo')
assert client1.url == ROOT_CLIENT.url + 'foo/'
client2 = client1.change_path('bar')
assert client2.url == ROOT_CLIENT.url + 'foo/bar/'
client3 = client2.change_path('/baz')
assert client3.url == ROOT_CLIENT.url + 'baz/'
@raises(LuminosoAPIError)
def test_empty_relevance():
"""
The project was just created, so it shouldn't have any terms in it.
"""
PROJECT.get('terms')
def test_upload():
"""
Upload three documents, commit them, and wait for the commit.
Check afterward to ensure that the terms are no longer empty.
"""
docs = [
{'text': 'This is an example',
'title': 'example-1'},
{'text': 'Examples are a great source of inspiration',
'title': 'example-2'},
{'text': 'Great things come in threes',
'title': 'example-3'},
]
job_id = PROJECT.upload('docs', docs)
job_id_2 = PROJECT.post('docs/calculate')
assert job_id_2 > job_id
PROJECT.wait_for(job_id_2)
assert PROJECT.get('terms')
def test_topics():
"""
Manipulate some topics.
One thing we check is that a topic is equal after a round-trip to the
server.
"""
topics = PROJECT.get('topics')
assert topics == []
PROJECT.post('topics',
name='Example topic',
role='topic',
color='#aabbcc',
surface_texts=['Examples']
)
result = PROJECT.get('topics')
assert len(result) == 1
topic = result[0]
assert topic['name'] == 'Example topic'
assert topic['surface_texts'] == ['Examples']
assert topic['color'] == '#aabbcc'
topic_id = topic['_id']
topic2 = PROJECT.get('topics/id/%s' % topic_id)
assert topic2 == topic, '%s != %s' % (topic2, topic)
def teardown():
"""
Pack everything up, we're done.
"""
if ROOT_CLIENT is not None:
ROOT_CLIENT.delete(USERNAME + '/projects/' + PROJECT_NAME)
PROJECT = ROOT_CLIENT.change_path(USERNAME + '/projects/' + PROJECT_NAME)
try:
got = PROJECT.get()
except LuminosoError:
# it should be an error, we just deleted the project
return
else:
assert False, got
|
Python
| 0
|
@@ -1377,22 +1377,26 @@
-P
RO
-JEC
+OT_CLIEN
T.delete
@@ -1396,16 +1396,60 @@
.delete(
+USERNAME + '/projects', project=PROJECT_NAME
)%0A%0A #
@@ -3686,37 +3686,43 @@
AME + '/projects
-/' +
+', project=
PROJECT_NAME)%0A
|
3e96eaeb9bb722d24fe4e589c49e52d32e8af1aa
|
Bump version.
|
mocket/__init__.py
|
mocket/__init__.py
|
try:
# Py2
from mocket import mocketize, Mocket, MocketEntry, Mocketizer
except ImportError:
# Py3
from mocket.mocket import mocketize, Mocket, MocketEntry, Mocketizer
__all__ = (mocketize, Mocket, MocketEntry, Mocketizer)
__version__ = '3.7.0'
|
Python
| 0
|
@@ -257,7 +257,7 @@
3.7.
-0
+1
'%0A
|
049ea0ecf4a3b7bafc3989865a1ec95fc15b8ac8
|
change dogbone test to not check for the number of dogbones, just location. Due to the path changing
|
src/Mod/Path/PathTests/TestPathDressupDogbone.py
|
src/Mod/Path/PathTests/TestPathDressupDogbone.py
|
# -*- coding: utf-8 -*-
# ***************************************************************************
# * *
# * Copyright (c) 2017 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import FreeCAD
import Path
import PathScripts.PathDressupDogbone as PathDressupDogbone
import PathScripts.PathJob as PathJob
import PathScripts.PathProfileFaces as PathProfileFaces
from PathTests.PathTestUtils import PathTestBase
class TestProfile:
def __init__(self, side, direction, path):
self.Side = side
self.Direction = direction
self.Path = Path.Path(path)
self.ToolController = None # default tool 5mm
self.Name = 'Profile'
class TestFeature:
def __init__(self):
self.Path = Path.Path()
def addProperty(self, typ, nam, category, tip):
# pylint: disable=unused-argument
setattr(self, nam, None)
def setEditorMode(self, prop, mode):
pass
class TestDressupDogbone(PathTestBase):
"""Unit tests for the Dogbone dressup."""
def formatBone(self, bone):
return "%d: (%.2f, %.2f)" % (bone[0], bone[1][0], bone[1][1])
def test00(self):
'''Verify bones are inserted for simple moves.'''
base = TestProfile('Inside', 'CW', 'G0 X10 Y10 Z10\nG1 Z0\nG1 Y100\nG1 X12\nG1 Y10\nG1 X10\nG1 Z10')
obj = TestFeature()
db = PathDressupDogbone.ObjectDressup(obj, base)
db.setup(obj, True)
db.execute(obj, False)
self.assertEqual(len(db.bones), 4)
self.assertEqual("1: (10.00, 100.00)", self.formatBone(db.bones[0]))
self.assertEqual("2: (12.00, 100.00)", self.formatBone(db.bones[1]))
self.assertEqual("3: (12.00, 10.00)", self.formatBone(db.bones[2]))
self.assertEqual("4: (10.00, 10.00)", self.formatBone(db.bones[3]))
def test01(self):
'''Verify bones are inserted if hole ends with rapid move out.'''
base = TestProfile('Inside', 'CW', 'G0 X10 Y10 Z10\nG1 Z0\nG1 Y100\nG1 X12\nG1 Y10\nG1 X10\nG0 Z10')
obj = TestFeature()
db = PathDressupDogbone.ObjectDressup(obj, base)
db.setup(obj, True)
db.execute(obj, False)
self.assertEqual(len(db.bones), 4)
self.assertEqual("1: (10.00, 100.00)", self.formatBone(db.bones[0]))
self.assertEqual("2: (12.00, 100.00)", self.formatBone(db.bones[1]))
self.assertEqual("3: (12.00, 10.00)", self.formatBone(db.bones[2]))
self.assertEqual("4: (10.00, 10.00)", self.formatBone(db.bones[3]))
def test02(self):
'''Verify bones are correctly generated for a Profile.'''
doc = FreeCAD.newDocument("TestDressupDogbone")
# This is a real world test to make sure none of the tool chain broke
box0 = doc.addObject('Part::Box', 'Box')
box0.Width = 100
box0.Length = 100
box0.Height = 10
box1 = doc.addObject('Part::Box', 'Box')
box1.Width = 50
box1.Length = 50
box1.Height = 20
box1.Placement = FreeCAD.Placement(FreeCAD.Vector(25,25,-5), FreeCAD.Rotation(FreeCAD.Vector(0,0,1), 0))
doc.recompute()
cut = doc.addObject('Part::Cut', 'Cut')
cut.Base = box0
cut.Tool = box1
doc.recompute()
for i in range(11):
face = "Face%d" % (i+1)
f = cut.Shape.getElement(face)
if f.Surface.Axis == FreeCAD.Vector(0,0,1) and f.Orientation == 'Forward':
break
PathJob.Create('Job', [cut], None)
profile = PathProfileFaces.Create('Profile Faces')
profile.Base = (cut, face)
profile.StepDown = 5
# set start and final depth in order to eliminate effects of stock (and its default values)
profile.setExpression('StartDepth', None)
profile.StartDepth = 10
profile.setExpression('FinalDepth', None)
profile.FinalDepth = 0
profile.processHoles = True
profile.processPerimeter = True
doc.recompute()
dogbone = PathDressupDogbone.Create(profile)
doc.recompute()
dog = dogbone.Proxy
locs = sorted([bone[1] for bone in dog.bones], key=lambda xy: xy[0] * 1000 + xy[1])
def formatBoneLoc(pt):
return "(%.2f, %.2f)" % (pt[0], pt[1])
# Make sure we get 8 bones, 2 in each corner (different heights)
self.assertEqual(len(locs), 8)
self.assertEqual("(27.50, 27.50)", formatBoneLoc(locs[0]))
self.assertEqual("(27.50, 27.50)", formatBoneLoc(locs[1]))
self.assertEqual("(27.50, 72.50)", formatBoneLoc(locs[2]))
self.assertEqual("(27.50, 72.50)", formatBoneLoc(locs[3]))
self.assertEqual("(72.50, 27.50)", formatBoneLoc(locs[4]))
self.assertEqual("(72.50, 27.50)", formatBoneLoc(locs[5]))
self.assertEqual("(72.50, 72.50)", formatBoneLoc(locs[6]))
self.assertEqual("(72.50, 72.50)", formatBoneLoc(locs[7]))
FreeCAD.closeDocument("TestDressupDogbone")
|
Python
| 0
|
@@ -5800,32 +5800,169 @@
heights)%0A
+ # with start point changes it passes back over the same spot multiple times, so just make sure they are in the right locations%0A #
self.assertEqua
|
4f6ceb1147d0c326f55621de4b60c440d22d336a
|
add basis parsing to volume parser
|
envision/envision/parser/vasp/volume.py
|
envision/envision/parser/vasp/volume.py
|
#
# ENVISIoN
#
# Copyright (c) 2017 Fredrik Segerhammar
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import itertools
import h5py
import re
import numpy as np
from ..h5writer import _write_volume
line_reg_int = re.compile(r'^( *[+-]?[0-9]+){3} *$')
line_reg_float = re.compile(r'( *[+-]?[0-9]*\.[0-9]+(?:[eE][+-]?[0-9]+)? *)+')
def parse_volume(f, h5_path, volume):
"""Parse a volume.
Keyword arguments:
"""
array = []
datasize = None
data = None
for line in f:
match_float = line_reg_float.match(line)
match_int = line_reg_int.match(line)
if match_int:
data = ([int(v) for v in line.split()])
datasize = data[0]*data[1]*data[2]
elif data and match_float:
for element in line.split():
array.append(float(element))
if len(array) == datasize:
return array, data
else:
data = None
return None, None
def charge(chg_file, h5_path):
try:
with open(chg_file, 'r') as f:
try:
with h5py.File(h5_path, 'a') as h5:
for i in itertools.count():
array, data = parse_volume(f, h5_path, "CHG")
if not array:
break
_write_volume(h5, i, array, data, "CHG")
h5['CHG/final'] = h5py.SoftLink('CHG/{}'.format(i-1,'04d'))
except Exception:
print("CHG dataset already exists.")
return
except FileNotFoundError:
print("CHG file not found.")
return
def elf(elf_file, h5_path):
try:
with open(chg_file, 'r') as f:
try:
with h5py.File(h5_path, 'a') as h5:
for i in itertools.count():
array, data = parse_volume(f, h5_path, "ELF")
if not array:
break
_write_volume(h5, i, array, data, "ELF")
h5['ELF/final'] = h5py.SoftLink('ELF/{}'.format(i-1,'04d'))
except Exception:
print("ELF dataset already exists.")
return
except FileNotFoundError:
print("ELF file not found.")
return
|
Python
| 0.000001
|
@@ -1486,16 +1486,91 @@
e_volume
+%0D%0Afrom ..h5writer import _write_basis%0D%0Afrom .unitcell import _parse_lattice
%0A%0Aline_r
@@ -1789,16 +1789,93 @@
%0A %22%22%22
+%0D%0A %0D%0A basis = _parse_lattice(f)%0D%0A _write_basis(h5_path, basis)%0D%0A
%0A arr
|
fe6b00078507158ec823c61d9d78e29a08ade084
|
fix args err
|
BEGAN/began_train.py
|
BEGAN/began_train.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import tensorflow as tf
import numpy as np
import sys
import time
import began_model as began
from dataset import DataIterator
from dataset import CelebADataSet as DataSet
sys.path.append('../')
import image_utils as iu
results = {
'output': './gen_img/',
'checkpoint': './model/checkpoint',
'model': './model/BEGAN-model.ckpt'
}
train_step = {
'epoch': 25,
'batch_size': 32,
}
def main():
start_time = time.time() # Clocking start
# GPU configure
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
with tf.Session(config=config) as s:
# BEGAN Model
model = began.BEGAN(s) # BEGAN
# Initializing
s.run(tf.global_variables_initializer())
# Celeb-A DataSet images
ds = DataSet(input_height=64,
input_width=64,
input_channel=3,
mode='r').images
dataset_iter = DataIterator(ds, None, train_step['batch_size'],
label_off=True)
sample_x = ds[:model.batch_size]
sample_z = np.random.uniform(-1., 1., [model.batch_size, model.z_dim]).astype(np.float32) # 32 x 128
kt = tf.Variable(0., dtype=tf.float32)
for epoch in range(train_step['epoch']):
d_losses, g_losses = [], []
for batch_images, _ in dataset_iter.iterate():
batch_x = batch_images
batch_z = np.random.uniform(-1., 1., [model.batch_size, model.z_dim]).astype(np.float32) # 32 x 128
# Update k_t
# k_t+1 = k_t + lambda_k * (gamma * d_real - d_fake)
kt += model.lambda_k * (model.gamma * model.d_real - model.d_fake)
# Update D network
_, d_loss = s.run([model.d_op, model.d_loss],
feed_dict={
model.x: batch_x,
model.z: batch_z,
model.kt: kt,
})
# Update G network
_, g_loss = s.run([model.g_op, model.g_loss],
feed_dict={
model.z: batch_z,
model.kt: kt,
})
# savin' losses
d_losses.append(d_loss)
g_losses.append(g_loss)
# Summary
"""
d_loss, g_loss, summary = s.run([model.d_loss, model.g_loss, model.merged],
feed_dict={
model.x: batch_x,
model.z: batch_z,
})
# Summary saver
model.writer.add_summary(summary, epoch)
"""
# Print loss
print("[+] Epoch %04d => " % epoch,
" D loss : {:.8f}".format(tf.reduce_mean(d_losses)),
" G loss : {:.8f}".format(tf.reduce_mean(g_losses)))
# Training G model with sample image and noise
samples = s.run(model.g,
feed_dict={
model.x: sample_x,
model.z: sample_z,
model.kt: kt,
})
# Export image generated by model G
sample_image_height = model.sample_size
sample_image_width = model.sample_size
sample_dir = results['output'] + 'train_{:03d}.png'.format(epoch)
# Generated image save
iu.save_images(samples,
size=[sample_image_height, sample_image_width],
image_path=sample_dir)
# Model save
model.saver.save(s, results['model'], global_step=epoch)
end_time = time.time() - start_time # Clocking end
# Elapsed time
print("[+] Elapsed time {:.8f}s".format(end_time))
# Close tf.Session
s.close()
if __name__ == '__main__':
main()
|
Python
| 0.99997
|
@@ -1469,11 +1469,8 @@
ages
-, _
in
|
469b07c2d13486e760b485025dfa08c08b6696a7
|
Fix logger name to match module name
|
check_python.py
|
check_python.py
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs Pylint and Pytype."""
import argparse
import json
import logging
import os
import pathlib
import shutil
import sys
import subprocess
import tempfile
class Workspace:
"""Represents a temporary workspace for Pylint and Pytype."""
def __init__(self, params_file: pathlib.Path) -> None:
params = json.loads(params_file.read_text(encoding='utf-8'))
workspace_name = 'phst_rules_elisp'
srcs = []
tempdir = pathlib.Path(tempfile.mkdtemp(prefix='pylint-'))
for file in params['srcs']:
dest = tempdir / workspace_name / file['rel']
dest.parent.mkdir(parents=True, exist_ok=True)
shutil.copy(file['src'], dest)
if not file['ext']:
srcs.append(dest)
if not srcs:
raise FileNotFoundError('no source files found')
for dirpath, _, _ in os.walk(tempdir):
dirpath = pathlib.Path(dirpath)
if dirpath != tempdir:
# Mimic the Bazel behavior. Also see
# https://github.com/bazelbuild/bazel/issues/10076.
(dirpath / '__init__.py').touch()
_logger.info('using temporary workspace: %s', tempdir)
self.srcs = frozenset(srcs)
self.path = [str(tempdir)] + [str(tempdir / d) for d in params['path']]
self.tempdir = tempdir
self._output = pathlib.Path(params['out'])
def success(self) -> None:
"""Clean up the temporary directory."""
shutil.rmtree(self.tempdir)
self.tempdir = None
self._output.touch()
def _main() -> None:
parser = argparse.ArgumentParser(allow_abbrev=False)
parser.add_argument('--params', type=pathlib.Path, required=True)
parser.add_argument('--pylintrc', type=pathlib.Path, required=True)
args = parser.parse_args()
logging.getLogger('phst_rules_elisp').setLevel(logging.INFO)
# Set a fake PYTHONPATH so that Pylint and Pytype can find imports for the
# main and external workspaces.
workspace = Workspace(args.params)
# Pytype wants a Python binary available under the name “python”. See the
# function pytype.tools.environment.check_python_exe_or_die.
bindir = workspace.tempdir / 'bin'
bindir.mkdir()
(bindir / 'python').symlink_to(sys.executable)
cwd = workspace.tempdir / 'phst_rules_elisp'
env = dict(os.environ,
PATH=os.pathsep.join([str(bindir)] + os.get_exec_path()),
PYTHONPATH=os.pathsep.join(sys.path + workspace.path))
result = subprocess.run(
[sys.executable, '-m', 'pylint',
'--persistent=no', '--rcfile=' + str(args.pylintrc.resolve()), '--']
+ [str(file.relative_to(cwd))
for file in sorted(workspace.srcs)],
check=False, cwd=cwd, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
encoding='utf-8', errors='backslashreplace')
if result.returncode:
print(result.stdout)
sys.exit(result.returncode)
if os.name == 'posix':
result = subprocess.run(
[sys.executable, '-m', 'pytype',
'--no-cache', '--'] + [str(file.relative_to(cwd))
for file in sorted(workspace.srcs)],
check=False, cwd=cwd, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
encoding='utf-8', errors='backslashreplace')
if result.returncode:
print(result.stdout)
sys.exit(result.returncode)
# Only clean up the workspace if we exited successfully, to help with
# debugging.
workspace.success()
_logger = logging.getLogger('phst_rules_elisp.run_pylint')
if __name__ == '__main__':
_main()
|
Python
| 0.000003
|
@@ -4235,18 +4235,20 @@
isp.
-run_pylint
+check_python
')%0A%0A
|
d043afa075cfdb347172047ac17082911c637ef9
|
set default values for required config variables
|
packages/grid/backend/app/app/core/config.py
|
packages/grid/backend/app/app/core/config.py
|
# stdlib
import secrets
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Union
# third party
from pydantic import AnyHttpUrl
from pydantic import BaseSettings
from pydantic import EmailStr
from pydantic import HttpUrl
from pydantic import PostgresDsn
from pydantic import validator
class Settings(BaseSettings):
API_V1_STR: str = "/api/v1"
SECRET_KEY: str = secrets.token_urlsafe(32)
# 60 minutes * 24 hours * 8 days = 8 days
ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 * 24 * 8
SERVER_NAME: str
SERVER_HOST: AnyHttpUrl
# BACKEND_CORS_ORIGINS is a JSON-formatted list of origins
# e.g: '["http://localhost", "http://localhost:4200", "http://localhost:3000", \
# "http://localhost:8080", "http://local.dockertoolbox.tiangolo.com"]'
BACKEND_CORS_ORIGINS: List[AnyHttpUrl] = []
@validator("BACKEND_CORS_ORIGINS", pre=True)
def assemble_cors_origins(cls, v: Union[str, List[str]]) -> Union[List[str], str]:
if isinstance(v, str) and not v.startswith("["):
return [i.strip() for i in v.split(",")]
elif isinstance(v, (list, str)):
return v
raise ValueError(v)
PROJECT_NAME: str
SENTRY_DSN: Optional[HttpUrl] = None
@validator("SENTRY_DSN", pre=True)
def sentry_dsn_can_be_blank(cls, v: str) -> Optional[str]:
if len(v) == 0:
return None
return v
POSTGRES_SERVER: str
POSTGRES_USER: str
POSTGRES_PASSWORD: str
POSTGRES_DB: str
SQLALCHEMY_DATABASE_URI: Optional[PostgresDsn] = None
@validator("SQLALCHEMY_DATABASE_URI", pre=True)
def assemble_db_connection(cls, v: Optional[str], values: Dict[str, Any]) -> Any:
if isinstance(v, str):
return v
return PostgresDsn.build(
scheme="postgresql",
user=values.get("POSTGRES_USER"),
password=values.get("POSTGRES_PASSWORD"),
host=values.get("POSTGRES_SERVER"),
path=f"/{values.get('POSTGRES_DB') or ''}",
)
SMTP_TLS: bool = True
SMTP_PORT: Optional[int] = None
SMTP_HOST: Optional[str] = None
SMTP_USER: Optional[str] = None
SMTP_PASSWORD: Optional[str] = None
EMAILS_FROM_EMAIL: Optional[EmailStr] = None
EMAILS_FROM_NAME: Optional[str] = None
@validator("EMAILS_FROM_NAME")
def get_project_name(cls, v: Optional[str], values: Dict[str, Any]) -> str:
if not v:
return values["PROJECT_NAME"]
return v
EMAIL_RESET_TOKEN_EXPIRE_HOURS: int = 48
EMAIL_TEMPLATES_DIR: str = "/app/app/email-templates/build"
EMAILS_ENABLED: bool = False
@validator("EMAILS_ENABLED", pre=True)
def get_emails_enabled(cls, v: bool, values: Dict[str, Any]) -> bool:
return bool(
values.get("SMTP_HOST")
and values.get("SMTP_PORT")
and values.get("EMAILS_FROM_EMAIL")
)
EMAIL_TEST_USER: EmailStr = "test@example.com"
FIRST_SUPERUSER: EmailStr
FIRST_SUPERUSER_PASSWORD: str
USERS_OPEN_REGISTRATION: bool = False
DOMAIN_NAME: str
STREAM_QUEUE: bool = False
NODE_TYPE: str = "Domain"
class Config:
case_sensitive = True
settings = Settings()
|
Python
| 0.000003
|
@@ -570,24 +570,41 @@
ER_NAME: str
+ = %22unconfigured%22
%0A SERVER_
@@ -619,16 +619,38 @@
yHttpUrl
+ = %22https://localhost%22
%0A # B
@@ -1268,24 +1268,33 @@
CT_NAME: str
+ = %22grid%22
%0A SENTRY_
@@ -1420,19 +1420,23 @@
ptional%5B
-str
+HttpUrl
%5D:%0A
@@ -1441,16 +1441,29 @@
if
+v is None or
len(v) =
@@ -1529,24 +1529,38 @@
_SERVER: str
+ = %22localhost%22
%0A POSTGRE
@@ -1566,24 +1566,33 @@
ES_USER: str
+ = %22user%22
%0A POSTGRE
@@ -1602,24 +1602,32 @@
ASSWORD: str
+ = %22pwd%22
%0A POSTGRE
@@ -1631,24 +1631,31 @@
GRES_DB: str
+ = %22db%22
%0A SQLALCH
@@ -3130,16 +3130,39 @@
EmailStr
+ = %22info@openmined.org%22
%0A FIR
@@ -3183,24 +3183,39 @@
ASSWORD: str
+ = %22changethis%22
%0A USERS_O
@@ -3266,16 +3266,32 @@
AME: str
+ = %22grid_domain%22
%0A STR
|
0af1c7da5d030a66d1e3a9054c15429618048cc3
|
Extend the unlink method
|
obuka/obuka.py
|
obuka/obuka.py
|
# -*- coding: utf-8 -*-
from osv import osv, fields
class obuka_session(osv.osv):
_name = "obuka.session"
_order = 'name'
def _obuka_occupied(self, cr, uid, ids, name, arg, context=None):
res = {}
for obuka in self.browse(cr, uid, ids, context):
#import pdb; pdb.set_trace()
curr_partner_number = obuka.partner_number
curr_attendees_number = len(obuka.attendee_ids)
if curr_partner_number == 0:
res[obuka.id] = 0
else:
res[obuka.id] = float(curr_attendees_number) / curr_partner_number * 100
return res
_columns = {
'name': fields.char('Name', size=32, required=True,readonly=True,
states={'draft': [('readonly', False)]}
),
'description': fields.text('Description'),
'state': fields.selection([
('draft', 'Draft'),
('done', 'Done'),
('cancel', 'Cancel'),
],'State',
required=True,
readonly=True),
'responsible_id': fields.many2one('res.users', 'Responsible user'),
'course_ids': fields.one2many('obuka.course', 'session_id', 'Course'),
'attendee_ids': fields.many2many('res.partner',
'session_partner_rel',
'session_id',
'partner_id',
'Attendees'),
'partner_number': fields.integer('Max number of attendees',
required=True),
'occupied': fields.function(_obuka_occupied,
method=True,
string='Occupied(%)',
type='float', store=True)
}
_defaults = {
'partner_number': 1
}
def session_done(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {'state':'done'})
return True
def session_draft(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {'state':'draft'})
return True
def session_cancel(self, cr, uid, ids, *args):
self.write(cr, uid, ids, {'state':'cancel'})
return True
def onchange_partner_number(self, cr, uid, ids, partner_number):
val = {}
warn = {}
if ids:
len_attendees = len(self.browse(cr, uid, ids[0]).attendee_ids)
curr_partner_number = self.browse(cr, uid, ids[0]).partner_number
if len_attendees > partner_number:
val['partner_number'] = curr_partner_number
warn['title'] = 'Warning'
warn['message'] = 'Max number of partners is too low! Returning to orginal value'
return {'value': val, 'warning' : warn}
def copy(self, cr, uid, id, default, context=None):
default.update({
'name': self.browse(cr, uid, id).name + ' (copy)'
})
return super(obuka_session, self).copy(cr, uid, id, default, context=context)
obuka_session()
class obuka_course(osv.osv):
_name = 'obuka.course'
_columns = {
'name': fields.char('Name', size=32),
'date': fields.date('Date'),
'description': fields.text('Description'),
'instructor_id': fields.many2one('res.partner', 'Instructor',
help='Instructor that is in charge'),
'max_users': fields.integer('Max users'),
'session_id': fields.many2one('obuka.session', 'Session')
}
_defaults = {
'max_users':10
}
_sql_constraints = [
('max_max_users', 'CHECK(max_users<50)', 'Max users limit exceeded'),
]
obuka_course()
|
Python
| 0
|
@@ -2972,16 +2972,363 @@
ntext)%0A%0A
+ def unlink(self, cr, uid, ids, context=None):%0A for obuka in self.browse(cr, uid, ids, context=context):%0A if obuka.state in %5B'done', 'cancel'%5D:%0A raise osv.except_osv('Error',%0A %22You can't delete this data!%22)%0A return super(obuka_session, self).unlink(cr, uid, ids, context=context)%0A%0A
%0A%0Aobuka_
|
812c40bfaf2ef4f59643c53e8b8ac76f20777423
|
Modify a debian example to archlinux
|
fabtools/arch.py
|
fabtools/arch.py
|
"""
Archlinux packages
==================
This module provides tools to manage Archlinux packages
and repositories.
"""
from __future__ import with_statement
from fabric.api import hide, run, settings
from fabtools.utils import run_as_root
MANAGER = 'LC_ALL=C pacman'
def update_index(quiet=True):
"""
Update pacman package definitions.
"""
manager = MANAGER
if quiet:
with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
run_as_root("%(manager)s -Sy" % locals())
else:
run_as_root("%(manager)s -Sy" % locals())
def upgrade():
"""
Upgrade all packages.
"""
manager = MANAGER
run_as_root("%(manager)s -Su" % locals(), pty=False)
def is_installed(pkg_name):
"""
Check if a package is installed.
"""
manager = MANAGER
with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
res = run("%(manager)s -Q %(pkg_name)s" % locals())
return res.succeeded
def install(packages, update=False, options=None):
"""
Install one or more packages.
If *update* is ``True``, the package definitions will be updated
first, using :py:func:`~fabtools.arch.update_index`.
Extra *options* may be passed to ``pacman`` if necessary.
Example::
import fabtools
# Update index, then install a single package
fabtools.arch.install('build-essential', update=True)
# Install multiple packages
fabtools.arch.install([
'python-dev',
'libxml2-dev',
])
"""
manager = MANAGER
if update:
update_index()
if options is None:
options = []
if not isinstance(packages, basestring):
packages = " ".join(packages)
options.append("-q")
options = " ".join(options)
cmd = '%(manager)s -S %(options)s %(packages)s' % locals()
run_as_root(cmd, pty=False)
def uninstall(packages, options=None):
"""
Remove one or more packages.
Extra *options* may be passed to ``pacman`` if necessary.
"""
manager = MANAGER
if options is None:
options = []
if not isinstance(packages, basestring):
packages = " ".join(packages)
options = " ".join(options)
cmd = '%(manager)s -R %(options)s %(packages)s' % locals()
run_as_root(cmd, pty=False)
|
Python
| 0.999775
|
@@ -1425,23 +1425,15 @@
ll('
-build-essential
+mongodb
', u
@@ -1530,18 +1530,15 @@
'
-python-dev
+mongodb
',%0A
@@ -1553,19 +1553,22 @@
'
-libxml2-dev
+python-pymongo
',%0A
|
97805f122e18831ac69a59f3cb9ac539d8a65246
|
Disable up to date checking. It has a huge overall system perf penalty.
|
src/db_stub.py
|
src/db_stub.py
|
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import daemon
import db
import re
import time
from dyn_object import *
from trace_event import *
# TODO(nduca): is Stub the right word for this class? Mehh
class DBStub(object):
def __init__(self, settings, server):
self.db = db.DB(settings)
self.db.needs_indexing.add_listener(self.on_db_needs_indexing)
self.server = server
self.hi_idle_hook_added = False
server.add_json_route('/dirs/add', self.add_dir, ['POST'])
server.add_json_route('/dirs', self.list_dirs, ['GET'])
server.add_json_route('/dirs/([a-zA-Z0-9]+)', self.get_dir, ['GET'])
server.add_json_route('/dirs/([a-zA-Z0-9]+)', self.delete_dir, ['DELETE'])
server.add_json_route('/ignores', self.get_ignores, ['GET'])
server.add_json_route('/ignores/add', self.ignores_add, ['POST'])
server.add_json_route('/ignores/remove', self.ignores_remove, ['POST'])
server.add_json_route('/sync', self.sync, ['POST'])
server.add_json_route('/status', self.status, ['GET'])
server.add_json_route('/search', self.search, ['POST'])
if not self.db.is_up_to_date:
self.on_db_needs_indexing()
self.server.hi_idle.add_listener(self.on_daemon_lo_idle)
self._last_flush_time = 0
def on_db_needs_indexing(self):
if self.hi_idle_hook_added:
return
self.server.hi_idle.add_listener(self.on_daemon_hi_idle)
def on_daemon_lo_idle(self):
self.db.check_up_to_date_a_bit_more()
if time.time() - self._last_flush_time > 5:
trace_flush()
self._last_flush_time = time.time()
def on_daemon_hi_idle(self):
self.db.step_indexer()
if self.db.is_up_to_date:
self.server.hi_idle.remove_listener(self.on_daemon_hi_idle)
self.hi_idle_hook_added = False
def add_dir(self, m, verb, data):
d = self.db.add_dir(data.path)
return {"id": d.id,
"status": 'OK'}
def list_dirs(self, m, verb, data):
return map(lambda d: d.__getstate__(), self.db.dirs)
def get_dir(self, m, verb, data):
id = m.group(1)
for d in self.db.dirs:
if d.id == id:
return d.__getstate__()
raise daemon.NotFoundException()
def delete_dir(self, m, verb, data):
id = m.group(1)
for d in self.db.dirs:
if d.id == id:
self.db.delete_dir(d)
return {"status": 'OK'}
raise daemon.NotFoundException()
def get_ignores(self, m, verb, data):
return self.db.ignores
def ignores_add(self, m, verb, data):
self.db.ignore(data)
return {"status": "OK"}
def ignores_remove(self, m, verb, data):
try:
self.db.unignore(data)
except Exception:
raise daemon.SilentException()
return {"status": "OK"}
def search(self, m, verb, data):
q = re.compile(data)
res = self.db.search(data)
return res
def sync(self, m, verb, data):
self.db.sync()
return {"status": "OK"}
def status(self, m, verb, data):
return self.db.status()
|
Python
| 0
|
@@ -1936,32 +1936,34 @@
_idle(self):%0A
+ #
self.db.check_u
|
c9aff74371f176daa011514a05875f59c86a33c6
|
Refactor CLI argument parsing.
|
checker/main.py
|
checker/main.py
|
#!/usr/bin/env python
import os
import sys
import subprocess
import getopt
class Checker:
def __init__(self, path):
if not os.path.isdir(path):
sys.exit(1);
self.path = os.path.realpath(path)
self.jobs = self.getExecutableFiles(self.path)
def getExecutableFiles(self,path):
files = []
for dirname, dirnames, filenames in os.walk(path):
for filename in filenames:
filename_path = os.path.join(dirname, filename)
if os.access(filename_path,os.X_OK):
files.append(filename_path)
return files;
def run(self):
for job in self.jobs:
subprocess.call(job)
if __name__ == '__main__':
opts, path = getopt.getopt(sys.argv[1], "h")
for opt, arg in opts:
if opt == '-h':
print './main.py /full/path/to/jobs'
sys.exit()
check = Checker(path)
check.run()
|
Python
| 0
|
@@ -61,22 +61,24 @@
%0Aimport
-getopt
+argparse
%0A%0A%0Aclass
@@ -741,174 +741,419 @@
-opts, path = getopt.getopt(sys.argv%5B1%5D, %22h%22)%0A for opt, arg in opts:%0A if opt == '-h':%0A print './main.py /full/path/to/jobs'%0A sys.exit()
+# Add CLI parsing.%0A parser = argparse.ArgumentParser(%0A description = %22A script that runs all the jobs in the given directory and keeps track of responses in an sqlite database.%22)%0A parser.add_argument('path', metavar='jobs-directory', type=str, nargs=1,%0A help='Path to the directory where executable jobs are.')%0A args = parser.parse_args()%0A%0A # Initialize and run the checker.
%0A
@@ -1169,20 +1169,28 @@
Checker(
+args.
path
+%5B0%5D
)%0A ch
|
cb97332633a0e34205abdbc97f4e85a1342ea2ed
|
add code in test ean
|
erpeek_inventory/erpeek_article_test.py
|
erpeek_inventory/erpeek_article_test.py
|
# -*- coding: utf-8 -*-
###############################################################################
#
# ODOO (ex OpenERP)
# Open Source Management Solution
# Copyright (C) 2001-2015 Micronaet S.r.l. (<http://www.micronaet.it>)
# Developer: Nicola Riolini @thebrush (<https://it.linkedin.com/in/thebrush>)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import sys
import os
import csv
import erppeek
import ConfigParser
# Set up parameters (for connection to Open ERP Database) *********************
path = '~/ETL/GPB'
config_file = os.path.join(os.path.expanduser(path), 'openerp.cfg')
config = ConfigParser.ConfigParser()
config.read([config_file])
# Connection:
server = config.get('dbaccess', 'server')
port = config.get('dbaccess', 'port')
database = config.get('dbaccess','dbname')
user = config.get('dbaccess', 'user')
password = config.get('dbaccess', 'pwd')
# File CSV
filename = os.path.expanduser(config.get('csv', 'filename') )
#logfile = os.path.expanduser(config.get('csv', 'logfile') )
delimiter = config.get('csv', 'delimiter')
header = eval(config.get('csv', 'header'))
odoo = erppeek.Client(
'http://%s:%s' % (server, port),
db=database,
user=user,
password=password,
)
product = odoo.model('product.product')
i = -header
#log = open(logfile, 'w')
for row in csv.reader(
open(filename, 'rb'), delimiter=delimiter):
try:
i += 1
if i <= 0:
continue # jump line
default_code = row[0].strip()
ean13 = row[1]
product_ids = product.search([('default_code', 'ilike', default_code)])
print '\nBLOCK', default_code, 'TOTAL:', len(product_ids)
if product_ids:
#product.write(product_ids, {'ean13': ean13, })
for variant in product.browse(product_ids):
print (
"INFO Code", ean13, variant.ean13,
"KO *******************" if ean13 != variant.ean13 else "",
)
else:
print 'ERR Code:', default_code, 'not found'
except:
print 'Unmanaged error:', default_code, sys.exc_info()
|
Python
| 0.000001
|
@@ -2548,16 +2548,38 @@
O Code%22,
+ variant.default_code,
ean13,
|
3c231fb34f8adb1d290f2cfc0164dbea6049bc34
|
Reorder methods in test.py
|
test.py
|
test.py
|
from ethjsonrpc import EthJsonRpc
methods = [
'web3_clientVersion',
'net_version',
'net_listening',
'net_peerCount',
'eth_protocolVersion',
'eth_coinbase',
'eth_mining',
'eth_hashrate',
'eth_gasPrice',
'eth_accounts',
'eth_blockNumber',
'eth_getCompilers',
'eth_newPendingTransactionFilter',
'eth_getWork',
# 'shh_version',
# 'shh_newIdentity',
# 'shh_newGroup',
]
c = EthJsonRpc()
print len(methods)
for m in methods:
meth = getattr(c, m)
result = meth()
print '%s: %s (%s)' % (m, result, type(result))
|
Python
| 0.000003
|
@@ -98,17 +98,17 @@
net_
-listening
+peerCount
',%0A
@@ -111,33 +111,33 @@
',%0A 'net_
-peerCount
+listening
',%0A 'eth_
|
33824535f4938c6b2b5170d0bb952d5cac8e3408
|
fix missing database commit, thanks Jeff!
|
checkservers.py
|
checkservers.py
|
#!/usr/bin/env python
# Copyright (c) 2009, Steve Oliver (steve@xercestech.com)
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
#THIS SOFTWARE IS PROVIDED BY STEVE OLIVER ''AS IS'' AND ANY
#EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
#WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#DISCLAIMED. IN NO EVENT SHALL STEVE OLIVER BE LIABLE FOR ANY
#DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
#(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
#LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
#ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
#SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from google.appengine.api import urlfetch
from google.appengine.ext import webapp
from google.appengine.api import users
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.api import mail
from google.appengine.api.urlfetch import DownloadError
import cgi
import datetime
import time
import logging
import wsgiref.handlers
from models import Server, AdminOptions
import prowlpy
#import twitter
class CheckServers(webapp.RequestHandler):
serverlist = db.GqlQuery("SELECT * FROM Server")
adminoptions = AdminOptions.get_by_key_name('credentials')
def updateuptime(self,server):
now = time.mktime(datetime.datetime.now().timetuple())
servercameback = time.mktime(server.timeservercameback.timetuple())
difference = now - servercameback
MINUTE = 60
HOUR = MINUTE * 60
DAY = HOUR * 24
days = int( difference / DAY )
hours = int( ( difference % DAY ) / HOUR )
minutes = int( ( difference % HOUR ) / MINUTE )
seconds = int( difference % MINUTE )
string = ""
if days> 0:
string += str(days) + " " + (days == 1 and "day" or "days" ) + ", "
if len(string)> 0 or hours> 0:
string += str(hours) + " " + (hours == 1 and "hour" or "hours" ) + ", "
if len(string)> 0 or minutes> 0:
string += str(minutes) + " " + (minutes == 1 and "minute" or "minutes" ) + ", "
string += str(seconds) + " " + (seconds == 1 and "second" or "seconds" )
server.uptime = string
server.put()
def serverisup(self,server,responsecode):
if server.status == False:
self.servercameback(server)
server.status = True
server.falsepositivecheck = False
server.responsecode = int(responsecode)
server.uptimecounter = server.uptimecounter + 1
self.updateuptime(server)
server.put()
def serverisdown(self,server,responsecode):
server.status = False
server.uptimecounter = 0
server.uptime = "0"
server.responsecode = int(responsecode)
server.timeservercameback = 0
server.put()
if server.notifylimiter == False:
if server.notifywithprowl:
self.notifyprowl(server)
if server.notifywithemail:
self.notifyemail(server)
else:
pass
def servercameback(self,server):
server.timeservercameback = datetime.datetime.now()
def testserver(self,server):
if server.ssl:
prefix = "https://"
else:
prefix = "http://"
try:
url = prefix + "%s" % server.serverdomain
result = urlfetch.fetch(url, headers = {'Cache-Control' : 'max-age=30'}, deadline=10 )
except DownloadError:
if server.falsepositivecheck:
self.serverisdown(server,000)
else:
server.falsepositivecheck = True
else:
if result.status_code == 500:
self.serverisdown(server,result.status_code)
else:
self.serverisup(server,result.status_code)
def notifyemail(self,server):
message = mail.EmailMessage()
message.sender = server.email
message.subject = "%s is down" % server.serverdomain
message.to = server.email
message.body = "HTTP response code %s" % server.responsecode
message.send()
server.notifylimiter = True
server.put()
def notifytwitter(self,server):
pass
#api = twitter.Api(username="%s" % self.adminoptions.twitteruser , password="%s" % self.adminoptions.twitterpass)
#api.PostDirectMessage(self.adminoptions.twitteruser, "%s is down" % server.serverdomain)
#server.notifylimiter = True
#server.put()
def notifyprowl(self,server):
prowlkey = self.adminoptions.prowlkey
prowlnotifier = prowlpy.Prowl(prowlkey)
try:
prowlnotifier.add('Server Monitor','Server %s is Down' % server.serverdomain, 'error code %s' % server.responsecode)
except:
logging.error('prowl notify failed, you may need to check your API key')
server.notifylimiter = True
server.put()
def get(self):
for server in self.serverlist:
self.testserver(server)
def main():
application = webapp.WSGIApplication([('/checkservers', CheckServers)],debug=True)
wsgiref.handlers.CGIHandler().run(application)
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -4172,16 +4172,33 @@
= True%0A
+%09%09%09%09server.put()%0A
%09%09else:%0A
|
7a1ad4ae0e3ec15c1fd5aec763476e482ea76ba8
|
Make a better version of shuffle
|
somber/components/utilities.py
|
somber/components/utilities.py
|
"""Utility functions."""
import numpy as np
class Scaler(object):
"""
Scales data based on the mean and standard deviation.
Attributes
----------
mean : numpy array
The columnwise mean of the data after scaling.
std : numpy array
The columnwise standard deviation of the data after scaling.
is_fit : bool
Indicates whether this scaler has been fit yet.
"""
def __init__(self):
"""Initialize the scaler."""
self.mean = None
self.std = None
self.is_fit = False
def fit_transform(self, X):
"""First call fit, then call transform."""
self.fit(X)
return self.transform(X)
def fit(self, X):
"""
Fit the scaler based on some data.
Takes the columnwise mean and standard deviation of the entire input
array.
If the array has more than 2 dimensions, it is flattened.
Parameters
----------
X : numpy array
Returns
-------
scaled : numpy array
A scaled version of said array.
"""
if X.ndim > 2:
X = X.reshape((np.prod(X.shape[:-1]), X.shape[-1]))
self.mean = X.mean(0)
self.std = X.std(0)
self.is_fit = True
return self
def transform(self, X):
"""Transform your data to zero mean unit variance."""
if not self.is_fit:
raise ValueError("The scaler has not been fit yet.")
return (X-self.mean) / (self.std + 10e-7)
def inverse_transform(self, X):
"""Invert the transformation."""
return ((X * self.std) + self.mean)
def shuffle(array):
"""Gpu/cpu-agnostic shuffle function."""
z = array.copy()
np.random.shuffle(z)
return z
|
Python
| 0.998703
|
@@ -1728,59 +1728,40 @@
-z = array.copy()%0A np.random.shuffle(z)%0A return z
+return np.random.permutation(array)
%0A
|
00b57b668a5c68a209dac335915bbf2312df0580
|
Make sure tests run on local package
|
test.py
|
test.py
|
#
# Copyright (c) 2013-2014, Scott J Maddox
#
# This file is part of openbandparams.
#
# openbandparams is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# openbandparams is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with openbandparams. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
'''
Find and run all unit tests in the project.
'''
import nose
nose.main()
|
Python
| 0
|
@@ -897,16 +897,165 @@
ct.%0A'''%0A
+# Make sure we import the local package%0Aimport os%0Aimport sys%0Asys.path.insert(0,%0A os.path.abspath(os.path.join(os.path.dirname(__file__), 'src')))%0A
import n
|
736c18d692a3e69179ef164f1b9d927c2eb2637a
|
Update test.py
|
test.py
|
test.py
|
from ethjsonrpc import EthJsonRpc
methods = [
'web3_clientVersion',
'net_version',
'net_peerCount',
'net_listening',
'eth_protocolVersion',
'eth_coinbase',
'eth_mining',
'eth_hashrate',
'eth_gasPrice',
'eth_accounts',
'eth_blockNumber',
'eth_getCompilers',
'eth_newPendingTransactionFilter',
'eth_getWork',
# 'shh_version',
# 'shh_newIdentity',
# 'shh_newGroup',
]
c = EthJsonRpc()
print len(methods)
for m in methods:
meth = getattr(c, m)
result = meth()
print '%s: %s (%s)' % (m, result, type(result))
################################################################################
print '*' * 80
addr = '0x1dcb8d1f0fcc8cbc8c2d76528e877f915e299fbe'
for x in ['earliest', 'latest', 'pending', 150000]:
result = c.eth_getTransactionCount(addr, x)
print 'eth_getTransactionCount: %s (%s)' % (result, type(result))
b = (231301, '0x9476018748ba1dae5bdf5e3725f8966df1fa127d49f58e66f621bf6868a23c85')
result = c.eth_getBlockTransactionCountByHash(b[1])
print 'eth_getBlockTransactionCountByHash: %s (%s)' % (result, type(result))
for x in ['earliest', 'latest', 'pending', b[0]]:
result = c.eth_getBlockTransactionCountByNumber(x)
print 'eth_getBlockTransactionCountByNumber: %s (%s)' % (result, type(result))
b = (199583, '0x19d761c6f944eefe91ad70b9aff3d2d76c972e5bb68c443eea7c0eaa144cef9f')
result = c.eth_getUncleCountByBlockHash(b[1])
print 'eth_getUncleCountByBlockHash: %s (%s)' % (result, type(result))
for x in ['earliest', 'latest', 'pending', b[0]]:
result = c.eth_getUncleCountByBlockNumber(x)
print 'eth_getUncleCountByBlockNumber: %s (%s)' % (result, type(result))
################################################################################
print '*' * 80
db_name = 'db_name'
k = 'my_key'
v = 'my_value'
print c.db_putString(db_name, k, v)
x = c.db_getString(db_name, k)
print x
assert v == x
db_name = 'db_name'
k = 'my_key'
v = '0xabcdef'
print c.db_putHex(db_name, k, v)
x = c.db_getHex(db_name, k)
print x
assert v == x
################################################################################
print '*' * 80
b = (199583, '0x19d761c6f944eefe91ad70b9aff3d2d76c972e5bb68c443eea7c0eaa144cef9f')
print c.eth_getBlockByHash(b[1], tx_objects=False)
for x in ['earliest', 'latest', 'pending', b[0]]:
print c.eth_getBlockByNumber(x, tx_objects=False)
tx = '0x12cd5d9a82049154c8990214a551479853d1bfe45852688833bc4ef86a29b1a3'
print c.eth_getTransactionByHash(tx)
################################################################################
print '*' * 80
code = 'contract Test {}'
print c.eth_compileSolidity(code)
#code = ''
#print c.eth_compileSerpent(code)
#code = ''
#print c.eth_compileLLL(code)
################################################################################
print '*' * 80
b = (246236, '0xcd43703a1ead33ffa1f317636c7b67453c5cc03a3350cd71dbbdd70fcbe0987a')
index = 2
print c.eth_getTransactionByBlockHashAndIndex(b[1], index)
for x in ['earliest', 'latest', 'pending', b[0]]:
print c.eth_getTransactionByBlockNumberAndIndex(b[0], index)
tx = '0x27191ea9e8228c98bc4418fa60843540937b0c615b2db5e828756800f533f8cd'
print c.eth_getTransactionReceipt(tx)
b = (246294, '0x3d596ca3c7b344419567957b41b2132bb339d365b6b6b3b6a7645e5444914a16')
index = 0
print c.eth_getUncleByBlockHashAndIndex(b[1], index)
for x in ['earliest', 'latest', 'pending', b[0]]:
print c.eth_getUncleByBlockNumberAndIndex(b[0], index)
################################################################################
print '*' * 80
addr = '0x1dcb8d1f0fcc8cbc8c2d76528e877f915e299fbe'
for x in ['earliest', 'latest', 'pending', 150000]:
print c.eth_getBalance(addr, x)
addr = '0x407d73d8a49eeb85d32cf465507dd71d507100c1'
for x in ['earliest', 'latest', 'pending', 2]:
print c.eth_getStorageAt(addr, 0, x)
################################################################################
print '*' * 80
hash_rate = 1000000
client_id = '0x59daa26581d0acd1fce254fb7e85952f4c09d0915afd33d3886cd914bc7d283c'
print c.eth_submitHashrate(hash_rate, client_id)
print c.web3_sha3('')
|
Python
| 0.000001
|
@@ -4103,21 +4103,24 @@
nt_id)%0A%0A
-print
+digest =
c.web3_
@@ -4128,8 +4128,134 @@
ha3('')%0A
+print digest%0A# keccak-256, not sha3-256%0Aassert digest == '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470'%0A
|
13b2be31b19b2ca6f6b7979ce6754ee35a2aeb51
|
Remove print statement
|
boundary/api_cli.py
|
boundary/api_cli.py
|
#!/usr/bin/env python
###
### Copyright 2014-2015 Boundary, Inc.
###
### Licensed under the Apache License, Version 2.0 (the "License");
### you may not use this file except in compliance with the License.
### You may obtain a copy of the License at
###
### http://www.apache.org/licenses/LICENSE-2.0
###
### Unless required by applicable law or agreed to in writing, software
### distributed under the License is distributed on an "AS IS" BASIS,
### WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
### See the License for the specific language governing permissions and
### limitations under the License.
###
import argparse
import json
import logging
import os
import requests
import urllib2
import urllib
'''
Base class for all the Boundary CLI commands
'''
class ApiCli():
def __init__(self):
self.message = None
self.path = None
self.apihost = "premium-api.boundary.com"
self.email = None
self.apitoken = None
self.parser = argparse.ArgumentParser(description=self.getDescription())
self.scheme = "https"
self.path = None
self.url_parameters = None
self.method = "GET"
self.data = None
# Construct a dictionary with each of the HTTP methods that we support
self.methods = {"DELETE": self.doDelete,"GET": self.doGet,"POST": self.doPost,"PUT": self.doPut}
def getDescription(self):
'''
Returns a description of the CLI
'''
return "General API CLI"
def getEnvironment(self):
'''
Gets the configuration stored in environment variables
'''
try:
self.email = os.environ['BOUNDARY_EMAIL']
except(KeyError):
self.email = None
try:
self.apitoken = os.environ['BOUNDARY_API_TOKEN']
except(KeyError):
self.apitoken = None
try:
self.apihost = os.environ['BOUNDARY_API_HOST']
except(KeyError):
self.apihost = 'premium-api.boundary.com'
def addArguments(self):
'''
Configure handling of command line arguments.
'''
self.parser.add_argument('-v', '--verbose',dest='verbose', action='store_true', help='verbose mode')
self.parser.add_argument('-a', '--api-host',dest='apihost',action='store',metavar="api_host",help='API host endpoint')
self.parser.add_argument('-e', '--email',dest='email',action='store',metavar="e_mail",help='e-mail used to create the Boundary account')
self.parser.add_argument('-t', '--api-token',dest='apitoken',required=False,action='store',metavar="api_token",help='API token to access the Boundary account')
def parseArgs(self):
'''
Handles the parse of the command line arguments
'''
self.addArguments()
self.args = self.parser.parse_args()
def getArguments(self):
'''
CLIs get called back so that they can process any command line arguments
that are given. This method handles the standard command line arguments for:
API Host, user, password, etc.
'''
if self.args.apihost != None:
self.apihost = self.args.apihost
if self.args.email != None:
self.email = self.args.email
if self.args.apitoken != None:
self.apitoken = self.args.apitoken
def setErrorMessage(self,message):
self.message = message
def validateArguments(self):
if self.email == None:
self.setErrorMessage("E-mail for the account not provided")
return False
if self.apitoken == None:
self.setErrorMessage("API Token for the account not provided")
return False
return True
def getUrlParameters(self):
urlParameters = ""
if self.url_parameters != None:
urlParameters = "?"
values = self.url_parameters
first = True
for key in values:
if first == True:
first = False
else:
urlParameters = urlParameters + "&"
urlParameters = urlParameters + "{0}={1}".format(key,values[key])
return urlParameters
def doGet(self):
'''
HTTP Get Request
'''
return requests.get(self.url,auth=(self.email,self.apitoken),data=self.data)
def doDelete(self):
'''
HTTP Delete Request
'''
return requests.delete(self.url,auth=(self.email,self.apitoken),data=self.data)
def doPost(self):
return requests.post(self.url,auth=(self.email,self.apitoken),data=self.data)
def doPut(self):
'''
HTTP Put Request
'''
return requests.put(self.url,auth=(self.email,self.apitoken),data=self.data)
def callAPI(self):
'''
Make an API call to get the metric definition
'''
self.url = "{0}://{1}/{2}{3}".format(self.scheme,self.apihost,self.path,self.getUrlParameters())
print(self.url)
result = self.methods[self.method]()
if result.status_code != urllib2.httplib.OK:
print(self.url)
if self.data != None:
print(self.data)
print(result)
self.handleResults(result)
def handleResults(self,result):
'''
Call back function to be implemented by the CLI.
Default is to just print the results to standard out
'''
print(result.text)
def execute(self):
'''
Run the steps to execute the CLI
'''
self.getEnvironment()
self.parseArgs()
self.getArguments()
if self.validateArguments() == True:
self.callAPI()
else:
print(self.message)
|
Python
| 0.007015
|
@@ -4677,28 +4677,8 @@
s())
-%0A print(self.url)
%0A%0A
|
a038be51410c66d7dff819093b1c71c4ddfa5516
|
Move to 2 channels to stop OS X IOError
|
soundbooth/apps/booth/utils.py
|
soundbooth/apps/booth/utils.py
|
import os
from sys import byteorder
from array import array
from struct import pack
import pyaudio
import wave
from django.conf import settings
CHANNELS = 1
SAMPLE_RATE = 44100
THRESHOLD = 500
CHUNK_SIZE = 1024
FORMAT = pyaudio.paInt16
TEMP_PATH = getattr(settings, 'BOOTH_RECORDING_STORAGE_PATH', '/tmp')
def is_silent(snd_data):
"Returns 'True' if below the 'silent' threshold"
return max(snd_data) < THRESHOLD
def normalize(snd_data):
"Average the volume out"
MAXIMUM = 16384
times = float(MAXIMUM)/max(abs(i) for i in snd_data)
r = array('h')
for i in snd_data:
r.append(int(i*times))
return r
def trim(snd_data):
"Trim the blank spots at the start and end"
def _trim(snd_data):
snd_started = False
r = array('h')
for i in snd_data:
if not snd_started and abs(i)>THRESHOLD:
snd_started = True
r.append(i)
elif snd_started:
r.append(i)
return r
# Trim to the left
snd_data = _trim(snd_data)
# Trim to the right
snd_data.reverse()
snd_data = _trim(snd_data)
snd_data.reverse()
return snd_data
def add_silence(snd_data, seconds):
"Add silence to the start and end of 'snd_data' of length 'seconds' (float)"
r = array('h', [0 for i in range(int(seconds*SAMPLE_RATE))])
r.extend(snd_data)
r.extend([0 for i in range(int(seconds*SAMPLE_RATE))])
return r
def record(duration):
"""
Record a word or words from the microphone and
return the data as an array of signed shorts.
Normalizes the audio, trims silence from the
start and end, and pads with 0.5 seconds of
blank sound to make sure VLC et al can play
it without getting chopped off.
"""
duration = int(duration) * 60
p = pyaudio.PyAudio()
stream = p.open(format=FORMAT, channels=1, rate=SAMPLE_RATE,
input=True, output=True,
frames_per_buffer=CHUNK_SIZE)
num_silent = 0
snd_started = False
r = array('h')
for i in range(0, int(SAMPLE_RATE / CHUNK_SIZE * duration)):
# little endian, signed short
snd_data = array('h', stream.read(CHUNK_SIZE))
if byteorder == 'big':
snd_data.byteswap()
r.extend(snd_data)
'''
silent = is_silent(snd_data)
if silent and snd_started:
num_silent += 1
elif not silent and not snd_started:
snd_started = True
if snd_started and num_silent > 30:
break
'''
sample_width = p.get_sample_size(FORMAT)
stream.stop_stream()
stream.close()
p.terminate()
r = normalize(r)
r = trim(r)
r = add_silence(r, 0.5)
return sample_width, r
def record_to_file(duration, filename):
"Records from the microphone and outputs the resulting data to 'path'"
duration = int(duration)
if '.wav' not in filename:
filename = filename + '.wav'
path = os.path.join(TEMP_PATH, filename)
sample_width, data = record(duration)
data = pack('<' + ('h'*len(data)), *data)
wf = wave.open(path, 'wb')
wf.setnchannels(1)
wf.setsampwidth(sample_width)
wf.setframerate(SAMPLE_RATE)
wf.writeframes(data)
wf.close()
return path
if __name__ == '__main__':
print("please speak a word into the microphone")
record_to_file(10, 'demo.wav')
print("done - result written to demo.wav")
|
Python
| 0
|
@@ -151,17 +151,17 @@
NNELS =
-1
+2
%0ASAMPLE_
@@ -1865,16 +1865,25 @@
p.open(
+%0A
format=F
@@ -1888,16 +1888,24 @@
=FORMAT,
+%0A
channel
@@ -1908,16 +1908,24 @@
nnels=1,
+%0A
rate=SA
@@ -1954,16 +1954,24 @@
ut=True,
+%0A
output=
@@ -2012,16 +2012,21 @@
UNK_SIZE
+%0A
)%0A%0A n
|
c1ec2b58ca0520b64cd34c6fa88156d9d2e58462
|
add stop all option
|
kbservices.py
|
kbservices.py
|
#!/usr/bin/python
# -*- coding:utf-8 -*-
#############################################
# Flask & werkzeug HTTP Proxy Sample code.
# - Code by Jioh L. Jung (ziozzang@gmail.com)
#############################################
import ConfigParser
import os
import docker
from docker.utils import kwargs_from_env
import time
import sys
STARTED='started'
STOPPED='stopped'
STATUS='status'
class kbservices:
PREFIX='proxy_'
CONFIGFILE='cluster.ini'
DEFAULTIMAGE='canon/fakeserv:1.0'
RETRY=40
POLL_TIME=0.1
def __init__(self):
self.services=self.read_config()
self.client=self.init_docker()
self.update_services()
def get_item(self,section,item,default):
value=default
if self.Config.has_option('defaults',item):
value=self.Config.get('defaults',item)
if self.Config.has_option(section,item):
value=self.Config.get(section,item)
return value
def read_config(self):
services=dict()
self.Config = ConfigParser.ConfigParser()
self.Config.default_section='defaults'
self.Config.read(self.CONFIGFILE)
for section in self.Config.sections():
if section=='global':
continue
if section=='defaults':
continue
type=self.get_item(section,'type','service')
if type!='service':
continue
pt=self.get_item(section,'proxytype','proxy')
if pt=='skip':
continue
service=self.get_item(section,'urlname',section)
services[service]=dict()
services[service][STATUS]=STOPPED
services[service]['ip']=''
services[service]['port']=0
services[service]['service-port']=int(self.get_item(section,'service-port',0))
services[service]['image']=self.get_item(section,'docker-image',self.DEFAULTIMAGE)
volumes=[]
binds=[]
for item in self.get_item(section,'docker-volumes','').split(','):
if item!='':
(volume,alias)=item.split(':')
volumes.append(volume)
binds.append(volume+':'+alias)
services[service]['volumes']=volumes
services[service]['binds']=binds
links=dict()
for item in self.get_item(section,'docker-links','').split(','):
if item!='':
(link,alias)=item.split(':')
links[link]=alias
services[service]['links']=links
services[service]['section']=section
services[service]['name']=service
services[service]['container']=''
return services
def init_docker(self):
if 'DOCKER_HOST' in os.environ:
self.IP=os.environ['DOCKER_HOST'].replace('tcp://','').split(':')[0]
else:
self.IP=''
kwargs = kwargs_from_env()
if 'tls' in kwargs:
kwargs['tls'].assert_hostname = False
client = docker.Client(**kwargs)
return client
def isaservice(self,service):
if service in self.services:
return True
else:
return False
def isstarted(self,service):
if service in self.services and self.services[service][STATUS]==STARTED:
return True
else:
return False
def get_list(self):
return self.services.keys()
def get_hostport(self,service):
if service in self.services:
sr=self.services[service]
if sr[STATUS]==STOPPED:
start_service(service)
return (sr['ip'],sr['port'])
else:
return (None,None)
def update_service(self,service,id):
self.services[service]['container']=id
ct=self.client.inspect_container(id)
if ct['State']['Running']==False:
self.client.remove_container(id)
self.services[service][STATUS]=STOPPED
return self.services[service][STATUS]
if self.IP == '':
self.services[service]['ip']=ct['NetworkSettings']['IPAddress']
self.services[service]['port']=self.services[service]['service-port']
else:
self.services[service]['ip']=self.IP
self.services[service]['port']=self.services[service]['service-port']
self.services[service][STATUS]=STARTED
return self.services[service][STATUS]
def update_services(self):
for cont in self.client.containers(all=True):
for name in cont['Names']:
service=name.replace('/'+self.PREFIX,'')
if service in self.services:
self.update_service(service,cont['Id'])
def start_service(self,service):
self.update_services()
if service not in self.services:
return False
sr=self.services[service]
if sr[STATUS]==STARTED:
return True
image=sr['image']
port=sr['service-port']
host_config=docker.utils.create_host_config(port_bindings={port:port},
links=sr['links'],
binds=sr['binds'])
container = self.client.create_container( image=image,
name=self.PREFIX+sr['name'],
detach=True,
ports=[port],
volumes=sr['volumes'],
environment=dict(PORT=port,MYSERVICES=sr['section']),
host_config=host_config)
id=container.get('Id')
response = self.client.start(container=id)
retry=self.RETRY
while retry>0:
retry-=1
self.update_service(service,id)
if sr[STATUS]==STARTED:
return True
time.sleep(self.POLL_TIMESLEEP)
return False
def kill_service(self,service):
self.update_services()
if service in self.services:
sr=self.services[service]
id=sr['container']
if sr[STATUS]!=STOPPED:
self.client.kill(id)
retry=self.RETRY
while retry>0:
retry-=1
self.update_service(service,id)
if sr[STATUS]==STOPPED:
return True
time.sleep(self.POLL_TIME)
return False
if __name__ == '__main__':
kbs=kbservices()
if len(sys.argv)==3 and sys.argv[1]=='start':
service=sys.argv[2]
print "Starting "+service
kbs.start_service(service)
elif len(sys.argv)==3 and sys.argv[1]=='stop':
service=sys.argv[2]
print "Stop "+service
kbs.kill_service(service)
elif len(sys.argv)==2 and sys.argv[1]=='status':
print
print '%-40s %s'%('Service','Status')
print '==================================================='
for s in kbs.get_list():
status=kbs.isstarted(s)
print '%-40s %s'%(s,status)
else:
print "Usage: kbservices <start,stop,stautus> [service]"
|
Python
| 0.000538
|
@@ -5944,32 +5944,134 @@
%22Stop %22+service%0A
+ if service=='all':%0A for s in kbs.get_list():%0A kbs.kill_service(s)%0A else:%0A
kbs.kill_s
|
8c0e26f6b15b605831e6394cbce7feb8f4a35de4
|
Fix test.
|
gdax_utils_test.py
|
gdax_utils_test.py
|
"""Unit tests."""
import unittest
import gdax_utils
import mock
import utils
utils.configure_logging(to_stderr=True, to_file=False)
class TestInit(unittest.TestCase):
def setUp(self):
self.patcher = mock.patch('gdax.AuthenticatedClient', autospec=True)
self.mock_gdax_client_class = self.patcher.start()
self.mock_client = mock.Mock(spec=self.mock_gdax_client_class)
self.mock_gdax_client_class.return_value = self.mock_client
self.mock_client.get_products.return_value = [
{'id': 'ETH-USD'},
{'id': 'BTC-GBP'},
]
self.mock_client.get_product_ticker.return_value = {'price': '123.45'}
self.c = gdax_utils.Client()
def tearDown(self):
self.patcher.stop()
def testGetProductIds(self):
self.assertListEqual(self.c._get_product_ids(), ['BTC-GBP', 'ETH-USD'])
def testCheckValidOrder(self):
# order_type, side, product, size, price, product_ids
self.c._check_valid_order('limit', 'buy', 'ETH-USD', '23.4', '140.11')
self.c._check_valid_order('market', 'sell', 'btc-gbp', '948.2', '1239123')
self.c._check_valid_order('market', 'sell', 'btc-gbp', '5', '-2')
# TODO: test stop orders.
with self.assertRaises(AssertionError):
self.c._check_valid_order('something', 'buy', 'ETH-USD', '23.4', '140.11')
with self.assertRaises(AssertionError):
self.c._check_valid_order('something', 'buysell', 'ETH-USD', '23.4', '140.11')
with self.assertRaises(AssertionError):
self.c._check_valid_order('something', 'buysell', 'usd-gbp', '23.4', '140.11')
with self.assertRaises(AssertionError):
self.c._check_valid_order('limit', 'buysell', 'usd-gbp', '23.4', '')
def testParsePrice(self):
test_values = [
('-1', ('122.45', -1)),
('-1.00', ('122.45', -1)),
('+.5', ('123.95', 0.5)),
('+5', ('128.45', 5.0)),
('+99.11', ('222.56', 99.11)),
# Maybe flaky here if we use assertEqual
('180', ('180', 180 - 123.45)),
('50', ('50', 50 - 123.45)),
('45.87', ('45.87', 45.87 - 123.45)),
]
for price, abs_price in test_values:
self.assertEqual(self.c._parse_price(price, 123.45), abs_price)
def testTruncate(self):
test_values = [
(('123.45', 1), '123.4'),
(('1234.5678', 2), '1234.56'),
(('1234.5678', 3), '1234.567'),
(('1234.5678', 4), '1234.5678'),
]
for args, expected in test_values:
self.assertEqual(self.c._truncate(*args), expected)
def testOrder(self):
# order_type, side, product, size, price
self.c.order('market', 'buy', 'eth-usd', '0.1', '')
self.c.order('limit', 'buy', 'ETH-USD', '.25', '-1')
with self.assertRaises(ValueError):
self.c.order('limit', 'buy', 'ETH-USD', '9.3', '125')
with self.assertRaises(ValueError):
self.c.order('limit', 'buy', 'ETH-USD', '9.3', '+1')
kwargs = {
'side': 'buy',
'product_id': 'ETH-USD',
'stp': True
}
self.mock_client.buy.assert_has_calls([
mock.call(type='market', size='0.1', **kwargs),
mock.call(type='limit', size='.25', price='122.45', **kwargs),
])
self.c.order('market', 'sell', 'ETH-USD', '.2345', None)
self.c.order('limit', 'sell', 'ETH-USD', '0.1', '180')
with self.assertRaises(ValueError):
self.c.order('limit', 'sell', 'ETH-USD', '0.1', '120')
with self.assertRaises(ValueError):
self.c.order('limit', 'sell', 'ETH-USD', '0.1', '123.43')
with self.assertRaises(ValueError):
self.c.order('limit', 'sell', 'ETH-USD', '0.1', '-.5')
kwargs['side'] = 'sell'
self.mock_client.sell.assert_has_calls([
mock.call(type='market', size='.2345', **kwargs),
mock.call(type='limit', size='0.1', price='180', **kwargs),
])
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000001
|
@@ -2909,28 +2909,8 @@
D',%0A
- 'stp': True%0A
|
a122193144185320f045367613650b40f7df00b8
|
Rework the test script a bit.
|
test.py
|
test.py
|
import codegen, jinja2, spidermonkey, sys
import simplejson as json
def jstest(env, src, data):
    """Compile *src* with codegen and render it under SpiderMonkey with *data*."""
    runtime = spidermonkey.Runtime()
    context = runtime.new_context()
    js_source = codegen.generate(env, codegen.compile(env, src))
    payload = json.dumps(data)
    return context.execute(js_source + '\ntemplate.render(%s);' % payload)
def pytest(env, src, data):
    """Render *src* as a template in *env* with *data* (pure-Python reference)."""
    return env.from_string(src).render(data)
# (template_source, render_data) pairs known to render identically in both
# the JS backend (jstest) and plain Jinja2 (pytest).
WORKS = [
    ('{{ test }}', {'test': 'crap'}),
    ('{% if a %}x{% endif %}', {'a': True}),
    ('{% if a %}c{% endif %}b', {'a': False}),
    ('{{ 1 if a else 2 }}', {'a': True}),
    ('{{ 1 if a else 2 }}', {'a': False}),
    ('{% if a %}d{% else %}e{% endif %}', {'a': False}),
    ('{% if a %}f{% elif b %}g{% endif %}', {'b': True}),
    ("{{ '%4.2f'|format(x) }}", {'x': 17.0}),
    ('{{ d[:7] }}', {'d': '2011-05-27'}),
    ('{{ a.x }}', {'a': {'x': 'z'}}),
    ('{{ "%.6f"|format(a / b) }}', {'a': 5.0, 'b': 3}),
    ('{{ "%.1f"|format(a.x / b.y * 100) }}', {'a': {'x': 20}, 'b': {'y': 5}}),
    ('{% macro x(y) %}{{ y / 2 }}{% endmacro %}{{ x(z) }}', {'z': 512}),
]
# next:
# - assignment + cond-expr
# - for-loop

# Ad-hoc driver: pick a WORKS entry by index from the command line and print
# the compiled AST, the generated JS, and both rendered outputs.
# (Python 2 print statements — this file targets Python 2.)
src, data = WORKS[int(sys.argv[1])]
env = jinja2.Environment()
ast = codegen.compile(env, src)
print ast
print codegen.generate(env, ast)
print 'js:', repr(jstest(env, src, data))
print 'py:', repr(pytest(env, src, data))
|
Python
| 0
|
@@ -66,333 +66,12 @@
on%0A%0A
-def jstest(env, src, data):%0A%09run = spidermonkey.Runtime()%0A%09ctx = run.new_context()%0A%09js = codegen.generate(env, codegen.compile(env, src))%0A%09jsobj = json.dumps(data)%0A%09code = js + '%5Cntemplate.render(%25s);' %25 jsobj%0A%09return ctx.execute(code)%0A%0Adef pytest(env, src, data):%0A%09tmpl = env.from_string(src)%0A%09return tmpl.render(data)%0A%0AWORK
+TEST
S =
@@ -704,93 +704,378 @@
%0A%5D%0A%0A
-# next:%0A# - assignment + cond-expr%0A# - for-loop%0A%0Asrc, data = WORKS%5Bint(sys.argv%5B1%5D)
+def jstest(env, src, data):%0A%09run = spidermonkey.Runtime()%0A%09ctx = run.new_context()%0A%09js = codegen.generate(env, codegen.compile(env, src))%0A%09jsobj = json.dumps(data)%0A%09code = js + '%5Cntemplate.render(%25s);' %25 jsobj%0A%09return ctx.execute(code)%0A%0Adef pytest(env, src, data):%0A%09tmpl = env.from_string(src)%0A%09return tmpl.render(data)%0A%0Adef run(i, quiet=True):%0A%09%0A%09src, data = TESTS%5Bi
%5D%0A
+%09
env
@@ -1097,16 +1097,17 @@
nment()%0A
+%09
ast = co
@@ -1130,16 +1130,35 @@
v, src)%0A
+%09%0A%09if not quiet:%0A%09%09
print as
@@ -1159,16 +1159,18 @@
int ast%0A
+%09%09
print co
@@ -1198,89 +1198,423 @@
st)%0A
-%0Aprint 'js:', repr(jstest(env, src, data))%0Aprint 'py:', repr(pytest(env, src, data)
+%09%0A%09js = jstest(env, src, data)%0A%09py = pytest(env, src, data)%0A%09%0A%09if not quiet:%0A%09%09print 'js:', repr(js)%0A%09%09print 'py:', repr(py)%0A%09%0A%09if js.isdigit():%0A%09%09return float(js) == float(py)%0A%09return js == py%0A%0Adef test():%0A%09for i, t in enumerate(TESTS):%0A%09%09res = run(i)%0A%09%09sys.stdout.write('.' if res else 'F')%0A%09sys.stdout.write('%5Cn')%0A%0Aif __name__ == '__main__':%0A%09args = sys.argv%5B1:%5D%0A%09if args:%0A%09%09run(int(args%5B0%5D), False)%0A%09else:%0A%09%09test(
)%0A
|
5d71fadb5b1e1453c7667a9264f9b2fb3cfe5398
|
Fix filename and name table info of Arabic fonts.
|
nototools/autofix_for_release.py
|
nototools/autofix_for_release.py
|
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fix some issues in Noto fonts before releasing them."""
__author__ = 'roozbeh@google.com (Roozbeh Pournader)'
import array
from os import path
import re
import sys
from fontTools import ttLib
import font_data
def fix_revision(font):
    """Fix the revision of the font to match its version.

    Parses the name-table version string ("Version X.Y") and overwrites
    head.fontRevision when it does not round-trip to the same X.Y value.
    Returns True when the font was modified.
    """
    version = font_data.font_version(font)
    match = re.match(r'Version (\d{1,5})\.(\d{1,5})', version)
    major_version = match.group(1)
    minor_version = match.group(2)

    # Compare at the precision of the version string's minor part, so e.g.
    # "Version 1.04" is checked against the revision printed to 2 decimals.
    accuracy = len(minor_version)
    font_revision = font_data.printable_font_revision(font, accuracy)
    expected_font_revision = major_version+'.'+minor_version
    if font_revision != expected_font_revision:
        font['head'].fontRevision = float(expected_font_revision)
        print 'Fixed fontRevision to %s' % expected_font_revision
        return True

    return False
def fix_fstype(font):
    """Fix the fsType of the font.

    Forces OS/2.fsType to 0 (installable embedding, no restrictions).
    Returns True when the font was modified.
    """
    if font['OS/2'].fsType != 0:
        font['OS/2'].fsType = 0
        print 'Updated fsType to 0'
        return True
    return False
# Reversed name records in Khmer and Lao fonts
# Maps incorrect substrings found in 'name' table records to their corrected
# form; applied by fix_name_table() to records 1, 3, 4 and 6.
NAME_CORRECTIONS = {
    'UI Khmer': 'Khmer UI',
    'UIKhmer': 'KhmerUI',
    'UI Lao': 'Lao UI',
    'UILao': 'LaoUI',
}
def fix_name_table(font):
    """Fix copyright and reversed values in the 'name' table.

    Normalizes the copyright record to a single-year Google notice (earliest
    year found) and applies NAME_CORRECTIONS to the family/name records.
    Returns True when any record changed.
    """
    modified = False
    name_records = font_data.get_name_records(font)

    # Record 0 is the copyright string; rebuild it from the earliest year.
    copyright_data = name_records[0]
    years = re.findall('20[0-9][0-9]', copyright_data)
    year = min(years)
    copyright_data = u'Copyright %s Google Inc. All Rights Reserved.' % year

    if copyright_data != name_records[0]:
        print 'Updated copyright message to "%s"' % copyright_data
        font_data.set_name_record(font, 0, copyright_data)
        modified = True

    # IDs 1/3/4/6: family, unique ID, full name, PostScript name.
    for name_id in [1, 3, 4, 6]:
        record = name_records[name_id]
        for source in NAME_CORRECTIONS:
            if source in record:
                record = record.replace(source, NAME_CORRECTIONS[source])
        if record != name_records[name_id]:
            font_data.set_name_record(font, name_id, record)
            print 'Updated name table record #%d to "%s"' % (
                name_id, record)
            modified = True

    return modified
def drop_hints(font):
    """Drops a font's hint.

    Clears the TrueType bytecode of every simple glyph (numberOfContours > 0;
    composite and empty glyphs are skipped).  Returns True if any glyph
    program was removed.
    """
    modified = False
    glyf_table = font['glyf']
    for glyph_index in range(len(glyf_table.glyphOrder)):
        glyph_name = glyf_table.glyphOrder[glyph_index]
        glyph = glyf_table[glyph_name]
        if glyph.numberOfContours > 0:
            if glyph.program.bytecode:
                # Replace with an empty byte array rather than deleting the
                # program attribute, keeping the glyph structure intact.
                glyph.program.bytecode = array.array('B')
                modified = True
                print 'Dropped hints from glyph "%s"' % glyph_name
    return modified
def drop_tables(font, tables):
    """Drops the listed tables from a font.

    Args:
        font: the font object; must support ``in`` and ``del`` by table tag.
        tables: iterable of table tag names to delete if present.

    Returns:
        True if at least one table was actually removed.
    """
    modified = False
    for table in tables:
        if table in font:
            # Parenthesized single-argument print works identically under
            # Python 2 and 3.  The original also set ``modified = True``
            # twice per hit; once is enough.
            print('Dropped table "%s"' % table)
            del font[table]
            modified = True
    return modified
# Table tags removed from every font before release.
TABLES_TO_DROP = [
    # FontForge internal tables
    'FFTM', 'PfEd',
    # Microsoft VOLT internal tables
    'TSI0', 'TSI1', 'TSI2', 'TSI3',
    'TSI5', 'TSID', 'TSIP', 'TSIS',
    'TSIV',
]
def main(argv):
    """Fix all fonts provided in the command line.

    Each font file is opened, repaired (revision, fsType, name table, and —
    for unhinted builds — glyph hints and hinting tables), and written to a
    ``modified/`` sibling directory only when something actually changed.
    """
    for font_file in argv[1:]:
        print('Font file: %s' % font_file)
        font = ttLib.TTFont(font_file)
        modified = False
        modified |= fix_revision(font)
        modified |= fix_fstype(font)
        modified |= fix_name_table(font)

        # Hinted builds are identified by their path; only unhinted builds
        # lose their glyph programs and hinting tables.
        is_hinted = '/hinted' in font_file or '_hinted' in font_file
        if not is_hinted:
            modified |= drop_hints(font)

        # Copy before extending: the original code aliased the module-level
        # TABLES_TO_DROP and ``+=`` mutated it, so the hint tables were
        # appended again for every unhinted font processed in one run.
        tables_to_drop = list(TABLES_TO_DROP)
        if not is_hinted:
            tables_to_drop += ['fpgm', 'prep', 'cvt']
        modified |= drop_tables(font, tables_to_drop)

        target_file = (
            path.dirname(font_file) +
            '/modified/' +
            path.basename(font_file))
        if modified:
            font.save(target_file)
        else:
            print('No modification necessary')
        print('')
|
Python
| 0
|
@@ -809,16 +809,31 @@
ttLib%0A%0A
+from nototools
import f
@@ -1758,16 +1758,65 @@
ONS = %7B%0A
+ 'Sans Kufi': 'Kufi',%0A 'SansKufi': 'Kufi',%0A
'UI
@@ -3744,24 +3744,16 @@
modified
-
%0A%0A%0ATABLE
@@ -4150,24 +4150,16 @@
= False%0A
-
%0A
@@ -4405,24 +4405,16 @@
s(font)%0A
-
%0A
|
0b46c6636dc97a008277566e20c5f72cd0e5384f
|
Allow to upload retina '@2x' files
|
cmscloud_client/utils.py
|
cmscloud_client/utils.py
|
# -*- coding: utf-8 -*-
from cStringIO import StringIO
import hashlib
import shutil
import subprocess
import tarfile
import tempfile
from cmscloud_client.serialize import register_yaml_extensions
import os
import re
import yaml
FILENAME_BASIC_RE = re.compile(r'^[a-zA-Z0-9_]+[a-zA-Z0-9._-]*\.[a-zA-Z]{2,4}$')
ALLOWED_EXTENSIONS = [
'.js',
'.css',
'.png',
'.jpg',
'.jpeg',
'.gif',
'.htc',
'.scss',
'.sass',
'.rb',
'.less',
'.ico',
'.html',
'.htm',
]
BOILERPLATE_REQUIRED = [
'name',
('author', [
'name',
]),
'version',
'description',
('license', [
'name',
'text',
]),
'templates',
]
APP_REQUIRED = [
'name',
('author', [
'name',
]),
'version',
'package-name',
'description',
('license', [
'name',
'text'
]),
'installed-apps',
]
class ValidationError(Exception):
    """Raised when an uploaded file name or extension is not acceptable."""
    pass
def _validate(config, required):
valid = (True, "Configuration file is valid")
for thing in required:
if isinstance(thing, tuple):
key, values = thing
else:
key, values = thing, []
if key not in config:
valid = (False, "Required key %r not found in config" % key)
for subkey in values:
if subkey not in config[key]:
valid = (False, "Required sub key %r in %r not found in config" % (subkey, key))
return valid
def validate_app_config(config):
    """Validate an app.yaml mapping; returns an (ok, message) tuple."""
    return _validate(config, APP_REQUIRED)
def validate_boilerplate_config(config):
    """Validate a boilerplate.yaml mapping.

    Checks the BOILERPLATE_REQUIRED keys, then the 'templates' section
    (a list of two-item lists), then that every 'protected' file exists.
    Returns an (ok, message) tuple.
    """
    (valid, msg) = _validate(config, BOILERPLATE_REQUIRED)
    if not valid:
        return (False, msg)
    # check templates: each entry must be a [label, path]-style pair
    data = config.get('templates', [])
    template_valid = True
    if not isinstance(data, list):
        template_valid = False
    else:
        for template in data:
            if not isinstance(template, list):
                template_valid = False
            elif len(template) != 2:
                template_valid = False
    if not template_valid:
        msg = "Templates must be a list of lists of two items"
        return (False, msg)
    # check protected: paths are resolved relative to the current directory
    protected = config.get('protected', [])
    valid = True
    if not isinstance(protected, list):
        valid = False
        msg = "Protected files must be a list"
    else:
        errors = []
        for filename in protected:
            if not os.path.exists(filename):
                valid = False
                errors.append("Protected file %r not found" % filename)
        if errors:
            msg = os.linesep.join(errors)
    # When everything passed, msg still holds the success message produced
    # by _validate above, so (True, "Configuration file is valid") results.
    return (valid, msg)
def tar_add_stringio(tar, string_io, name):
    """Append the contents of the in-memory file *string_io* to *tar* as *name*."""
    member = tarfile.TarInfo(name=name)
    # Size is measured by seeking to the end; the stream is then rewound so
    # addfile() reads it from the start.
    string_io.seek(0, os.SEEK_END)
    member.size = string_io.tell()
    string_io.seek(0)
    tar.addfile(tarinfo=member, fileobj=string_io)
def is_valid_file_name(name):
    """Return True when *name* is uploadable; raise ValidationError otherwise."""
    if not FILENAME_BASIC_RE.match(name):
        raise ValidationError(
            "File name %r is not valid, ignoring." % name)
    extension = os.path.splitext(name)[-1]
    if extension not in ALLOWED_EXTENSIONS:
        raise ValidationError(
            "File extension %r is not allowed, ignoring." % extension)
    return True
def filter_static_files(tarinfo):
    """Tar filter: pass directories through, validate regular file names."""
    if not tarinfo.isfile():
        return tarinfo
    # is_valid_file_name raises ValidationError for bad names, so reaching
    # the return below means the name was accepted.
    if is_valid_file_name(os.path.basename(tarinfo.name)):
        return tarinfo
    return None
def filter_sass_files(tarinfo):
    """Tar filter for sass trees: validate files, skip hidden directories."""
    basename = os.path.basename(tarinfo.name)
    if tarinfo.isfile():
        # Raises ValidationError for unacceptable names.
        return tarinfo if is_valid_file_name(basename) else None
    if basename.startswith('.'):
        return None
    return tarinfo
def filter_template_files(tarinfo):
    """Tar filter: keep directories and '.html' files, drop everything else."""
    if not tarinfo.isfile():
        return tarinfo
    _, extension = os.path.splitext(os.path.basename(tarinfo.name))
    return tarinfo if extension == '.html' else None
def bundle_boilerplate(config, data, extra_file_paths, **complex_extra):
    """Build an in-memory .tar.gz boilerplate bundle.

    Writes *config* as boilerplate.yaml and *data* as data.yaml, adds each
    path in *extra_file_paths* verbatim, and adds each *complex_extra* key
    as a path with its value used as a tarfile filter callable.

    Returns:
        A StringIO containing the gzipped tar, positioned at offset 0.
    """
    register_yaml_extensions()
    fileobj = StringIO()
    tar = tarfile.open(mode='w:gz', fileobj=fileobj)
    config_fileobj = StringIO()
    yaml.dump(config, config_fileobj)
    tar_add_stringio(tar, config_fileobj, 'boilerplate.yaml')
    data_fileobj = StringIO()
    yaml.dump(data, data_fileobj)
    tar_add_stringio(tar, data_fileobj, 'data.yaml')
    for path in extra_file_paths:
        tar.add(path)
    for key, value in complex_extra.items():
        tar.add(key, filter=value)
    tar.close()
    fileobj.seek(0)
    return fileobj
def bundle_package(workspace, tar):
    """Build an sdist of the cwd's package into *workspace* and add it to *tar*.

    Runs ``python setup.py sdist -d workspace`` with output discarded, then
    adds the result under the fixed archive name 'package.tar.gz'.
    """
    devnull = open(os.devnull, 'w')
    try:
        subprocess.check_call(['python', 'setup.py', 'sdist', '-d', workspace], stdout=devnull, stderr=devnull)
    finally:
        devnull.close()
    # Assumes the freshly built sdist is the only file in the workspace.
    egg_file = os.path.join(workspace, os.listdir(workspace)[0])
    tar.add(egg_file, arcname='package.tar.gz')
def bundle_app(config, script):
    """Build an in-memory .tar.gz app bundle.

    Writes *config* as app.yaml, optionally embeds *script* as
    cmscloud_config.py, and appends the sdist of the current package.

    Returns:
        A StringIO containing the gzipped tar, positioned at offset 0.
    """
    register_yaml_extensions()
    fileobj = StringIO()
    tar = tarfile.open(mode='w:gz', fileobj=fileobj)
    config_fileobj = StringIO()
    yaml.dump(config, config_fileobj)
    tar_add_stringio(tar, config_fileobj, 'app.yaml')
    script_fileobj = StringIO(script)
    # NOTE(review): the existence check is on the local file while the
    # content written comes from *script* — confirm this asymmetry is
    # intentional.
    if os.path.exists('cmscloud_config.py'):
        tar_add_stringio(tar, script_fileobj, 'cmscloud_config.py')
    # add actual package
    distdir = tempfile.mkdtemp(prefix='cmscloud-client')
    try:
        bundle_package(distdir, tar)
    finally:
        shutil.rmtree(distdir)
    tar.close()
    fileobj.seek(0)
    return fileobj
def hashfile(fd, blocksize=65536):
    """Return the SHA-256 digest of the already-open file *fd*, read in chunks."""
    hasher = hashlib.sha256()
    while True:
        chunk = fd.read(blocksize)
        if not chunk:
            break
        hasher.update(chunk)
    return hasher.digest()
def uniform_filepath(filepath):
    """Normalize *filepath*: absolute, symlinks resolved, trailing separators stripped."""
    resolved = os.path.realpath(os.path.abspath(filepath))
    return resolved.rstrip(os.sep)
def is_hidden(path):
    """Return True when the final path component starts with a dot."""
    return os.path.basename(path).startswith('.')
def is_inside_dir(path, parent_dir):
    """Return True if *path*, joined under *parent_dir*, stays inside it.

    Used by filter_bad_paths() to reject tar members that would escape the
    extraction directory.  *parent_dir* is expected to already be
    normalized via uniform_filepath().
    """
    path = os.path.join(parent_dir, path)
    path = uniform_filepath(path)
    # A bare startswith() also matched sibling directories sharing the
    # prefix (e.g. '/tmp/foo2' against parent '/tmp/foo'); require an exact
    # match or a path-separator boundary.
    return path == parent_dir or path.startswith(parent_dir + os.sep)
def filter_bad_paths(members, parent_dir):
    """Yield only tar *members* that are safe to extract under *parent_dir*.

    Drops symlinks, hard links, and members whose resolved path escapes
    *parent_dir* (tar path-traversal protection).
    """
    parent_dir = uniform_filepath(parent_dir)
    for finfo in members:
        if (not finfo.issym() and not finfo.islnk() and
                is_inside_dir(finfo.path, parent_dir)):
            yield finfo
def resource_path(relative_path):
    """
    Get absolute path of the resource, works for dev and for PyInstaller
    """
    # PyInstaller stores the temp extraction folder in sys._MEIPASS; when
    # running from source, resolve relative to this module's directory.
    base_path = getattr(os.sys, '_MEIPASS', None)
    if base_path is None:
        base_path = os.path.dirname(__file__)
    return os.path.join(base_path, relative_path)
|
Python
| 0
|
@@ -284,16 +284,22 @@
0-9._-%5D*
+(@2x)?
%5C.%5Ba-zA-
|
58603e6d49b058c3c8ac70a5d1c50897d8409d82
|
Use new webhook URL in place of token
|
icinga_slack/webhook.py
|
icinga_slack/webhook.py
|
#!/usr/bin/env python
import argparse
import json
import urllib
import sys
from icinga_slack import __version__
# Attachment sidebar color per Icinga alert level; UNKNOWN doubles as the
# fallback for unrecognized levels.
alert_colors = {'UNKNOWN': '#6600CC',
                'CRITICAL': '#FF0000',
                'WARNING': '#FF9900',
                'OK': '#36A64F'}
class AttachmentField(dict):
    """One attachment field: a titled value, optionally rendered side-by-side."""

    def __init__(self, title, value, short=False):
        self.update(title=title, value=value, short=short)
class AttachmentFieldList(list):
    """List of AttachmentField objects belonging to one attachment."""

    def __init__(self, *args):
        # extend() replaces the original enumerate/append loop, whose index
        # ("count") was never used.
        self.extend(args)
class Attachment(dict):
    """One Slack message attachment; optional keys are only set when truthy."""

    def __init__(self, fallback, fields, text=None, pretext=None, color=None):
        self['fallback'] = fallback
        self['fields'] = fields
        for key, value in (('text', text), ('pretext', pretext), ('color', color)):
            if value:
                self[key] = value
class AttachmentList(list):
    """List of Attachment objects carried by a Message."""

    def __init__(self, *args):
        # extend() replaces the original enumerate/append loop, whose index
        # ("count") was never used.
        self.extend(args)
class Message(dict):
    """A Slack incoming-webhook payload; serialized directly to JSON.

    Python 2 module: uses urllib.urlencode/urlopen and print statements.
    """

    def __init__(self, channel, text, username, mrkdwn_in=["fields"],
                 icon_emoji=":ghost:", attachments=None):
        # NOTE(review): the 'attachments' parameter is accepted but ignored;
        # an empty AttachmentList is always installed.  The mutable default
        # for mrkdwn_in is read-only here, so it is benign.
        self['channel'] = channel
        self['text'] = text
        if mrkdwn_in:
            self['mrkdwn_in'] = mrkdwn_in
        if username:
            self['username'] = username
        if icon_emoji:
            self['icon_emoji'] = icon_emoji
        self['attachments'] = AttachmentList()

    def attach(self, message, host, level, action_url=None, notes_url=None, status_cgi_url=''):
        """Append one alert attachment; sidebar color comes from alert_colors."""
        fields = AttachmentFieldList()
        fields.append(AttachmentField("Message", message))
        # Host is rendered as a Slack link back to the status.cgi page.
        fields.append(AttachmentField("Host", "<{1}?host={0}|{0}>".format(host, status_cgi_url), True))
        fields.append(AttachmentField("Level", level, True))
        if action_url:
            fields.append(AttachmentField("Actions URL", action_url, True))
        if notes_url:
            fields.append(AttachmentField("Notes URL", notes_url, True))
        if level in alert_colors.keys():
            color = alert_colors[level]
        else:
            color = alert_colors['UNKNOWN']
        alert_attachment = Attachment(fallback=" {0} on {1} is {2}".format(message, host, level), color=color, fields=fields)
        self['attachments'].append(alert_attachment)

    def send(self, subdomain, token):
        """POST the payload to the team's incoming-webhook endpoint.

        Returns True when Slack answers "ok"; otherwise prints the error
        body and returns False.
        """
        data = urllib.urlencode({"payload": json.dumps(self)})
        response = urllib.urlopen('https://{0}.slack.com/services/hooks/incoming-webhook?token={1}'.format(subdomain, token), data).read()
        if response == "ok":
            return True
        else:
            print "Error: %s" % response
            return False
def parse_options():
    """Build the command-line interface and parse sys.argv.

    Returns:
        The parsed argparse namespace (single-letter attribute names).
    """
    parser = argparse.ArgumentParser(description="Send an Icinga Alert to Slack.com via a generic webhook integration")
    parser.add_argument('-c', metavar="CHANNEL", type=str, required=True, help="The channel to send the message to")
    parser.add_argument('-m', metavar="MESSAGE", type=str, required=True, help="The text of the message to send")
    parser.add_argument('-s', metavar="SUBDOMAIN", type=str, required=True, help="Slack.com subdomain")
    parser.add_argument('-t', metavar="TOKEN", type=str, required=True, help="The access token for your integration")
    parser.add_argument('-A', metavar="SERVICEACTIONURL", type=str, default=None, help="An optional action_url for this alert {default: None}")
    parser.add_argument('-H', metavar="HOST", type=str, default="UNKNOWN", help="An optional host the message relates to {default: UNKNOWN}")
    parser.add_argument('-L', metavar="LEVEL", type=str, choices=["OK", "WARNING", "CRITICAL", "UNKNOWN"], default="UNKNOWN",
                        help="An optional alert level {default: UNKNOWN}")
    parser.add_argument('-M', metavar="HEADERMESSAGE", type=str, default="I have received the following alert:",
                        help="A header message sent before the formatted alert {default: I have received the following alert:}")
    parser.add_argument('-N', metavar="SERVICENOTESURL", type=str, default=None, help="An optional notes_url for this alert {default: None}")
    parser.add_argument('-S', metavar="STATUSCGIURL", type=str, default='https://nagios.example.com/cgi-bin/icinga/status.cgi',
                        help="The URL of status.cgi for your Nagios/Icinga instance {default: https://nagios.example.com/cgi-bin/icinga/status.cgi}")
    parser.add_argument('-U', metavar="USERNAME", type=str, default="Icinga", help="Username to send the message from {default: Icinga}")
    parser.add_argument('-V', action='version', help="Print version information", version="version: {0}".format(__version__))
    args = parser.parse_args()
    return args
def main():
    """Parse CLI options, build and send the Slack message; exit 0 on success."""
    args = parse_options()
    message = Message(channel=args.c, text=args.M, username=args.U)
    message.attach(message=args.m, host=args.H, level=args.L, action_url=args.A, notes_url=args.N, status_cgi_url=args.S)
    if message.send(subdomain=args.s, token=args.t):
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == "__main__":
    main()
|
Python
| 0.000003
|
@@ -2396,32 +2396,27 @@
d(self,
-subdomain, token
+webhook_url
):%0A
@@ -2511,98 +2511,19 @@
pen(
-'https://%7B0%7D.slack.com/services/hooks/incoming-webhook?token=%7B1%7D'.format(subdomain, token)
+webhook_url
, da
@@ -3066,17 +3066,17 @@
ument('-
-s
+u
', metav
@@ -3083,117 +3083,18 @@
ar=%22
-SUBDOMAIN%22, type=str, required=True, help=%22Slack.com subdomain%22)%0A parser.add_argument('-t', metavar=%22TOKEN
+WEBHOOKURL
%22, t
@@ -3131,20 +3131,19 @@
The
-access token
+webhook URL
for
@@ -4877,38 +4877,26 @@
end(
-subdomain=args.s, token
+webhook_url
=args.
-t
+u
):%0A
|
73021d7b456c2cc76816ffebf37a440ec21f5019
|
Add the prog to the ArgumentParser arguments
|
icinga_slack/webhook.py
|
icinga_slack/webhook.py
|
#!/usr/bin/env python3
import argparse
import json
import urllib.parse
import urllib.request
import sys
from icinga_slack import __version__
# Attachment sidebar color per Icinga alert level; UNKNOWN doubles as the
# fallback for unrecognized levels.
alert_colors = {'UNKNOWN': '#6600CC',
                'CRITICAL': '#FF0000',
                'WARNING': '#FF9900',
                'OK': '#36A64F'}
class AttachmentField(dict):
    """A single field inside a Slack attachment."""

    def __init__(self, title, value, short=False):
        dict.__init__(self, title=title, value=value, short=short)
class AttachmentFieldList(list):
    """List of AttachmentField objects belonging to one attachment."""

    def __init__(self, *args):
        # extend() replaces the original enumerate/append loop, whose index
        # ("count") was never used.
        self.extend(args)
class Attachment(dict):
    """One Slack message attachment; optional keys are only set when truthy."""

    def __init__(self, fallback, fields, text=None, pretext=None, color=None):
        self['fallback'] = fallback
        self['fields'] = fields
        optional = (('text', text), ('pretext', pretext), ('color', color))
        for key, value in optional:
            if value:
                self[key] = value
class AttachmentList(list):
    """List of Attachment objects carried by a Message."""

    def __init__(self, *args):
        # extend() replaces the original enumerate/append loop, whose index
        # ("count") was never used.
        self.extend(args)
class Message(dict):
    """A Slack incoming-webhook payload; serialized directly to JSON."""

    def __init__(self, channel, text, username, mrkdwn_in=["fields"],
                 icon_emoji=":ghost:", attachments=None):
        # NOTE(review): the 'attachments' parameter is accepted but ignored;
        # an empty AttachmentList is always installed.  The mutable default
        # for mrkdwn_in is read-only here, so it is benign.
        self['channel'] = channel
        self['text'] = text
        if mrkdwn_in:
            self['mrkdwn_in'] = mrkdwn_in
        if username:
            self['username'] = username
        if icon_emoji:
            self['icon_emoji'] = icon_emoji
        self['attachments'] = AttachmentList()

    def attach(self, message, host, level, action_url=None, notes_url=None, status_cgi_url=''):
        """Append one alert attachment; sidebar color comes from alert_colors."""
        fields = AttachmentFieldList()
        fields.append(AttachmentField("Message", message))
        # Host is rendered as a Slack link back to the status.cgi page.
        fields.append(AttachmentField("Host", "<{1}?host={0}|{0}>".format(host, status_cgi_url), True))
        fields.append(AttachmentField("Level", level, True))
        if action_url:
            fields.append(AttachmentField("Actions URL", action_url, True))
        if notes_url:
            fields.append(AttachmentField("Notes URL", notes_url, True))
        if level in alert_colors.keys():
            color = alert_colors[level]
        else:
            color = alert_colors['UNKNOWN']
        alert_attachment = Attachment(fallback=" {0} on {1} is {2}".format(message, host, level), color=color, fields=fields)
        self['attachments'].append(alert_attachment)

    def send(self, webhook_url):
        """POST the payload to *webhook_url*.

        Returns True when Slack answers b'ok'; otherwise prints the error
        body and returns False.
        """
        data = urllib.parse.urlencode({"payload": json.dumps(self)})
        response = urllib.request.urlopen(webhook_url, data.encode('utf8')).read()
        if response == b'ok':
            return True
        else:
            print("Error: %s" % response)
            return False
def parse_options():
    """Build the command-line interface and parse sys.argv.

    Returns:
        The parsed argparse namespace (single-letter attribute names).
    """
    parser = argparse.ArgumentParser(
        prog="icinga_slack_webhook_notify",
        description="Send an Icinga Alert to Slack.com via a generic webhook integration"
    )
    parser.add_argument('-c', metavar="CHANNEL", type=str, required=True, help="The channel to send the message to")
    parser.add_argument('-m', metavar="MESSAGE", type=str, required=True, help="The text of the message to send")
    parser.add_argument('-u', metavar="WEBHOOKURL", type=str, required=True, help="The webhook URL for your integration")
    parser.add_argument('-A', metavar="SERVICEACTIONURL", type=str, default=None, help="An optional action_url for this alert {default: None}")
    parser.add_argument('-H', metavar="HOST", type=str, default="UNKNOWN", help="An optional host the message relates to {default: UNKNOWN}")
    parser.add_argument('-L', metavar="LEVEL", type=str, choices=["OK", "WARNING", "CRITICAL", "UNKNOWN"], default="UNKNOWN",
                        help="An optional alert level {default: UNKNOWN}")
    parser.add_argument('-M', metavar="HEADERMESSAGE", type=str, default="I have received the following alert:",
                        help="A header message sent before the formatted alert {default: I have received the following alert:}")
    parser.add_argument('-N', metavar="SERVICENOTESURL", type=str, default=None, help="An optional notes_url for this alert {default: None}")
    parser.add_argument('-S', metavar="STATUSCGIURL", type=str, default='https://nagios.example.com/cgi-bin/icinga/status.cgi',
                        help="The URL of status.cgi for your Nagios/Icinga instance {default: https://nagios.example.com/cgi-bin/icinga/status.cgi}")
    parser.add_argument('-U', metavar="USERNAME", type=str, default="Icinga", help="Username to send the message from {default: Icinga}")
    parser.add_argument('-V', action='version', help="Print version information", version="version: {0}".format(__version__))
    args = parser.parse_args()
    return args
def main():
    """Parse CLI options, build and send the Slack message; exit 0 on success."""
    args = parse_options()
    message = Message(channel=args.c, text=args.M, username=args.U)
    message.attach(message=args.m, host=args.H, level=args.L, action_url=args.A, notes_url=args.N, status_cgi_url=args.S)
    if message.send(webhook_url=args.u):
        sys.exit(0)
    else:
        sys.exit(1)


if __name__ == "__main__":
    main()
|
Python
| 0
|
@@ -2786,16 +2786,69 @@
tParser(
+%0A prog=%22icinga_slack_webhook_notify%22,%0A
descript
@@ -2912,32 +2912,37 @@
ook integration%22
+%0A
)%0A parser.add
|
6d425b617a28b2eb35d53f35f5136148aa1f2ef6
|
Add relative import for the parser
|
source/champollion/__init__.py
|
source/champollion/__init__.py
|
# :coding: utf-8
import os
from ._version import __version__
from .directive.data import AutoDataDirective
from .directive.function import AutoFunctionDirective
from .directive.class_ import AutoClassDirective
from .directive.method import AutoMethodDirective
from .directive.attribute import AutoAttributeDirective
from .viewcode import (
add_source_code_links,
create_code_pages,
create_missing_code_link
)
import parser
def parse_js_source(app):
    """Parse the javascript source path.

    Sphinx 'builder-inited' callback: resolves the js_source config value
    and stores the parsed environment on the build environment.
    """
    # NOTE(review): 'parser' comes from the bare 'import parser' above, which
    # resolves to the stdlib 'parser' module rather than this package's
    # parser — likely needs 'from .parser import get_environment'; confirm.
    path = os.path.abspath(app.config.js_source)
    app.env.js_environment = parser.get_environment(path)
def setup(app):
    """Register the javascript autodoc directives.

    Sphinx extension entry point; returns the extension metadata dict.
    """
    # Path to the JS sources; consumed by parse_js_source at builder-inited.
    app.add_config_value("js_source", None, "env")

    app.connect("builder-inited", parse_js_source)
    app.connect("doctree-read", add_source_code_links)
    app.connect("html-collect-pages", create_code_pages)
    app.connect("missing-reference", create_missing_code_link)

    # Autodoc-style directives registered into the 'js' domain.
    app.add_directive_to_domain("js", "autodata", AutoDataDirective)
    app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective)
    app.add_directive_to_domain("js", "autoclass", AutoClassDirective)
    app.add_directive_to_domain("js", "automethod", AutoMethodDirective)
    app.add_directive_to_domain("js", "autoattribute", AutoAttributeDirective)
    # app.add_directive_to_domain("js", "automodule", AutoModuleDirective)

    return {
        "version": __version__
    }
|
Python
| 0.000003
|
@@ -422,21 +422,43 @@
k%0A)%0A
-import parser
+from .parser import get_environment
%0A%0A%0Ad
@@ -608,15 +608,8 @@
t =
-parser.
get_
|
feb745334c4b7ba97a54c8e2b1862888ed9369ea
|
fix test: str->bytes
|
test.py
|
test.py
|
#!/usr/bin/env python
# Standard library imports
import unittest
import random
import hashlib
# Custom SHA-1 library
import sha1
class TestSha1(unittest.TestCase):
    """TestSha1 class

    Test case for the custom SHA-1 implementation.
    (Python 2 module: print statements; str(bytearray) yields raw bytes.)
    """

    def test_similar(self):
        """Test Similar SHA-1 Inputs

        Tests sets of messages with 1 bit of difference. Ensures that all
        messages produce unique hashes.

        Raises:
            AssertionError if test fails.
        """
        print '\n>>> running: test_similar'
        first_msg = bytearray(get_random_bytes())
        modified_msg = bytearray()

        # Pick a random byte, modify it by one bit
        # NOTE(review): adding 1 overflows when the chosen byte is 0xFF
        # (bytearray.append rejects 256), making this test potentially flaky.
        byte_to_modify = random.randrange(0, len(first_msg))
        for i, byte in enumerate(first_msg):
            augmentor = 1 if i == byte_to_modify else 0
            modified_msg.append(byte + augmentor)

        first_digest = sha1.sha1(str(first_msg))
        modified_digest = sha1.sha1(str(modified_msg))

        print '... test_similar: checking digest differences'
        self.assertNotEqual(first_digest, modified_digest)
        print '... test_similar: success'

    def test_repeatable(self):
        """Test SHA-1 Repeatability

        Runs the SHA-1 hashing function multiple times to ensure the same
        outcome for any identical message input.

        Raises:
            AssertionError if test fails.
        """
        print '\n>>> running: test_repeatable'
        msg = bytearray(get_random_bytes())

        first_digest = sha1.sha1(str(msg))
        second_digest = sha1.sha1(str(msg))

        print '... test_repeatable: checking for identical digests'
        self.assertEqual(first_digest, second_digest)
        print '... test_repeatable: success'

    def test_comparison(self):
        """Test SHA-1 Library Accuracy

        Runs the custom SHA-1 hashing function implementation with other
        SHA-1 functions contained in the Python hashlib library.

        Raises:
            AssertionError if test fails.
        """
        print '\n>>> running: test_comparison'
        msg = bytearray(get_random_bytes())

        # Compared against hexdigest(), so sha1.sha1 presumably returns a
        # hex string — confirm against the sha1 module.
        custom_sha1_digest = sha1.sha1(str(msg))
        stdlib_sha1_digest = hashlib.sha1(str(msg)).hexdigest()

        print '... test_comparison: checking for identical digests'
        self.assertEqual(custom_sha1_digest, stdlib_sha1_digest)
        print '... test_comparison: success'
def get_random_bytes():
    """Get Random Bits

    Generates a sequence of random bits of a random size between 1 and 1000
    bits in the sequence.

    Returns:
        A stream of random bits.
    """
    # NOTE(review): despite the docstring, this yields 1..999 random *bytes*
    # (getrandbits(8) produces values 0-255), not individual bits.
    size = random.randrange(1, 1000)
    for _ in xrange(size):  # xrange: this module targets Python 2
        yield random.getrandbits(8)


if __name__ == '__main__':
    unittest.main()
|
Python
| 0.000023
|
@@ -16,16 +16,72 @@
python%0A%0A
+from __future__ import print_function, unicode_literals%0A
# Standa
@@ -145,16 +145,68 @@
ashlib%0A%0A
+try:%0A range = xrange%0Aexcept NameError:%0A pass%0A%0A
# Custom
@@ -606,33 +606,33 @@
%22%22%0A print
-
+(
'%5Cn%3E%3E%3E running:
@@ -644,16 +644,17 @@
similar'
+)
%0A
@@ -1030,19 +1030,21 @@
a1.sha1(
-str
+bytes
(first_m
@@ -1084,19 +1084,21 @@
a1.sha1(
-str
+bytes
(modifie
@@ -1111,33 +1111,33 @@
)%0A%0A print
-
+(
'... test_simila
@@ -1167,16 +1167,17 @@
erences'
+)
%0A
@@ -1234,33 +1234,33 @@
)%0A%0A print
-
+(
'... test_simila
@@ -1262,32 +1262,33 @@
imilar: success'
+)
%0A%0A def test_r
@@ -1548,33 +1548,33 @@
%22%22%0A print
-
+(
'%5Cn%3E%3E%3E running:
@@ -1589,16 +1589,17 @@
eatable'
+)
%0A
@@ -1661,35 +1661,37 @@
est = sha1.sha1(
-str
+bytes
(msg))%0A s
@@ -1711,27 +1711,29 @@
= sha1.sha1(
-str
+bytes
(msg))%0A%0A
@@ -1733,33 +1733,33 @@
)%0A%0A print
-
+(
'... test_repeat
@@ -1787,32 +1787,33 @@
entical digests'
+)
%0A self.as
@@ -1857,33 +1857,33 @@
)%0A%0A print
-
+(
'... test_repeat
@@ -1896,16 +1896,17 @@
success'
+)
%0A%0A de
@@ -2192,17 +2192,17 @@
print
-
+(
'%5Cn%3E%3E%3E r
@@ -2225,16 +2225,17 @@
parison'
+)
%0A
@@ -2307,27 +2307,29 @@
= sha1.sha1(
-str
+bytes
(msg))%0A
@@ -2365,19 +2365,21 @@
ib.sha1(
-str
+bytes
(msg)).h
@@ -2395,33 +2395,33 @@
)%0A%0A print
-
+(
'... test_compar
@@ -2457,16 +2457,17 @@
digests'
+)
%0A
@@ -2538,17 +2538,17 @@
print
-
+(
'... tes
@@ -2569,16 +2569,17 @@
success'
+)
%0A%0Adef ge
@@ -2828,17 +2828,16 @@
or _ in
-x
range(si
|
1caace2631f8e9c38cf0adfb1179a5260dcd3c33
|
Change output_all_unitprot to allow multi ids for some proteins.
|
tools/management/commands/output_all_uniprot.py
|
tools/management/commands/output_all_uniprot.py
|
from django.core.management.base import BaseCommand, CommandError
from django.core.management import call_command
from django.conf import settings
from django.db import connection
from django.db.models import Q
from django.template.loader import render_to_string
from protein.models import Protein
from residue.models import ResidueGenericNumber, ResidueGenericNumberEquivalent
from common import definitions
from common.selection import SelectionItem
from common.alignment_gpcr import Alignment
import xlsxwriter, xlrd
import logging, json, os
class Command(BaseCommand):
    # Django management command: dumps entry_name -> UniProt accession for
    # all SWISSPROT/TREMBL proteins with a uniprot web link to uniprot.json.
    help = "Output all uniprot mappings"

    logger = logging.getLogger(__name__)

    def handle(self, *args, **options):
        """Write a JSON mapping of protein entry names to UniProt accessions."""
        #Get the proteins
        # NOTE(review): the file handle is never closed; a with-block would
        # guarantee the dump is flushed.
        f = open('uniprot.json', 'w')
        ps = Protein.objects.filter(Q(source__name='SWISSPROT') | Q(source__name='TREMBL'),web_links__web_resource__slug='uniprot').all().prefetch_related('web_links__web_resource')
        print('total:',len(ps))
        mapping = {}
        for p in ps:
            # NOTE(review): .get() raises MultipleObjectsReturned when a
            # protein has more than one uniprot link — confirm uniqueness.
            uniprot = p.web_links.get(web_resource__slug='uniprot')
            mapping[p.entry_name] = uniprot.index
        json.dump(mapping,f, indent=4, separators=(',', ': '))
        # print("Seqs: {}\tNot matching: {}".format(num_of_sequences, num_of_non_matching_sequences))
        # open("uniprot.txt", "w").write()
|
Python
| 0
|
@@ -1052,19 +1052,22 @@
b_links.
-get
+filter
(web_res
@@ -1088,16 +1088,50 @@
niprot')
+.values_list('index', flat = True)
%0A
@@ -1163,21 +1163,22 @@
%5D =
+list(
uniprot
-.index
+)%0A
%0A%0A
|
595767b396fa5fc54a924cc8e2565505eb8389a8
|
Mangle expections to avoid matching line twice
|
ilogue/fexpect/tests.py
|
ilogue/fexpect/tests.py
|
import unittest
import sys
from fabric.api import *
def runtest(testclass):
suite = unittest.TestLoader().loadTestsFromTestCase(testclass)
testResult = unittest.TextTestRunner(verbosity=2).run(suite)
if not testResult.wasSuccessful():
sys.exit('[fexpect test wrapper] One or more tests failed!')
class FexpectTests(unittest.TestCase):
    def test_one_expectation(self):
        """A single expect() answer satisfies one remote shell prompt."""
        cmd = 'echo "Hello" && read NAME && echo "Hi $NAME."'
        from ilogue.fexpect import expect, expecting, run
        expectation = expect('Hello','answer')
        with expecting(expectation):
            output = run(cmd)
        # The echoed reply proves 'answer' was fed to the prompt.
        self.assertIn('answer',output)
    def test_two_expectations(self):
        """Two expectations (combined with +) answer two prompts in one command."""
        cmd = 'echo "Hello" && read ONE && echo "bladiebla" && read TWO && echo "First $ONE than $TWO."'
        from ilogue.fexpect import expect, expecting, run
        exp1 = expect('Hello','111')
        exp2 = expect('bladiebla','222')
        with expecting(exp1+exp2):
            output = run(cmd)
        self.assertIn('111',output)
        self.assertIn('222',output)
    def test_order_inconsequential(self):
        """Expectations match by prompt text, not by the order they were combined."""
        #sequence shouldn't matter
        cmd = 'echo "Hello" && read ONE && echo "bladiebla" && read TWO && echo "First $ONE than $TWO."'
        from ilogue.fexpect import expect, expecting, run
        exp1 = expect('Hello','111')
        exp2 = expect('bladiebla','222')
        # Same as test_two_expectations but combined as exp2+exp1.
        with expecting(exp2+exp1):
            output = run(cmd)
        self.assertIn('111',output)
        self.assertIn('222',output)
def test_exit_after_expectation(self):
import time
from StringIO import StringIO
#sequence shouldn't matter
script = "#!/usr/bin/python\nimport time\nfor i in range(1,8):\n\tprint(i)\n\ttime.sleep(1)"
cmd = 'python /tmp/test.py'
put(StringIO(script),'/tmp/test.py')
from ilogue.fexpect import expect, expecting, run
exp1 = expect('Hello','111')
exp2 = expect('3','expected',exitAfter=0)
t = time.time()
with expecting(exp1+exp2):
output = run(cmd)
elapsed = time.time() - t
self.assertGreater(elapsed,2)
self.assertLess(elapsed,4)
def test_one_expectation_local(self):
cmd = 'echo "Hello" && read NAME && echo "Hi $NAME."'
from ilogue.fexpect import expect, expecting, local
expectation = expect('Hello','answer')
with expecting(expectation):
output = local(cmd,capture=True)
self.assertIn('answer',output)
def test_can_change_shell(self):
cmd = 'ps c && echo "Hello" && read NAME && echo "Hi $NAME."'
from ilogue.fexpect import expect, expecting, run
import fabric
expectation = expect('Hello','answer')
backupenv = dict(fabric.state.env)
fabric.state.env.shell = 'sh -c'
with expecting(expectation):
output = run(cmd)
fabric.state.env.update(backupenv)
self.assertIn('00 sh',output)
def test_mixed_case(self):
cmd1 = 'expr 5 + 5'
cmd2 = 'read -p Name: NAME && echo Hi $NAME.'
cmd3 = 'expr 18 / 3'
from ilogue.fexpect import expect, expecting, run
import fabric
output1 = run(cmd1)
expectation = expect('Name:','Bill')
with expecting(expectation):
output2 = run(cmd2)
output3 = run(cmd3)
self.assertIn('10',output1)
self.assertIn('Hi Bill.',output2)
self.assertIn('6',output3)
def test_quotes(self):
cmd1 = 'read -p "Prompt1:" RESP1 && echo Received $RESP1.'
cmd2 = "read -p 'Prompt2:' RESP2 && echo Received $RESP2."
cmd3 = """read -p 'Prompt3:' -n "20" RESP3 && echo Received $RESP3."""
from ilogue.fexpect import expect, expecting, run
import fabric
expectation = []
expectation += expect('Prompt1:','Foo')
expectation += expect('Prompt2:','Bar')
expectation += expect('Prompt3:','Baz')
with expecting(expectation):
output1 = run(cmd1)
output2 = run(cmd2)
output3 = run(cmd3)
self.assertIn('Received Foo',output1)
self.assertIn('Received Bar',output2)
self.assertIn('Received Baz',output3)
def test_controlchar(self):
cmd = 'python'
from ilogue.fexpect import controlchar, expect, expecting, run
import fabric
expectation = []
expectation += expect(">>>", controlchar('C'))
expectation += expect('KeyboardInterrupt', controlchar('D'))
with expecting(expectation):
output = run(cmd)
self.assertIn('KeyboardInterrupt',output)
def tryOrFailOnPrompt(self,method,args):
try:
with settings(abort_on_prompts=True):
result = method(*args)
except SystemExit as promptAbort:
self.fail("There was an unexpected (password) prompt.")
return result
|
Python
| 0.999998
|
@@ -4978,8 +4978,552 @@
result %0A
+%0A def test_multimatch(self):%0A %22%22%22 Match same prompt but with different responses %22%22%22%0A%0A cmd = 'echo %22name%22 && read NAME1 && echo %22name is $NAME1%22 && echo %22name%22 && read NAME2 && echo %22name is $NAME2%22'%0A%0A from ilogue.fexpect import expecting, expect, run%0A%0A expectation = %5B%5D%0A expectation += expect('name', 'Ford')%0A expectation += expect('name', 'Arthur')%0A%0A with expecting(expectation):%0A output = run(cmd)%0A%0A self.assertIn('Ford', output)%0A self.assertIn('Arthur', output)%0A
|
d908fd35c751ea47e7e5162f93a9d64a7fdc2f35
|
Test escape of single quotes.
|
test_alias.py
|
test_alias.py
|
import unittest
import json
import io
import alias
from alias import Alias, Aliases, JSONBackend
class FakeAliasDatabase():
def __init__(self):
self.aliases = []
def add_alias(self, alias):
self.aliases.append(alias)
def get_aliases(self):
return self.aliases
def make_fake_aliases():
return {'lst': Alias("lst", "ls -lhar --sort time"),
'lss': Alias("lss", "ls -lhar --sort time")}
SIMPLE_ALIAS_JSON = """
{
"aliases": {
"lst" :{
"command": "ls -lhar --sort time",
"category": null
}
}
}
"""
def get_simple_alias_json_stringio():
return io.StringIO(SIMPLE_ALIAS_JSON)
class TestAliasObj(unittest.TestCase):
def test_comparisons(self):
a1 = Alias('lst', 'ls -lhar --sort time')
a2 = Alias('lst', 'ls -lhar --sort time')
a3 = Alias('lss', 'ls -lhar --sort size')
self.assertEqual(a1, a2)
self.assertNotEqual(a1, a3)
class TestJSON(unittest.TestCase):
def test_dicts_to_aliases(self):
example = {
"lst": {
'command': 'ls -lhar --sort time',
'category': None
},
"lss": {
'command': 'ls -lhar --sort size',
'category': None
}
}
expected = {
'lst': Alias('lst', 'ls -lhar --sort time'),
'lss': Alias('lss', 'ls -lhar --sort size')
}
result = alias.dicts_to_aliases(example)
self.assertDictEqual(result, expected)
def test_decode(self):
f = get_simple_alias_json_stringio()
json.load(f, cls=alias.AliasesJSONDecoder)
def make_aliases(f=None):
if f is None:
f = io.StringIO()
backend = JSONBackend(f)
aliases = Aliases(backend)
return aliases
class TestAlias(unittest.TestCase):
def test_parse_alias(self):
f = get_simple_alias_json_stringio()
aliases = make_aliases(f)
script = aliases.get_sh_script()
self.assertEqual(script, 'alias lst="ls -lhar --sort time"\n')
def test_add_alias(self):
f = io.StringIO('{"aliases": {}}')
aliases = make_aliases(f)
aliases.add_alias(Alias("lst", "ls -lhar --sort time"))
aliases.add_alias(Alias("lss", "ls -lhar --sort size"))
script = aliases.get_sh_script()
self.assertEqual('alias lss="ls -lhar --sort size"\n' +
'alias lst="ls -lhar --sort time"\n', script)
def test_change_alias(self):
f = io.StringIO('{"aliases": {}}')
aliases = make_aliases(f)
aliases.add_alias(Alias("lss", "ls -lhar --sort size"))
aliases.add_alias(Alias("lss", "ls -lha --sort size"))
script = aliases.get_sh_script()
self.assertEqual('alias lss="ls -lha --sort size"\n', script)
def test_backend(self):
f = io.StringIO()
backend = JSONBackend(f)
backend.write_aliases(make_fake_aliases())
def test_containsdoublequotes(self):
aliases = make_aliases()
aliases.add_alias(Alias('test', 'echo "This contains quotes"'))
result = aliases.get_sh_script()
expected = "alias test=\"echo \\\"This contains quotes\\\"\"\n"
self.assertEqual(expected, result)
def test_contains_brackets(self):
aliases = make_aliases()
aliases.add_alias(Alias('hasbrackets', 'echo (This is in brackets)'))
result = aliases.get_sh_script()
expected = 'alias hasbrackets="echo \\(This is in brackets\\)"\n'
self.assertEqual(expected, result)
|
Python
| 0
|
@@ -3018,16 +3018,324 @@
contains
+_singlequote(self):%0A aliases = make_aliases()%0A%0A aliases.add_alias(Alias('test', %22echo 'This contains quotes'%22))%0A result = aliases.get_sh_script()%0A%0A expected = %22%22%22alias test=%22echo %5C%5C%5C'This contains quotes%5C%5C%5C'%22%5Cn%22%22%22%0A self.assertEqual(expected, result)%0A%0A def test_contains_
doublequ
|
1b06091101c119f30eb5eabb2d2638fab0e8f658
|
Test modified to work with renamed debug function
|
test_debug.py
|
test_debug.py
|
from bullsandcows import isdebugmode
def test_isdebugmode():
assert isdebugmode() == 0, "program is in debug mode, this should not be commited"
|
Python
| 0
|
@@ -25,20 +25,16 @@
isdebug
-mode
%0D%0A%0D%0Adef
@@ -45,20 +45,16 @@
_isdebug
-mode
():%0D%0A
@@ -68,20 +68,16 @@
isdebug
-mode
() == 0,
|
f1c47f99255bc6ff2dc7819d72ceafbecaa328a4
|
Fix comment formatting
|
imapclient/test/util.py
|
imapclient/test/util.py
|
# Copyright (c) 2014, Menno Smits
# Released subject to the New BSD License
# Please see http://en.wikipedia.org/wiki/BSD_licenses
from __future__ import unicode_literals
def find_unittest2():
import unittest
if hasattr(unittest, 'skip') and hasattr(unittest, 'loader'):
return unittest # unittest from stdlib is unittest2, use that
try:
import unittest2 # try for a separately installed unittest2 package
except ImportError:
raise ImportError('unittest2 not installed and unittest in standard library is not unittest2')
else:
return unittest2
unittest = find_unittest2()
def patch_TestCase():
TestCase = unittest.TestCase
# Older versions of unittest2 don't have
# TestCase.assertRaisesRegex # and newer version raises warnings
# when you use # assertRaisesRegexp. This helps deal with the
# mismatch.
if not hasattr(TestCase, 'assertRaisesRegex'):
TestCase.assertRaisesRegex = TestCase.assertRaisesRegexp
patch_TestCase()
|
Python
| 0.000003
|
@@ -758,18 +758,16 @@
sesRegex
- #
and new
@@ -811,18 +811,16 @@
you use
- #
assertR
|
6e35f8778b3293a9b2ad60624637460146f755ba
|
add njobs parameters
|
implement/tunemodels.py
|
implement/tunemodels.py
|
import sys
import os
sys.path.insert(0, os.path.abspath('..'))
from implement.decisiontreemodel import DecisionTreeModel
from sklearn.grid_search import GridSearchCV, RandomizedSearchCV
from evaluation.sklearnmape import mean_absolute_percentage_error_scoring
import logging
from utility.logger_tool import Logger
from datetime import datetime
from knnmodel import KNNModel
from utility.duration import Duration
from svmregressionmodel import SVMRegressionModel
from randomforestmodel import RandomForestModel
import numpy as np
from gradientboostingmodel import GrientBoostingModel
class TuneModel:
def __init__(self):
self.application_start_time = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
logfile_name = r'logs/tunealgorithm_' +self.application_start_time + '.txt'
_=Logger(filename=logfile_name,filemode='w',level=logging.DEBUG)
self.durationtool = Duration()
self.do_random_gridsearch = True
self.n_iter_randomsearch = 5
return
def runGridSearch(self, model):
logging.debug("run grid search on model: {}".format(model.__class__.__name__))
logging.debug("cross validation strategy: {}".format(model.holdout_split))
logging.debug("used features: {}".format(model.usedFeatures))
logging.debug("tuned parameters: {}".format(model.getTunedParamterOptions()))
features,labels,cv = model.getFeaturesLabel()
# do grid search
if self.do_random_gridsearch:
estimator = RandomizedSearchCV(model.clf, model.getTunedParamterOptions(), cv=cv,
scoring=mean_absolute_percentage_error_scoring, verbose = 500, n_iter=self.n_iter_randomsearch)
else:
estimator = GridSearchCV(model.clf, model.getTunedParamterOptions(), cv=cv,
scoring=mean_absolute_percentage_error_scoring, verbose = 500)
estimator.fit(features, labels)
model.clf = estimator.best_estimator_
model.save_final_model = True
model.save_model()
# model.dispFeatureImportance()
logging.debug('estimaator parameters: {}'.format(estimator.get_params))
logging.debug('Best parameters: {}'.format(estimator.best_params_))
logging.debug('Best Scores: {}'.format(-estimator.best_score_))
logging.debug('Score grid: {}'.format(estimator.grid_scores_ ))
for i in estimator.grid_scores_ :
logging.debug('parameters: {}'.format(i.parameters ))
logging.debug('mean_validation_score: {}'.format(np.absolute(i.mean_validation_score)))
logging.debug('cv_validation_scores: {}'.format(np.absolute(i.cv_validation_scores) ))
return
def get_model(self, model_id):
model_dict = {}
model_dict[1] =DecisionTreeModel
model_dict[2] =KNNModel
model_dict[3] =SVMRegressionModel
model_dict[4] = RandomForestModel
model_dict[5] = GrientBoostingModel
return model_dict[model_id]()
def run(self):
model_id = 5
model = self.get_model(model_id)
model.application_start_time = self.application_start_time
self.durationtool.start()
self.runGridSearch(model)
self.durationtool.end()
return
if __name__ == "__main__":
obj= TuneModel()
obj.run()
|
Python
| 0.000001
|
@@ -975,17 +975,43 @@
earch =
-5
+260%0A self.n_jobs = 1
%0A
@@ -1591,32 +1591,52 @@
ptions(), cv=cv,
+ n_jobs=self.n_jobs,
%0A
@@ -1840,16 +1840,36 @@
, cv=cv,
+n_jobs=-self.n_jobs,
%0A
|
e1088b744b9b8040c3304f75980adb60ffb6d1c5
|
Fix python3 compatibility.
|
khal/calendar_display.py
|
khal/calendar_display.py
|
# vim: set ts=4 sw=4 expandtab sts=4 fileencoding=utf-8:
# Copyright (c) 2013-2015 Christian Geier et al.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import calendar
import datetime
from click import style
from terminal import urwid_to_click
from .compat import VERSION
def month_abbr(month_no):
"""calendar.month_abbr[] are str (text) in python3 and str (bytes) in
python2 """
if VERSION == 2:
# TODO check if how they are really encoded
return calendar.month_abbr[month_no].decode('utf-8')
elif VERSION == 3:
return calendar.month_abbr[month_no]
def getweeknumber(date):
"""return iso week number for datetime.date object
:param date: date
:type date: datetime.date()
:return: weeknumber
:rtype: int
"""
return datetime.date.isocalendar(date)[1]
def get_event_color(event, default_color):
"""Because multi-line lambdas would be un-Pythonic
"""
if event.color == '':
return default_color
return event.color
def str_highlight_day(day, devents, hmethod, default_color, multiple, color):
"""returns a string with day highlighted according to configuration
"""
dstr = str(day.day).rjust(2)
if color == '':
dcolors = list(set(map(lambda x: get_event_color(x, default_color), devents)))
if len(dcolors) > 1:
if multiple == '':
color1 = urwid_to_click(dcolors[0])
color2 = urwid_to_click(dcolors[1])
if hmethod == "foreground" or hmethod == "fg":
return style(dstr[:1], fg=color1) + style(dstr[1:], fg=color2)
else:
return style(dstr[:1], bg=color1) + style(dstr[1:], bg=color2)
else:
dcolor = urwid_to_click(multiple)
else:
if devents[0].color == '':
dcolorv = default_color
if dcolorv != '':
dcolor = urwid_to_click(dcolorv)
else:
dcolor = urwid_to_click(devents[0].color)
else:
dcolor = urwid_to_click(color)
if dcolor != '':
if hmethod == "foreground" or hmethod == "fg":
return style(dstr, fg=dcolor)
else:
return style(dstr, bg=dcolor)
return dstr
def str_week(week, today, collection=None,
hmethod=None, default_color=None, multiple=None, color=None,
highlight_event_days=0, locale=None):
"""returns a string representing one week,
if for day == today colour is reversed
:param week: list of 7 datetime.date objects (one week)
:type day: list()
:param today: the date of today
:type today: datetime.date
:return: string, which if printed on terminal appears to have length 20,
but may contain ascii escape sequences
:rtype: str
"""
strweek = ''
for day in week:
if day == today:
day = style(str(day.day).rjust(2), reverse=True)
elif highlight_event_days != 0:
localize = locale['local_timezone'].localize
start = localize(datetime.datetime.combine(day, datetime.time.min))
end = localize(datetime.datetime.combine(day, datetime.time.max))
devents = collection.get_datetime_by_time_range(start, end) + collection.get_allday_by_time_range(day)
if len(devents) > 0:
day = str_highlight_day(day, devents, hmethod, default_color,
multiple, color)
else:
day = str(day.day).rjust(2)
else:
day = str(day.day).rjust(2)
strweek = strweek + day + ' '
return strweek
def vertical_month(month=datetime.date.today().month,
year=datetime.date.today().year,
today=datetime.date.today(),
weeknumber=False,
count=3,
firstweekday=0,
collection=None,
hmethod='fg',
default_color='',
multiple='',
color='',
highlight_event_days=0,
locale=None):
"""
returns a list() of str() of weeks for a vertical arranged calendar
:param month: first month of the calendar,
if non given, current month is assumed
:type month: int
:param year: year of the first month included,
if non given, current year is assumed
:type year: int
:param today: day highlighted, if non is given, current date is assumed
:type today: datetime.date()
:param weeknumber: if not False the iso weeknumber will be shown for each
week, if weeknumber is 'right' it will be shown in its
own column, if it is 'left' it will be shown interleaved
with the month names
:type weeknumber: str/bool
:returns: calendar strings, may also include some
ANSI (color) escape strings
:rtype: list() of str()
"""
khal = list()
w_number = ' ' if weeknumber == 'right' else ''
calendar.setfirstweekday(firstweekday)
_calendar = calendar.Calendar(firstweekday)
khal.append(
style(' ' + calendar.weekheader(2) + ' ' + w_number, bold=True)
)
for _ in range(count):
for week in _calendar.monthdatescalendar(year, month):
new_month = len([day for day in week if day.day == 1])
strweek = str_week(week, today, collection, hmethod, default_color,
multiple, color, highlight_event_days, locale)
if new_month:
m_name = style(month_abbr(week[6].month).ljust(4), bold=True)
elif weeknumber == 'left':
m_name = \
style(' {:2} '.format(getweeknumber(week[0])), bold=True)
else:
m_name = ' '
if weeknumber == 'right':
w_number = \
style(' {}'.format(getweeknumber(week[0])), bold=True)
else:
w_number = ''
sweek = m_name + strweek + w_number
if sweek != khal[-1]:
khal.append(sweek)
month = month + 1
if month > 12:
month = 1
year = year + 1
return khal
|
Python
| 0
|
@@ -1260,16 +1260,17 @@
e%0A%0Afrom
+.
terminal
|
96261b3c277cb2f694fb5cc2f7cbe29847ff1a53
|
change the receiver
|
SyncEmailNotification.py
|
SyncEmailNotification.py
|
__author__ = 'chuqiao'
import smtplib
import base64
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
def viewlog(file):
file = open("syncsolr.log")
file.seek(0,2)# Go to the end of the file
while True:
line = file.readline()
if "***Finished synchronizing***" in line:
mailUpdate()
elif "***Synchronize failed***" in line:
mailAlert()
def mailUpdate():
fromaddr = 'bioeventsportal@gmail.com'
toaddr = 'info@bioevents-portal.org'
msg = MIMEMultipart()
msg['From'] = fromaddr
msg['To'] = toaddr
msg['Subject'] = "[Sync-reports] Synchronise two Solrs"
body = '''The IAnn Solr is now synchronised with the Bioevents Solr.
'''
msg.attach(MIMEText(body, 'plain'))
username = 'bioeventsportal'
password = base64.b64decode('YmlvZXZlbnRzMzIx')
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.login(username, password)
text = msg.as_string()
server.sendmail(fromaddr, toaddr, text)
server.quit()
def mailAlert():
fromaddr = 'bioeventsportal@gmail.com'
toaddr = 'info@bioevents-portal.org'
msg = MIMEMultipart()
msg['From'] = fromaddr
msg['To'] = toaddr
msg['Subject'] = "[Sync-reports]Synchronise two Solrs failed"
body = '''The synchronisation of two Solrs failed.
'''
msg.attach(MIMEText(body, 'plain'))
username = 'bioeventsportal'
password = base64.b64decode('YmlvZXZlbnRzMzIx')
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.login(username, password)
text = msg.as_string()
server.sendmail(fromaddr, toaddr, text)
server.quit()
if __name__ == '__main__':
viewlog(file)
|
Python
| 0.000017
|
@@ -514,35 +514,28 @@
fo@bioevents
--portal.org
+.pro
'%0A%0A msg =
@@ -1184,19 +1184,12 @@
ents
--portal.org
+.pro
'%0A%0A
|
474dfd3aa9d03ed6bbda47078523badcc7909664
|
Reorganize and refactor
|
scripts/observations/scrape/CalFloraScraper.py
|
scripts/observations/scrape/CalFloraScraper.py
|
from selenium import webdriver
import pandas as pd
import argparse
import PyFloraBook.web.communication as scraping
import PyFloraBook.input_output.data_coordinator as dc
# ---------------- INPUT ----------------
# Parse arguments
parser = argparse.ArgumentParser(
description='Scrape CalFlora for species counts for given family')
parser.add_argument("-f", "--families", nargs='+',
help="Names of the families to be analyzed.")
args = parser.parse_args()
families = args.families
# ---------------- SCRAPING ----------------
print("Opening browser...")
browser = webdriver.Firefox()
browser.set_window_size(500, 300)
browser.set_window_position(200, 200)
SITE_NAME = "CalFlora"
OUTPUT_PATH = dc.locate_raw_data_folder() / SITE_NAME
for family in families:
# Load the webpage
try:
browser.get(
"http://www.calflora.org/entry/wgh.html#srch=t&family="
+ family +
"&group=none&fmt=simple&y=39.493&x=-119.6979&z=5&rid=rs940")
except:
pass # lol
scraping.wait_for_load(browser, "CLASS_NAME", "familyColumn")
# Download the rows in the species data table
# Next we skip the first three rows because they contain nonsense
data_table = browser.find_element_by_id("resultSlot")
data_rows = data_table.find_elements_by_tag_name("tr")[3:]
# Extract the species counts
species_list = [
(row.find_element_by_class_name("column1Simple").text,
int(row.find_element_by_class_name("observColumn").text.split()[0]))
for row in data_rows
]
# ---------------- ANALYSIS ----------------
# Convert to friendly format for writing CSV
family_results_path = str(OUTPUT_PATH / (family + "_raw_data.csv"))
all_species = pd.DataFrame(species_list, columns=["full_name", "count"])
all_species.to_csv(
family_results_path,
columns=['full_name', 'count'], index=False
)
# For whatever reason, it won't load the next page unless I do this
browser.get("about:blank")
browser.quit()
|
Python
| 0.000002
|
@@ -168,16 +168,84 @@
as dc%0A%0A%0A
+# ---------------- GLOBALS ----------------%0ASITE_NAME = %22CalFlora%22%0A%0A
# ------
@@ -296,22 +296,22 @@
guments%0A
-parser
+PARSER
= argpa
@@ -403,16 +403,21 @@
ily'
-)%0Aparser
+%0A )%0APARSER
.add
@@ -426,16 +426,21 @@
rgument(
+%0A
%22-f%22, %22-
@@ -510,16 +510,21 @@
alyzed.%22
+%0A
)%0Aargs =
@@ -528,14 +528,14 @@
s =
-parser
+PARSER
.par
@@ -750,42 +750,19 @@
0)%0A%0A
-SITE_NAME = %22CalFlora%22%0AOUTPUT_PATH
+output_path
= d
@@ -1751,19 +1751,19 @@
str(
-OUTPUT_PATH
+output_path
/ (
|
1305af162dd05591cc0e5328eb192843b63dabb1
|
Use DefaultRouter instead of SimpleRouter
|
kk/urls_v1.py
|
kk/urls_v1.py
|
from django.conf.urls import include, url
from kk.views import (
HearingCommentViewSet, HearingImageViewSet, HearingViewSet, SectionCommentViewSet,
SectionViewSet, UserDataViewSet
)
from rest_framework_nested import routers
router = routers.SimpleRouter()
router.register(r'hearing', HearingViewSet)
router.register(r'users', UserDataViewSet, base_name='users')
hearing_comments_router = routers.NestedSimpleRouter(router, r'hearing', lookup='comment_parent')
hearing_comments_router.register(r'comments', HearingCommentViewSet, base_name='comments')
hearing_child_router = routers.NestedSimpleRouter(router, r'hearing', lookup='hearing')
hearing_child_router.register(r'sections', SectionViewSet, base_name='sections')
hearing_child_router.register(r'images', HearingImageViewSet, base_name='images')
section_comments_router = routers.NestedSimpleRouter(hearing_child_router, r'sections', lookup='comment_parent')
section_comments_router.register(r'comments', SectionCommentViewSet, base_name='comments')
urlpatterns = [
url(r'^', include(router.urls, namespace='v1')),
url(r'^', include(hearing_comments_router.urls, namespace='v1')),
url(r'^', include(hearing_child_router.urls, namespace='v1')),
url(r'^', include(section_comments_router.urls, namespace='v1')),
]
|
Python
| 0
|
@@ -243,22 +243,23 @@
routers.
-Simple
+Default
Router()
|
cd04513f6f0f5bb9f55e895aa852ffefefe1e47f
|
fix building by name of configuration; add some form of pretty-printing to the build by id command
|
pnc/builds.py
|
pnc/builds.py
|
from argh import arg
import client
from client.BuildconfigurationsApi import BuildconfigurationsApi
import utils
__author__ = 'thauser'
def _create_build_configuration(name, project_id, environment, description, scm_url, scm_revision, patches_url,
build_script):
created_build_configuration = client.models.Configuration.Configuration()
created_build_configuration.name = name
created_build_configuration.projectId = project_id
def _get_build_configuration_id_by_name(name):
"""
Returns the id of the build configuration matching name
:param name: name of build configuration
:return: id of the matching build configuration, or None if no match found
"""
response = client.BuildconfigurationsApi(utils.get_api_client()).getAll()
for config in response.json():
if config["name"] == name:
return config["id"]
return None
def _build_configuration_exists(search_id):
"""
Test if a build configuration matching search_id exists
:param search_id: id to test for
:return: True if a build configuration with search_id exists
"""
response = BuildconfigurationsApi(utils.get_api_client()).getSpecific(id=search_id)
if response.ok:
return True
return False
@arg("-n", "--name", help="Name of the build configuration to trigger")
@arg("-i", "--id", help="ID of the build configuration to trigger")
def build(name=None,id=None):
"Trigger a build configuration giving either the name or ID."
if id:
if (_build_configuration_exists(id)):
print(BuildconfigurationsApi(utils.get_api_client()).trigger(id=id))
else:
print "There is no build configuration with id {0}.".format(id)
elif name:
build_id = _get_build_configuration_id_by_name(name)
if build_id:
print(utils.pretty_format_response(BuildconfigurationsApi(utils.get_api_client()).trigger(id=build_id).json()))
else:
print "There is no build configuration with name {0}.".format(name)
else:
print "Build requires either a name or an ID of a build configuration to trigger."
def create_build_configuration(name, project_id, environment, description="", scm_url="", scm_revision="", patches_url="",
build_script=""):
#check for existing project_ids, fail out if the project id doesn't exist
build_configuration = _create_build_configuration(name, project_id, environment, description, scm_url, scm_revision, patches_url, build_script)
response = utils.pretty_format_response(BuildconfigurationsApi(utils.get_api_client()).createNew(body=build_configuration).json())
print(response)
def list_build_configurations():
"Get a JSON object containing existing build configurations"
response = BuildconfigurationsApi(utils.get_api_client()).getAll()
print(utils.pretty_format_response(response.json()))
|
Python
| 0
|
@@ -729,23 +729,16 @@
ponse =
-client.
Buildcon
@@ -1583,16 +1583,45 @@
print(
+utils.pretty_format_response(
Buildcon
@@ -1673,16 +1673,24 @@
r(id=id)
+.json())
)%0A
|
9f7fb674333d8b7f0846759df6b1ac89d99d2536
|
use openmm topology
|
examples/neq-switching/run_equilibrium_setup.py
|
examples/neq-switching/run_equilibrium_setup.py
|
import numpy as np
import os
import tqdm
from openeye import oechem, oeiupac
from openmmtools import integrators, states, mcmc, constants
from openmoltools import forcefield_generators
from perses.rjmc.topology_proposal import TopologyProposal, SystemGenerator
from perses.rjmc.geometry import FFAllAngleGeometryEngine
from perses.annihilation.ncmc_switching import NCMCEngine
from perses.tests.utils import extractPositionsFromOEMOL
from simtk import openmm, unit
from io import StringIO
from simtk.openmm import app
import copy
from perses.dispersed.feptasks import compute_reduced_potential
import mdtraj as md
temperature = 300.0*unit.kelvin
beta = 1.0 / (temperature*constants.kB)
def generate_complex_topologies_and_positions(ligand_filename, protein_pdb_filename):
ifs = oechem.oemolistream()
ifs.open(ligand_filename)
# get the list of molecules
mol_list = [oechem.OEMol(mol) for mol in ifs.GetOEMols()]
mol_dict = {oechem.OEMolToSmiles(mol) : mol for mol in mol_list}
ligand_topology_dict = {smiles : forcefield_generators.generateTopologyFromOEMol(mol) for smiles, mol in mol_dict.items()}
protein_pdbfile = open(protein_pdb_filename, 'r')
pdb_file = app.PDBFile(protein_pdbfile)
protein_pdbfile.close()
receptor_positions = pdb_file.positions
receptor_topology = pdb_file.topology
receptor_md_topology = md.Topology.from_openmm(receptor_topology)
n_receptor_atoms = receptor_md_topology.n_atoms
complex_topologies = {}
complex_positions_dict = {}
for smiles, ligand_topology in ligand_topology_dict.items():
ligand_md_topology = md.Topology.from_openmm(ligand_topology)
n_complex_atoms = ligand_md_topology.n_atoms + n_receptor_atoms
copy_receptor_md_topology = copy.deepcopy(receptor_md_topology)
complex_positions = unit.Quantity(np.zeros([n_complex_atoms, 3]), unit=unit.nanometers)
complex_topology = copy_receptor_md_topology.join(ligand_md_topology)
complex_topologies[smiles] = complex_topology
ligand_positions = extractPositionsFromOEMOL(mol_dict[smiles])
complex_positions[:n_receptor_atoms, :] = receptor_positions
complex_positions[n_receptor_atoms:, :] = ligand_positions
complex_positions_dict[smiles] = complex_positions
return complex_topologies, complex_positions_dict
def solvate_system(topology, positions, system_generator, padding=9.0 * unit.angstrom, num_added=None, water_model='tip3p'):
modeller = app.Modeller(topology, positions)
modeller.addSolvent(system_generator._forcefield, model=water_model, padding=padding, numAdded=num_added)
solvated_topology = modeller.topology
solvated_positions = modeller.positions
solvated_system = system_generator.build_system(solvated_topology)
return solvated_positions, solvated_topology, solvated_system
def create_solvated_complex_systems(protein_pdb_filename, ligand_filename, output_directory, project_prefix):
barostat = openmm.MonteCarloBarostat(1.0*unit.atmosphere, temperature, 50)
system_generator = SystemGenerator(['amber14/protein.ff14SB.xml', 'gaff.xml', 'amber14/tip3p.xml'], barostat=barostat, forcefield_kwargs={'nonbondedMethod': app.PME,
'constraints': app.HBonds,
'hydrogenMass': 4 * unit.amus})
complex_topologies, complex_positions = generate_complex_topologies_and_positions(ligand_filename, protein_pdb_filename)
list_of_smiles = list(complex_topologies.keys())
initial_smiles = list_of_smiles[0]
initial_topology = complex_topologies[initial_smiles]
initial_positions = complex_positions[initial_smiles]
solvated_initial_positions, solvated_topology, solvated_system = solvate_system(initial_topology, initial_positions, system_generator)
md_topology = md.Topology.from_openmm(solvated_topology)
num_added = md_topology.n_residues - initial_topology.n_residues
if not os.path.exists(output_directory):
os.mkdir(output_directory)
np.save("{}_{}_initial.npy".format(project_prefix, 0), (solvated_initial_positions, md_topology, solvated_system))
for i in tqdm.trange(1, len(list_of_smiles)):
smiles = list_of_smiles[i]
topology = complex_topologies[smiles]
positions = complex_positions[smiles]
solvated_positions, solvated_topology, solvated_system = solvate_system(topology, positions, system_generator, padding=None, num_added=num_added)
np.save("{}_{}_initial.npy".format(project_prefix, i),
(solvated_positions, md.Topology.from_openmm(solvated_topology), solvated_system))
if __name__=="__main__":
import sys
import yaml
yaml_filename = sys.argv[1]
with open(yaml_filename, "r") as yaml_file:
options = yaml.load(yaml_file)
setup_options = options['setup']
ligand_filename = setup_options['ligand_filename']
protein_pdb_filename = setup_options['protein_pdb_filename']
project_prefix = setup_options['project_prefix']
output_directory = setup_options['output_directory']
create_solvated_complex_systems(protein_pdb_filename, ligand_filename, output_directory, project_prefix)
|
Python
| 0
|
@@ -3872,16 +3872,28 @@
topology
+.to_openmm()
, initia
@@ -4517,32 +4517,44 @@
_system(topology
+.to_openmm()
, positions, sys
|
a0dcb73836222e3515c4af4cf4cfe2d41f470b9e
|
handle missing stash[benchstorage] (#3564)
|
tests/integration_tests/tests/benchmarks/conftest.py
|
tests/integration_tests/tests/benchmarks/conftest.py
|
import pytest
from datetime import datetime
def log_result(name, timing, start, stop):
if timing:
name = f'{name}.{timing}'
print(f'BENCH {name}: {stop - start}')
class _Timings(object):
def __init__(self, func_name):
self.records = {}
self._func_name = func_name
def start(self, name=None):
self.records[name] = [datetime.utcnow(), None]
def stop(self, name=None):
if name not in self.records:
raise RuntimeError(f'bench called stop without a start: {name}')
self.records[name][1] = datetime.utcnow()
log_result(self._func_name, name, *self.records[name])
@pytest.fixture()
def bench(request):
"""Give the tests a "bench" fixture for measuring time.
Tests can call `self.bench.start()` and `self.bench.stop()` (with an
optional name). Results will be printed out immediately, and also at the
end of the session (using the pytest_sessionx hooks).
"""
func_name = request.function.__name__
storage = request.session.stash['benchstorage']
timings = _Timings(func_name)
request.cls.bench = timings
storage[func_name] = timings
def pytest_sessionstart(session):
session.stash['benchstorage'] = {}
def pytest_sessionfinish(session, exitstatus):
print('\nBENCHMARK RESULTS')
for name, timings in session.stash['benchstorage'].items():
for timing, (start, stop) in timings.records.items():
log_result(name, timing, start, stop)
|
Python
| 0
|
@@ -1341,33 +1341,37 @@
in session.stash
-%5B
+.get(
'benchstorage'%5D.
@@ -1368,17 +1368,21 @@
storage'
-%5D
+, %7B%7D)
.items()
|
f14ffce0f11a34554e48bd14c17c296e346bc611
|
Fix press events not propagating in ripple behavior
|
kivymd/ripplebehavior.py
|
kivymd/ripplebehavior.py
|
# -*- coding: utf-8 -*-
from kivy.properties import ListProperty, NumericProperty, StringProperty, \
BooleanProperty
from kivy.animation import Animation
from kivy.graphics import Color, Ellipse, StencilPush, StencilPop, \
StencilUse, StencilUnUse, Rectangle
class CommonRipple(object):
ripple_rad = NumericProperty()
ripple_rad_default = NumericProperty(1)
ripple_post = ListProperty()
ripple_color = ListProperty()
ripple_alpha = NumericProperty(.5)
ripple_scale = NumericProperty(None)
ripple_duration_in_fast = NumericProperty(.3)
# FIXME: These speeds should be calculated based on widget size in dp
ripple_duration_in_slow = NumericProperty(2)
ripple_duration_out = NumericProperty(.5)
ripple_func_in = StringProperty('out_quad')
ripple_func_out = StringProperty('out_quad')
doing_ripple = BooleanProperty(False)
finishing_ripple = BooleanProperty(False)
fading_out = BooleanProperty(False)
def on_touch_down(self, touch):
if touch.is_mouse_scrolling:
return False
if not self.collide_point(touch.x, touch.y):
return False
if not self.disabled:
self.ripple_rad = self.ripple_rad_default
self.ripple_pos = (touch.x, touch.y)
Animation.cancel_all(self, 'ripple_rad', 'ripple_color',
'rect_color')
if self.ripple_color != []:
pass
elif hasattr(self, 'theme_cls'):
self.ripple_color = self.theme_cls.ripple_color
else:
# If no theme, set Grey 300
self.ripple_color = [0.8784313725490196, 0.8784313725490196,
0.8784313725490196, self.ripple_alpha]
self.ripple_color[3] = self.ripple_alpha
self.lay_canvas_instructions()
self.finish_rad = max(self.width, self.height) * self.ripple_scale
self.start_ripple()
def lay_canvas_instructions(self):
raise NotImplementedError
def on_touch_move(self, touch, *args):
if not self.collide_point(touch.x, touch.y):
if not self.finishing_ripple and self.doing_ripple:
self.finish_ripple()
return super(CommonRipple, self).on_touch_move(touch, *args)
def on_touch_up(self, touch):
if self.collide_point(touch.x, touch.y) and self.doing_ripple:
self.finish_ripple()
return super(CommonRipple, self).on_touch_up(touch)
def start_ripple(self):
if not self.doing_ripple:
anim = Animation(
ripple_rad=self.finish_rad,
t='linear',
duration=self.ripple_duration_in_slow)
anim.bind(on_complete=lambda x, y: self.fade_out())
self.doing_ripple = True
anim.start(self)
def _set_ellipse(self, instance, value):
self.ellipse.size = (self.ripple_rad, self.ripple_rad)
# Adjust ellipse pos here
def _set_color(self, instance, value):
self.col_instruction.a = value[3]
def finish_ripple(self):
self.finishing_ripple = True
if self.doing_ripple:
Animation.cancel_all(self, 'ripple_rad')
anim = Animation(ripple_rad=self.finish_rad,
t=self.ripple_func_in,
duration=self.ripple_duration_in_fast)
anim.bind(on_complete=self.anim_complete)
self.fade_out()
anim.start(self)
def fade_out(self):
self.finishing_ripple = True
rc = self.ripple_color
if self.doing_ripple and not self.fading_out:
Animation.cancel_all(self, 'ripple_color')
anim = Animation(ripple_color=[rc[0], rc[1], rc[2], 0.],
t=self.ripple_func_out,
duration=self.ripple_duration_out)
anim.bind(on_complete=self.anim_complete)
self.fading_out = True
anim.start(self)
def anim_complete(self, anim, *args):
self.doing_ripple = False
self.finishing_ripple = False
self.fading_out = False
anim.cancel_all(self)
self.canvas.after.clear()
class RectangularRippleBehavior(CommonRipple):
ripple_scale = NumericProperty(2.75)
def lay_canvas_instructions(self):
with self.canvas.after:
StencilPush()
Rectangle(pos=self.pos, size=self.size)
StencilUse()
self.col_instruction = Color(rgba=self.ripple_color)
self.ellipse = \
Ellipse(size=(self.ripple_rad, self.ripple_rad),
pos=(self.ripple_pos[0] - self.ripple_rad / 2.,
self.ripple_pos[1] - self.ripple_rad / 2.))
StencilUnUse()
Rectangle(pos=self.pos, size=self.size)
StencilPop()
self.bind(ripple_color=self._set_color,
ripple_rad=self._set_ellipse)
def _set_ellipse(self, instance, value):
super(RectangularRippleBehavior, self)._set_ellipse(instance, value)
self.ellipse.pos = (self.ripple_pos[0] - self.ripple_rad / 2.,
self.ripple_pos[1] - self.ripple_rad / 2.)
class CircularRippleBehavior(CommonRipple):
ripple_scale = NumericProperty(1)
def lay_canvas_instructions(self):
with self.canvas.after:
StencilPush()
Ellipse(size=(self.width * self.ripple_scale,
self.height * self.ripple_scale),
pos=(self.center_x - (self.width * self.ripple_scale)/2,
self.center_y - (self.height * self.ripple_scale)/2))
StencilUse()
self.col_instruction = Color(rgba=self.ripple_color)
self.ellipse = Ellipse(size=(self.ripple_rad, self.ripple_rad),
pos=(self.center_x - self.ripple_rad / 2.,
self.center_y - self.ripple_rad / 2.))
StencilUnUse()
Ellipse(pos=self.pos, size=self.size)
StencilPop()
self.bind(ripple_color=self._set_color,
ripple_rad=self._set_ellipse)
def _set_ellipse(self, instance, value):
super(CircularRippleBehavior, self)._set_ellipse(instance, value)
self.ellipse.pos = (self.center_x - self.ripple_rad / 2.,
self.center_y - self.ripple_rad / 2.)
|
Python
| 0.000024
|
@@ -1732,16 +1732,72 @@
ripple()
+%0A%09%09return super(CommonRipple, self).on_touch_down(touch)
%0A%0A%09def l
@@ -5617,28 +5617,29 @@
er_y - self.ripple_rad / 2.)
+%0A
|
ec2fa6c99e59e80348b6f91f0f7d081df2e99878
|
Update api url.
|
latubot/source/kunto.py
|
latubot/source/kunto.py
|
"""Fetch and parse data from fluentprogress (previously kunto) servers."""
import logging
import json
import requests
import dateutil.parser
from latubot import time_utils
logger = logging.getLogger(__name__)
# all areas with kunto service
ALL_AREAS = (
"HAMEENLINNA",
"HYRYNSALMIPUOLANKA",
"HYVINKAA",
"IISALMI",
"KAJAANI",
"KEMI",
"KIRKKONUMMI",
"KOLI",
"KOUVOLA",
"KUHMO",
"KUOPIO",
"KUUSAMO",
"MANTSALA",
"MIKKELI",
"NIVALA",
"OULU",
"PIEKSAMAKI",
"RAASEPORI",
"SUOMUSSALMI",
"SOTKAMOVUOKATTI",
"SYOTE",
"TORNIO",
"VARKAUS",
"YLIVIESKA",
)
ALL_SPORTS = ("latu", "luistelu")
_DEFAULT_AREA = "OULU"
_DEFAULT_SPORT = "latu"
_URL_TEMPLATE = "https://{area}.fluentprogress.fi/outdoors/"
def load(sport: str = _DEFAULT_SPORT, area: str = _DEFAULT_AREA, fn=None):
"""Load data for (sport, area) combo."""
if sport not in ALL_SPORTS:
raise ValueError(f"invalid sport {sport!r}")
if area not in ALL_AREAS:
raise ValueError(f"invalid area {area!r}")
if fn:
logger.debug(f"Load updates from {fn}")
raw = open(fn).read()
else:
raw = _load_raw_data(area)
updates = _parse(raw, sport)
_log_updates(updates)
return updates
def _load_raw_data(area):
"""Load raw data from kunto server."""
base_url = _URL_TEMPLATE.format(area=area.lower())
url = base_url + "api/venues"
resp = requests.get(url)
resp.encoding = "utf-8"
return resp.text
def _parse(txt, sport):
"""Parse server response for updates on sport.
txt: {
"type": "FeatureCollection",
"features": [
{
"id": <int>,
"type": <str>, ("skitrack", "skatefield", ...)
"group": <str>, ("Oulunsalo", "Kempele", ...)
"name": <str>, ("Kirkonkylän kenttä", ...)
"description": <str>, (free text)
"status": <str>, ("CLOSED", "OPEN", "BAD CONDITION"...) (check spelling)
"images": [<int>], (?)
"maintainedAt": <str> ("2020-03-20T06:50:54.031+02:00")
},
...
}
}
"""
sport_map = {"latu": "skitrack", "luistelu": "skatefield"}
d = json.loads(txt)
updates = (f["properties"] for f in d["features"])
sport_updates = [v for v in updates if v["type"] == sport_map[sport]]
for v in sport_updates:
v["date"] = _parse_maintained_at(v.pop("maintainedAt", None))
return sport_updates
def _parse_maintained_at(v):
"""Parse maintainedAt value from an update."""
try:
return dateutil.parser.isoparse(v)
except Exception as e:
if v:
logger.error(f"Can't parse date from {v!r} ({e})")
return None
def _log_updates(updates):
"""Log updates."""
n = len(updates)
n_with_date = sum(1 for v in updates if v["date"])
logger.info(f"Loaded {n} items ({n_with_date} w/ date)")
if __name__ == "__main__":
import sys
logging.basicConfig(level=logging.DEBUG)
fn = sys.argv[1] if len(sys.argv) > 1 else None
d1 = load(sport="latu", fn=fn)
print(json.dumps(d1, cls=time_utils.DateTimeEncoder, indent=2))
|
Python
| 0
|
@@ -1444,17 +1444,21 @@
pi/venue
-s
+/list
%22%0A re
|
ab989d8ddf24b86a2ac394b97018c242877ec1ed
|
Allow config parameter fallback to config file
|
bumblebee/engine.py
|
bumblebee/engine.py
|
"""Core application engine"""
import os
import time
import pkgutil
import importlib
import bumblebee.error
import bumblebee.modules
def all_modules():
"""Return a list of available modules"""
result = []
path = os.path.dirname(bumblebee.modules.__file__)
for mod in [name for _, name, _ in pkgutil.iter_modules([path])]:
result.append({
"name": mod
})
return result
class Module(object):
"""Module instance base class
Objects of this type represent the modules that
the user configures. Concrete module implementations
(e.g. CPU utilization, disk usage, etc.) derive from
this base class.
"""
def __init__(self, engine, config={}, widgets=[]):
self.name = config.get("name", self.__module__.split(".")[-1])
self._config = config
self.id = self.name
self._widgets = []
if widgets:
self._widgets = widgets if isinstance(widgets, list) else [widgets]
def widgets(self):
"""Return the widgets to draw for this module"""
return self._widgets
def widget(self, name):
for widget in self._widgets:
if widget.name == name:
return widget
def widget_by_id(self, uid):
for widget in self._widgets:
if widget.id == uid:
return widget
return None
def update(self, widgets):
"""By default, update() is a NOP"""
pass
def update_all(self):
self.update(self._widgets)
def parameter(self, name, default=None):
"""Return the config parameter 'name' for this module"""
name = "{}.{}".format(self.name, name)
return self._config["config"].get(name, default)
def threshold_state(self, value, warn, crit):
if value > float(self.parameter("critical", crit)):
return "critical"
if value > float(self.parameter("warning", warn)):
return "warning"
return None
class Engine(object):
"""Engine for driving the application
This class connects input/output, instantiates all
required modules and drives the "event loop"
"""
def __init__(self, config, output=None, inp=None):
self._output = output
self._config = config
self._running = True
self._modules = []
self.input = inp
self._aliases = self._read_aliases()
self.load_modules(config.modules())
self._current_module = None
self.input.register_callback(None, bumblebee.input.WHEEL_UP,
"i3-msg workspace prev_on_output")
self.input.register_callback(None, bumblebee.input.WHEEL_DOWN,
"i3-msg workspace next_on_output")
self.input.start()
def modules(self):
return self._modules
def load_modules(self, modules):
"""Load specified modules and return them as list"""
for module in modules:
mod = self._load_module(module["module"], module["name"])
self._modules.append(mod)
self._register_module_callbacks(mod)
return self._modules
def _register_module_callbacks(self, module):
buttons = [
{ "name": "left-click", "id": bumblebee.input.LEFT_MOUSE },
{ "name": "middle-click", "id": bumblebee.input.MIDDLE_MOUSE },
{ "name": "right-click", "id": bumblebee.input.RIGHT_MOUSE },
{ "name": "wheel-up", "id": bumblebee.input.WHEEL_UP },
{ "name": "wheel-down", "id": bumblebee.input.WHEEL_DOWN },
]
for button in buttons:
if module.parameter(button["name"], None):
self.input.register_callback(obj=module,
button=button["id"], cmd=module.parameter(button["name"]))
def _read_aliases(self):
result = {}
for module in all_modules():
mod = importlib.import_module("bumblebee.modules.{}".format(module["name"]))
for alias in getattr(mod, "ALIASES", []):
result[alias] = module["name"]
return result
def _load_module(self, module_name, config_name=None):
"""Load specified module and return it as object"""
if module_name in self._aliases:
config_name is config_name if config_name else module_name
module_name = self._aliases[module_name]
if config_name is None:
config_name = module_name
try:
module = importlib.import_module("bumblebee.modules.{}".format(module_name))
except ImportError as error:
raise bumblebee.error.ModuleLoadError(error)
return getattr(module, "Module")(self, {
"name": config_name,
"config": self._config
})
def running(self):
"""Check whether the event loop is running"""
return self._running
def stop(self):
"""Stop the event loop"""
self._running = False
def current_module(self):
return self._current_module.__module__
def run(self):
"""Start the event loop"""
self._output.start()
while self.running():
self._output.begin()
for module in self._modules:
self._current_module = module
module.update(module.widgets())
for widget in module.widgets():
widget.link_module(module)
self._output.draw(widget=widget, module=module, engine=self)
self._output.flush()
self._output.end()
if self.running():
self.input.wait(float(self._config.get("interval", 1)))
self._output.stop()
self.input.stop()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
Python
| 0.000294
|
@@ -127,16 +127,134 @@
odules%0A%0A
+try:%0A from ConfigParser import SafeConfigParser%0Aexcept ImportError:%0A from configparser import SafeConfigParser%0A%0A
def all_
@@ -960,24 +960,339 @@
= self.name%0A
+%0A self._configFile = None%0A for cfg in %5B os.path.expanduser(%22~/.bumblebee-status.conf%22), os.path.expanduser(%22~/.config/bumblebee-status.conf%22) %5D:%0A if os.path.exists(cfg):%0A self._configFile = SafeConfigParser()%0A self._configFile.read(cfg)%0A break%0A%0A
self
@@ -2111,38 +2111,39 @@
, name)%0A
-return
+value =
self._config%5B%22c
@@ -2168,16 +2168,196 @@
default)
+%0A if value == default:%0A try:%0A value = self._configFile.get(%22module-parameters%22, name)%0A except:%0A pass%0A return value
%0A%0A de
|
0902b33e3baebfc1e48c321c4a47216addecdee1
|
Use "NOTE(username):" instead of "XXX:"
|
tests/onnx_chainer_tests/functions_tests/test_rnn.py
|
tests/onnx_chainer_tests/functions_tests/test_rnn.py
|
import chainer
import chainer.functions as F
import chainer.links as L
from chainer import testing
import numpy as np
from onnx_chainer import onnx_helper
from onnx_chainer.testing import input_generator
from onnx_chainer_tests.helper import ONNXModelTest
@testing.parameterize(
{'n_layers': 1, 'name': 'n_step_gru_1_layer'},
{'n_layers': 2, 'name': 'n_step_gru_2_layer'},
)
class TestNStepGRU(ONNXModelTest):
def test_output(self):
n_layers = self.n_layers
dropout_ratio = 0.0
batch_size = 3
input_size = 4
hidden_size = 5
seq_length = 6
class Model(chainer.Chain):
def __init__(self):
super().__init__()
def __call__(self, hx, ws1, ws2, ws3, bs, xs):
ws = [F.separate(ws1) + F.separate(ws2)]
if n_layers > 1:
ws.extend([F.separate(w) for w in F.separate(ws3)])
bs = [F.separate(b) for b in F.separate(bs)]
xs = F.separate(xs)
hy, ys = F.n_step_gru(n_layers, dropout_ratio,
hx, ws, bs, xs)
return hy, F.stack(ys, axis=0)
model = Model()
hx = input_generator.increasing(n_layers, batch_size, hidden_size)
ws1 = input_generator.increasing(3, hidden_size, input_size)
ws2 = input_generator.increasing(3, hidden_size, hidden_size)
ws3 = input_generator.increasing(
n_layers - 1, 6, hidden_size, hidden_size)
bs = input_generator.increasing(n_layers, 6, hidden_size)
xs = input_generator.increasing(seq_length, batch_size, input_size)
self.expect(model, (hx, ws1, ws2, ws3, bs, xs))
def convert_Permutate(params):
gb = onnx_helper.GraphBuilder()
# indices_name = params.context.get_name(func.indices)
indices_name = params.context.add_const(params.func.indices,
'indices') # XXX
if params.func.inv:
empty = params.context.add_const(
np.zeros(dtype=np.int64, shape=params.func.indices.shape), 'empty')
r = params.context.add_const(
np.arange(len(params.func.indices), dtype=np.int64),
'range')
op = 'ScatterElements' if params.opset_version == 11 else 'Scatter'
indices_name = gb.op(op, [empty, indices_name, r])
params.input_names.append(indices_name)
gb.op_output_named('Gather', params.input_names, params.output_names,
axis=params.func.axis)
return gb.nodes()
@testing.parameterize(
{'n_layers': 1, 'name': 'TestNStepGRU_1_layer'},
{'n_layers': 2, 'name': 'TestNStepGRU_2_layer'},
)
class TestNStepGRULink(ONNXModelTest):
def test_output(self):
n_layers = self.n_layers
dropout_ratio = 0.0
batch_size = 3
input_size = 4
hidden_size = 5
seq_length = 6
class Model(chainer.Chain):
def __init__(self):
super().__init__()
with self.init_scope():
self.gru = L.NStepGRU(
n_layers, input_size, hidden_size, dropout_ratio)
def __call__(self, *xs):
hy, ys = self.gru(None, xs)
return [hy] + ys
model = Model()
xs = [input_generator.increasing(seq_length, input_size)
for i in range(batch_size)]
# XXX: Replace Permutate converter for avoiding error like:
# ValidationError: Nodes in a graph must be topologically sorted, \
# however input 'v330' of node:
# input: "Permutate_0_const_empty" input: "v330" \
# input: "Permutate_0_const_range" output: "Permutate_0_tmp_0" \
# name: "Permutate_0_tmp_0" op_type: "Scatter"
# is not output of any previous nodes.
addon_converters = {
'Permutate': convert_Permutate,
}
self.expect(model, xs, skip_opset_version=[7, 8],
external_converters=addon_converters)
|
Python
| 0.000651
|
@@ -3436,19 +3436,28 @@
#
-XXX
+NOTE(msakai)
: Replac
|
f32aa62b097ae603bf26e62a1abc5867f31faaed
|
Add run_table parser
|
km3pipe/db.py
|
km3pipe/db.py
|
# coding=utf-8
# Filename: db.py
# pylint: disable=locally-disabled
"""
Database utilities.
"""
from __future__ import division, absolute_import, print_function
from datetime import datetime
import ssl
import urllib
from urllib2 import (Request, build_opener, HTTPCookieProcessor, HTTPHandler)
import cookielib
import json
import sys
import pandas as pd
from km3pipe.tools import Timer
from km3pipe.config import Config
from km3pipe.logger import logging
if sys.version_info[0] < 3:
from StringIO import StringIO
else:
from io import StringIO
__author__ = 'tamasgal'
log = logging.getLogger(__name__) # pylint: disable=C0103
# Ignore invalid certificate error
try:
ssl._create_default_https_context = ssl._create_unverified_context
except AttributeError:
log.warn("Your SSL support is outdate. "
"Please update your Python installation!")
LOGIN_URL = 'https://km3netdbweb.in2p3.fr/home.htm'
BASE_URL = 'https://km3netdbweb.in2p3.fr'
class DBManager(object):
"""A wrapper for the KM3NeT Web DB"""
def __init__(self, username=None, password=None):
"Create database connection"
self.cookies = []
self._parameters = None
self._opener = None
if username is None:
config = Config()
username, password = config.db_credentials
self.login(username, password)
def datalog(self, parameter, run, maxrun=None, detid='D_ARCA001'):
"Retrieve datalogs for given parameter, run(s) and detector"
parameter = parameter.lower()
if maxrun is None:
maxrun = run
with Timer('Database lookup'):
return self._datalog(parameter, run, maxrun, detid)
def _datalog(self, parameter, run, maxrun, detid):
"Extract data from database"
values = {'parameter_name': parameter,
'minrun': run,
'maxrun': maxrun,
'detid': detid,
}
data = urllib.urlencode(values)
content = self._get_content('streamds/datalognumbers.txt?' + data)
if content.startswith('ERROR'):
log.error(content)
return None
try:
dataframe = pd.read_csv(StringIO(content), sep="\t")
except ValueError:
log.warning("Empty dataset")
return None
else:
def convert_data(timestamp):
return datetime.fromtimestamp(float(timestamp) / 1e3)
dataframe['DATETIME'] = dataframe['UNIXTIME'].apply(convert_data)
convert_unit = self.parameters.get_converter(parameter)
dataframe['VALUE'] = dataframe['DATA_VALUE'].apply(convert_unit)
dataframe.unit = self.parameters.unit(parameter)
return dataframe
@property
def parameters(self):
"Return the parameters container for quick access to their details"
if self._parameters is None:
self._load_parameters()
return self._parameters
def _load_parameters(self):
"Retrieve a list of available parameters from the database"
parameters = self._get_json('allparam/s')
if parameters['Result'] != 'OK':
raise ValueError('Error while retrieving the parameter list.')
data = {}
for parameter in parameters['Data']:
data[parameter['Name'].lower()] = parameter
self._parameters = ParametersContainer(data)
def _get_json(self, url):
"Get JSON-type content"
content = self._get_content('jsonds/' + url)
return json.loads(content)
def _get_content(self, url):
"Get HTML content"
f = self.opener.open(BASE_URL + '/' + url)
content = f.read()
return content
@property
def opener(self):
"A reusable connection manager"
if self._opener is None:
opener = build_opener()
for cookie in self.cookies:
cookie_str = cookie.name + '=' + cookie.value
opener.addheaders.append(('Cookie', cookie_str))
self._opener = opener
return self._opener
def login(self, username, password):
"Login to the databse and store cookies for upcoming requests."
cj = cookielib.CookieJar()
opener = build_opener(HTTPCookieProcessor(cj), HTTPHandler())
values = {'usr': username, 'pwd': password}
data = urllib.urlencode(values)
req = Request(LOGIN_URL, data)
f = opener.open(req)
html = f.read()
if 'Bad username or password' in html:
log.error("Bad username or password!")
self.cookies = cj
class ParametersContainer(object):
"""Provides easy access to parameters"""
def __init__(self, parameters):
self._parameters = parameters
self._converters = {}
@property
def names(self):
"A list of parameter names"
return self._parameters.keys()
def get_parameter(self, parameter):
"Return a dict of given parameter"
return self._parameters[parameter]
def get_converter(self, parameter):
"""Generate unit conversion function for given parameter"""
if parameter not in self._converters:
param = self.get_parameter(parameter)
try:
scale = float(param['Scale'])
except KeyError:
scale = 1
def convert(value):
# easy_scale = float(param['EasyScale'])
# easy_scale_multiplier = float(param['EasyScaleMultiplier'])
return value * scale
return convert
def unit(self, parameter):
"Get the unit for given parameter"
return self._parameters[parameter.lower()]['Unit']
|
Python
| 0.000007
|
@@ -2781,16 +2781,374 @@
aframe%0A%0A
+ def run_table(self, detid='D_ARCA001'):%0A url = 'streamds/runs.txt?detid=%7B0%7D'.format(detid) %0A content = self._get_content(url)%0A try:%0A dataframe = pd.read_csv(StringIO(content), sep=%22%5Ct%22)%0A except ValueError:%0A log.warning(%22Empty dataset%22)%0A return None%0A else:%0A return dataframe%0A%0A
@pro
|
57adb8240cf0015e1e10f2e9fd4f090a8d896a27
|
Revert "[examples...bindings_generator] began to update"
|
examples/stationarylinear_bindings_generator.py
|
examples/stationarylinear_bindings_generator.py
|
#! /usr/bin/env python
# This file is part of the dune-pymor project:
# https://github.com/pymor/dune-pymor
# Copyright Holders: Felix Albrecht, Stephan Rave
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
import sys
from pybindgen import param, retval
from dune.pymor.core import prepare_python_bindings, inject_lib_dune_pymor, finalize_python_bindings
#from dune.pymor.discretizations import inject_StationaryDiscretizationImplementation
def inject_Example(module):
'''injects the user code into the module'''
namespace = module.add_cpp_namespace('Example')
AnalyticalProblem = namespace.add_class('AnalyticalProblem')
AnalyticalProblem.add_constructor([])
AnalyticalProblem.add_constructor([param('const int', 'dd')])
if __name__ == '__main__':
# prepare the module
module, pybindgen_filename = prepare_python_bindings(sys.argv[1:])
# add all of libdunepymor
module, exceptions, interfaces, CONFIG_H = inject_lib_dune_pymor(module)
# # add example user code
# inject_Example(module)
# # add the users discretization
# discretization = inject_StationaryDiscretizationImplementation(
# module, exceptions, interfaces, CONFIG_H,
# 'Example::SimpleDiscretization',
# Traits={'VectorType': 'Dune::Pymor::LA::DuneDynamicVector< double >',
# 'OperatorType': 'Dune::Pymor::Operators::LinearAffinelyDecomposedContainerBased< Dune::Pymor::Operators::DuneDynamic< double > >',
# 'FunctionalType': 'Dune::Pymor::Functionals::LinearAffinelyDecomposedVectorBased< Dune::Pymor::LA::DuneDynamicVector< double > >',
# 'ProductType': 'Dune::Pymor::Operators::LinearAffinelyDecomposedContainerBased< Dune::Pymor::Operators::DuneDynamic< double > >'})
# # and add the custom constructor to the discretization
# discretization.add_constructor([param('const Example::AnalyticalProblem *', 'prob', transfer_ownership=True)])
# and finally write the pybindgen .cc file
finalize_python_bindings(module, pybindgen_filename)
|
Python
| 0
|
@@ -381,17 +381,16 @@
indings%0A
-#
from dun
@@ -466,16 +466,16 @@
ntation%0A
+
%0A%0Adef in
@@ -993,33 +993,32 @@
e_pymor(module)%0A
-#
# add exampl
@@ -1029,17 +1029,16 @@
er code%0A
-#
inje
@@ -1056,17 +1056,16 @@
module)%0A
-#
# ad
@@ -1083,33 +1083,32 @@
discretization%0A
-#
discretizati
@@ -1159,17 +1159,16 @@
tation(%0A
-#
@@ -1209,17 +1209,16 @@
NFIG_H,%0A
-#
@@ -1250,17 +1250,16 @@
ation',%0A
-#
@@ -1324,25 +1324,24 @@
double %3E',%0A
-#
@@ -1467,33 +1467,32 @@
c%3C double %3E %3E',%0A
-#
@@ -1622,17 +1622,16 @@
e %3E %3E',%0A
-#
@@ -1769,17 +1769,16 @@
%3E %3E'%7D)%0A
-#
# an
@@ -1828,17 +1828,16 @@
ization%0A
-#
disc
|
e6cef6d96a3c2bd6dd07f580f4a704734133d316
|
Bump version to 0.3c2
|
lava_server/__init__.py
|
lava_server/__init__.py
|
# Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
__version__ = (0, 3, 0, "candidate", 1)
|
Python
| 0.000001
|
@@ -753,11 +753,11 @@
idate%22,
-1
+2
)%0A
|
40b1b89485216e66d0e422bd1df73180c00072cf
|
update doc string on alarm-request rest api
|
newfies/apirest/alarm_request_serializers.py
|
newfies/apirest/alarm_request_serializers.py
|
# -*- coding: utf-8 -*-
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2013 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <info@star2billing.com>
#
from rest_framework import serializers
from appointment.models.alarms import Alarm, AlarmRequest
from dialer_cdr.models import Callrequest
from appointment.function_def import get_calendar_user_id_list
class AlarmRequestSerializer(serializers.HyperlinkedModelSerializer):
"""
**Create**:
CURL Usage::
curl -u username:password --dump-header - -H "Content-Type:application/json" -X POST --data '{"alarm": "http://localhost:8000/rest-api/alarm/1/", "date": "2013-12-12 12:45:33", "status": "1", "callrequest": "http://localhost:8000/rest-api/callrequest/1/"}' http://localhost:8000/rest-api/alarm-request/
Response::
HTTP/1.0 201 CREATED
Date: Fri, 14 Jun 2013 09:52:27 GMT
Server: WSGIServer/0.1 Python/2.7.3
Vary: Accept, Accept-Language, Cookie
Content-Type: application/json; charset=utf-8
Content-Language: en-us
Allow: GET, POST, HEAD, OPTIONS
**Read**:
CURL Usage::
curl -u username:password -H 'Accept: application/json' http://localhost:8000/rest-api/alarm-request/
curl -u username:password -H 'Accept: application/json' http://localhost:8000/rest-api/alarm-request/%alarm-request-id%/
Response::
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"alarm": "4",
"url": "http://127.0.0.1:8000/rest-api/alarm-request/2/",
"date": "2013-11-05T06:30:00Z",
"status": 1,
"callstatus": 0,
"calltime": "2013-11-05T06:30:00Z",
"duration": 0,
"callrequest": null,
"created_date": "2013-11-05T06:46:18.635Z"
}
]
}
**Update**:
CURL Usage::
curl -u username:password --dump-header - -H "Content-Type: application/json" -X PATCH --data '{"alarm": "http://localhost:8000/rest-api/alarm/1/", "date": "2013-12-12 12:45:33", "status": "1", "callrequest": "http://localhost:8000/rest-api/callrequest/1/"}' http://localhost:8000/rest-api/alarm-request/%alarm-request-id%/
Response::
HTTP/1.0 200 NO CONTENT
Date: Fri, 23 Sep 2011 06:46:12 GMT
Server: WSGIServer/0.1 Python/2.7.1+
Vary: Accept-Language, Cookie
Content-Length: 0
Content-Type: text/html; charset=utf-8
Content-Language: en-us
**Delete**:
CURL Usage::
curl -u username:password --dump-header - -H "Content-Type: application/json" -X DELETE http://localhost:8000/rest-api/alarm-request/%alarm-request-id%/
"""
class Meta:
model = AlarmRequest
def get_fields(self, *args, **kwargs):
"""filter content_type field"""
fields = super(AlarmRequestSerializer, self).get_fields(*args, **kwargs)
request = self.context['request']
calendar_user_list = get_calendar_user_id_list(request.user)
fields['alarm'].queryset = Alarm.objects.filter(event__creator_id__in=calendar_user_list)
fields['callrequest'].queryset = Callrequest.objects.filter(campaign__user=request.user)
return fields
|
Python
| 0
|
@@ -3270,16 +3270,682 @@
st-id%25/%0A
+%0A%0A **get_nested_alarm_request**:%0A%0A CURL Usage::%0A%0A curl -u username:password -H 'Accept: application/json' http://localhost:8000/rest-api/alarm-request/%25alarm-request-id%25/get_nested_alarm_request/%0A%0A Response::%0A%0A %7B%0A %22url%22: %22http://localhost:8000/rest-api/alarm-request/1/%22, %0A %22callrequest%22: %22http://localhost:8000/rest-api/callrequest/100/%22, %0A %22alarm%22: %7B%0A %22url%22: %22http://localhost:8000/rest-api/alarm/2/%22, %0A %22event%22: %7B%0A %22url%22: %22http://localhost:8000/rest-api/event/2/%22%0A %7D%0A %7D%0A %7D%0A
%22%22%22%0A
|
c6878b3306290760231ca24893418e0c87eb1f8a
|
Make directory in selinux tests
|
tests/pytests/unit/modules/file/test_file_selinux.py
|
tests/pytests/unit/modules/file/test_file_selinux.py
|
import logging
import os
import shutil
import pytest
import salt.config
import salt.loader
import salt.modules.cmdmod as cmdmod
import salt.modules.file as filemod
import salt.utils.data
import salt.utils.files
import salt.utils.platform
import salt.utils.stringutils
from tests.support.mock import MagicMock, patch
log = logging.getLogger(__name__)
pytestmark = pytest.mark.skipif(
salt.modules.selinux.getenforce() != "Enforcing",
reason="Skip if selinux not enabled",
)
@pytest.fixture
def configure_loader_modules():
    """Provide the dunder globals the file execution module needs in tests."""
    salt_dunder = {
        "cmd.run": cmdmod.run,
        "cmd.run_all": cmdmod.run_all,
        "cmd.retcode": cmdmod.retcode,
        "selinux.fcontext_add_policy": MagicMock(
            return_value={"retcode": 0, "stdout": ""}
        ),
    }
    return {filemod: {"__salt__": salt_dunder, "__opts__": {"test": False}}}
@pytest.fixture
def subdir(tmp_path):
    """Create the per-test working directory; remove it on teardown.

    Fix: the directory was yielded without ever being created, so the
    tfile* fixtures could not open files inside it and the teardown
    ``rmtree`` raised FileNotFoundError.  Create it before yielding.
    """
    subdir = tmp_path / "file-selinux-test-dir"
    subdir.mkdir()
    yield subdir
    shutil.rmtree(str(subdir))
@pytest.fixture
def tfile1(subdir):
    """Yield the path of an empty test file; remove it on teardown."""
    path = str(subdir / "tfile1")
    with salt.utils.files.fopen(path, "w"):
        pass
    yield path
    os.remove(path)
@pytest.fixture
def tfile2(subdir):
    """Yield the path of an empty test file; remove it on teardown."""
    path = str(subdir / "tfile2")
    with salt.utils.files.fopen(path, "w"):
        pass
    yield path
    os.remove(path)
@pytest.fixture
def tfile3(subdir):
    """Yield the path of an empty test file; remove it on teardown."""
    path = str(subdir / "tfile3")
    with salt.utils.files.fopen(path, "w"):
        pass
    yield path
    os.remove(path)
def test_selinux_getcontext(tfile1):
    """
    Test get selinux context
    Assumes default selinux attributes on temporary files
    """
    context = filemod.get_selinux_context(tfile1)
    assert context == "unconfined_u:object_r:user_tmp_t:s0"
def test_selinux_setcontext(tfile2):
    """
    Test set selinux context
    Assumes default selinux attributes on temporary files
    """
    new_context = filemod.set_selinux_context(tfile2, user="system_u")
    assert new_context == "system_u:object_r:user_tmp_t:s0"
def test_selinux_setcontext_persist(tfile2):
    """
    Test set selinux context with persist=True
    Assumes default selinux attributes on temporary files
    """
    new_context = filemod.set_selinux_context(tfile2, user="system_u", persist=True)
    assert new_context == "system_u:object_r:user_tmp_t:s0"
def test_file_check_perms(tfile3):
    """check_perms reports pending selinux-type and mode changes."""
    # Expected: first element describes the pending changes, second holds
    # the pre-change ("l" = local/current) ownership and mode of the file.
    expected_result = (
        {
            "comment": "The file {} is set to be changed".format(tfile3),
            "changes": {
                "selinux": {"New": "Type: lost_found_t", "Old": "Type: user_tmp_t"},
                "mode": "0644",
            },
            "name": tfile3,
            "result": True,
        },
        {"luser": "root", "lmode": "0600", "lgroup": "root"},
    )

    # Disable lsattr calls
    with patch("salt.utils.path.which") as m_which:
        m_which.return_value = None
        result = filemod.check_perms(
            tfile3,
            {},
            "root",
            "root",
            644,
            seuser=None,
            serole=None,
            setype="lost_found_t",
            serange=None,
        )
        assert result == expected_result
|
Python
| 0
|
@@ -1017,16 +1017,35 @@
st-dir%22%0A
+ subdir.mkdir()%0A
yiel
|
68e2f4eb1ca3a4b8e85cc4968db87a85c59b095f
|
Remove stop_trigger
|
examples/chainermn/cifar/train_cifar.py
|
examples/chainermn/cifar/train_cifar.py
|
import argparse
import chainermn
import chainer
import chainer.links as L
from chainer import training
from chainer.training import extensions
from chainer.datasets import get_cifar10
from chainer.datasets import get_cifar100
import models.VGG
def main():
    """Train a VGG classifier on CIFAR-10/100 with ChainerMN self-parallel workers.

    Parses command-line options, builds the communicator, dataset, model,
    optimizer and trainer, then runs training for ``--epoch`` epochs.
    """
    parser = argparse.ArgumentParser(description='Chainer CIFAR example:')
    parser.add_argument('--dataset', '-d', default='cifar10',
                        help='The dataset to use: cifar10 or cifar100')
    parser.add_argument('--batchsize', '-b', type=int, default=64,
                        help='Number of images per GPU in a mini-batch')
    parser.add_argument('--learnrate', '-l', type=float, default=0.05,
                        help='Learning rate for SGD')
    parser.add_argument('--epoch', '-e', type=int, default=300,
                        help='Number of sweeps over the dataset to train')
    parser.add_argument('--gpu', '-g', action='store_true',
                        help='Use GPU (negative value indicates CPU)')
    parser.add_argument('--out', '-o', default='result',
                        help='Directory to output the result')
    parser.add_argument('--resume', '-r', default='',
                        help='Resume the training from snapshot')
    parser.add_argument('--communicator', default='hierarchical',
                        help='Type of communicator')
    args = parser.parse_args()

    # Prepare ChainerMN communicator.
    if args.gpu:
        comm = chainermn.create_communicator(args.communicator)
        device = comm.intra_rank
    else:
        comm = chainermn.create_communicator('naive')
        device = -1

    if comm.rank == 0:
        print('==========================================')
        print('Num process (COMM_WORLD): {}'.format(comm.size))
        if args.gpu:
            print('Using GPUs')
        print('Using {} communicator'.format(args.communicator))
        print('Num Minibatch-size: {}'.format(args.batchsize))
        print('Num epoch: {}'.format(args.epoch))
        print('==========================================')

    # Set up a neural network to train.
    # Classifier reports softmax cross entropy loss and accuracy at every
    # iteration, which will be used by the PrintReport extension below.
    # NOTE(review): ``train``, ``test`` and ``class_labels`` are only assigned
    # on rank 0; non-root ranks would hit NameError below -- confirm whether
    # an ``else: train, test = None, None`` branch is needed for multi-process
    # runs.
    if comm.rank == 0:
        if args.dataset == 'cifar10':
            print('Using CIFAR10 dataset.')
            class_labels = 10
            train, test = get_cifar10()
        elif args.dataset == 'cifar100':
            print('Using CIFAR100 dataset.')
            class_labels = 100
            train, test = get_cifar100()
        else:
            raise RuntimeError('Invalid dataset choice.')
    model = L.Classifier(models.VGG.VGG(class_labels))
    if device >= 0:
        # Make a specified GPU current
        chainer.backends.cuda.get_device_from_id(device).use()
        model.to_gpu()  # Copy the model to the GPU

    optimizer = chainermn.create_multi_node_optimizer(
        chainer.optimizers.MomentumSGD(args.learnrate), comm)
    optimizer.setup(model)
    optimizer.add_hook(chainer.optimizer_hooks.WeightDecay(5e-4))

    # Distribute the (root-rank) datasets across all workers.
    train = chainermn.scatter_dataset(train, comm, shuffle=True)
    test = chainermn.scatter_dataset(test, comm, shuffle=True)

    train_iter = chainer.iterators.SerialIterator(train, args.batchsize,
                                                  shuffle=False)
    test_iter = chainer.iterators.SerialIterator(test, args.batchsize,
                                                 repeat=False, shuffle=False)

    # Set up a trainer.
    # Fix: ``stop_trigger`` was referenced but never defined (NameError);
    # stop after the requested number of epochs instead.
    updater = training.updaters.StandardUpdater(
        train_iter, optimizer, device=device)
    trainer = training.Trainer(updater, (args.epoch, 'epoch'), out=args.out)

    # Evaluate the model with the test dataset for each epoch
    evaluator = extensions.Evaluator(test_iter, model, device=device)
    evaluator = chainermn.create_multi_node_evaluator(evaluator, comm)
    trainer.extend(evaluator)

    # Halve the learning rate every 25 epochs.
    trainer.extend(extensions.ExponentialShift('lr', 0.5),
                   trigger=(25, 'epoch'))

    # Reporting/snapshot extensions only run on the root rank.
    if comm.rank == 0:
        trainer.extend(extensions.dump_graph('main/loss'))
        trainer.extend(extensions.snapshot(
            filename='snaphot_epoch_{.updater.epoch}'))
        trainer.extend(extensions.LogReport())
        trainer.extend(extensions.PrintReport(
            ['epoch', 'main/loss', 'validation/main/loss',
             'main/accuracy', 'validation/main/accuracy', 'elapsed_time']))
        trainer.extend(extensions.ProgressBar())

    if args.resume:
        # Resume from a snapshot
        chainer.serializers.load_npz(args.resume, trainer)

    # Run the training
    trainer.run()


if __name__ == '__main__':
    main()
|
Python
| 0.000014
|
@@ -3677,20 +3677,29 @@
er,
-stop_trigger
+(args.epoch, 'epoch')
, ou
|
cf194c1c3a64c4547049c16fb901a2b33dc84ddf
|
Add verbose output
|
src/xii/output.py
|
src/xii/output.py
|
import os
from threading import Lock
from abc import ABCMeta, abstractmethod
# synchronize output from multiple threads
output_lock = Lock()
class colors:
    """ANSI escape sequences used to style terminal output."""
    TAG = '\033[0m'        # reset -- tags are printed unstyled
    NORMAL = '\033[37m'    # white / light grey foreground
    CLEAR = '\033[0m'      # reset all attributes
    BOLD = '\033[1m'       # bold
    UNDERLINE = '\033[4m'  # underline
    WARN = '\033[91m'      # bright red foreground
    SUCCESS = '\033[34m'   # blue foreground
def width():
    """Return the terminal width in columns.

    Fix for the FIXME: uses :func:`shutil.get_terminal_size` instead of
    shelling out to ``stty size`` -- the old ``os.popen`` call leaked the
    pipe and failed when stdin was not a tty.  ``get_terminal_size``
    consults the COLUMNS environment variable and the controlling terminal
    and falls back to 80 columns otherwise.
    """
    import shutil
    return shutil.get_terminal_size().columns
def warn(msg, tag="[xii]"):
    """Print *msg* prefixed with *tag*, styled as a bold warning."""
    line = "{} {}".format(tag, msg)
    print(colors.WARN + colors.BOLD + line + colors.CLEAR)
class HasOutput:
    """Mixin providing tagged, column-aligned, thread-safe console output.

    Subclasses implement :meth:`entity_path` returning an iterable of
    identifier strings; these are joined into a ``[a][b][c]`` tag that
    prefixes every message.
    """

    # NOTE(review): ``__meta__`` is not a recognized attribute -- to make
    # @abstractmethod enforceable this should be ``metaclass=ABCMeta`` in
    # the class statement.  Left unchanged to avoid altering instantiation
    # behavior of existing subclasses.
    __meta__ = ABCMeta

    @abstractmethod
    def entity_path(self):
        """Return an iterable of identifiers locating this entity."""
        pass

    def say(self, msg):
        """Print a normal-priority message."""
        self._tprint(self._generate_tag(), msg, colors.NORMAL)

    def counted(self, i, msg):
        """Print a message whose tag carries the counter index *i*."""
        tag = "{}[#{}]".format(self._generate_tag(), i)
        self._tprint(tag, msg, colors.NORMAL)

    def warn(self, msg):
        """Print a warning message (bold red)."""
        self._tprint(self._generate_tag(), msg, colors.WARN + colors.BOLD)

    def success(self, msg):
        """Print a success message (bold blue)."""
        self._tprint(self._generate_tag(), msg, colors.SUCCESS + colors.BOLD)

    def _tprint(self, tag, msg, wrap=None):
        """Print ``tag ...: msg`` padded to a fixed column, thread-safely.

        Fix: hold the lock with a ``with`` block so it is released even if
        ``print`` raises; the previous explicit acquire/release pair would
        leave the lock held on error and deadlock every later writer.
        """
        stop = 40
        fill = stop - len(tag)
        line = "{} {}: {}".format(tag, "." * fill, msg)
        if wrap:
            line = wrap + line + colors.CLEAR
        with output_lock:
            print(line)

    def _generate_tag(self):
        """Join the entity path into a ``[a][b]...`` tag string."""
        return "".join("[" + ident + "]" for ident in self.entity_path())
|
Python
| 0.999999
|
@@ -679,16 +679,252 @@
pass%0A%0A
+ @abstractmethod%0A def is_verbose(self):%0A pass%0A%0A def verbose(self, msg):%0A if self.is_verbose():%0A self._tprint(self._generate_tag(),%0A msg,%0A colors.NORMAL)%0A%0A
def
|
b3b99ed11d6c86721e9e57441111e0c88461eb70
|
Fix example state relation
|
examples/defining_new_state_relation.py
|
examples/defining_new_state_relation.py
|
import numpy as np
import matplotlib.pyplot as plt
from math import log
from rsfmodel import rsf
# This is really just the Ruina realtion, but let's pretend we invented it!
# We'll inherit attributes from rsf.StateRelation, but you wouldn't have to.
# It does provide velocity contribution calculation for us though!
class MyStateRelation(rsf.StateRelation):
    """Ruina-style (slowness) state evolution law.

    Inherits the velocity-contribution calculation from
    :class:`rsf.StateRelation`; only steady state and state evolution are
    defined here.
    """

    def _set_steady_state(self, system):
        """Initialise ``self.state`` to its steady-state value at vref."""
        self.state = self.Dc / system.vref

    def evolve_state(self, system):
        """Return d(theta)/dt for the current system state.

        Fix: the lazy initialisation previously called
        ``_set_steady_state(self, system)`` as a bare (undefined) name and
        assigned its ``None`` return to ``self.state``; call it as a method
        and let it set ``self.state`` itself.
        """
        if self.state is None:
            self._set_steady_state(system)
        return -1 * (system.v * self.state / self.Dc) * log(system.v * self.state / self.Dc)
# Build the rate-and-state friction model with our custom state relation.
model = rsf.Model()

# Set model initial conditions
model.mu0 = 0.6    # Friction initial (at the reference velocity)
model.a = 0.01     # Empirical coefficient for the direct effect
model.k = 1e-3     # Normalized System stiffness (friction/micron)
model.v = 1.       # Initial slider velocity, generally is vlp(t=0)
model.vref = 1.    # Reference velocity, generally vlp(t=0)

state1 = MyStateRelation()
state1.b = 0.005   # Empirical coefficient for the evolution effect
state1.Dc = 10.    # Critical slip distance

model.state_relations = [state1]  # Which state relation we want to use

# We want to solve for 40 seconds at 100Hz
model.time = np.arange(0, 40.01, 0.01)

# We want to slide at 1 um/s for 10 s, then at 10 um/s for 31
lp_velocity = np.ones_like(model.time)
lp_velocity[10*100:] = 10.  # Velocity after 10 seconds is 10 um/s

# Set the model load point velocity, must be same shape as model.model_time
model.loadpoint_velocity = lp_velocity

# Run the model!
model.solve()

# Make the phase plot
rsf.phasePlot(model)

# Make a plot in displacement
rsf.dispPlot(model)

# Make a plot in time
rsf.timePlot(model)
|
Python
| 0.998463
|
@@ -419,17 +419,16 @@
def
-_
set_stea
@@ -533,97 +533,8 @@
m):%0A
- if self.state is None:%0A self.state = _set_steady_state(self, system)%0A%0A
|
dace2c628357dd6e50de62051e3d85fbb0c75666
|
Use new naming scheme for transactions.
|
opbeat/contrib/flask/__init__.py
|
opbeat/contrib/flask/__init__.py
|
"""
opbeat.contrib.flask
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011-2012 Opbeat
Large portions are
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import os
import warnings
import logging
from flask import request, signals
import opbeat.instrumentation.control
from opbeat.conf import setup_logging
from opbeat.base import Client
from opbeat.contrib.flask.utils import get_data_from_request
from opbeat.utils import disabled_due_to_debug, get_name_from_func
from opbeat.handlers.logging import OpbeatHandler
from opbeat.utils.deprecation import deprecated
logger = logging.getLogger('opbeat.errors.client')
def make_client(client_cls, app, organization_id=None, app_id=None, secret_token=None):
    """Build an Opbeat client for *app*.

    Credentials are resolved, in order, from the explicit arguments, the
    app's ``OPBEAT`` config dict, and the process environment; deprecated
    locations are still honoured as a last resort but emit warnings.
    """
    config = app.config.get('OPBEAT', {})

    # raise a warning if OPBEAT_ORGANIZATION_ID is set in the config, but not
    # ORGANIZATION_ID. Until 1.3.1, we erroneously checked only
    # OPBEAT_ORGANIZATION_ID
    if 'OPBEAT_ORGANIZATION_ID' in config and 'ORGANIZATION_ID' not in config:
        warnings.warn(
            'Please use ORGANIZATION_ID to set the opbeat '
            'organization id your configuration',
            DeprecationWarning,
        )

    # raise a warning if APP_ID is set in the environment, but not OPBEAT_APP_ID
    # Until 1.3.1, we erroneously checked only APP_ID
    if 'APP_ID' in os.environ and 'OPBEAT_APP_ID' not in os.environ:
        warnings.warn(
            'Please use OPBEAT_APP_ID to set the opbeat '
            'app id in the environment',
            DeprecationWarning,
        )

    # raise a warning if SECRET_TOKEN is set in the environment, but not
    # OPBEAT_SECRET_TOKEN. Until 1.3.1, we erroneously checked only SECRET_TOKEN
    if 'SECRET_TOKEN' in os.environ and 'OPBEAT_SECRET_TOKEN' not in os.environ:
        warnings.warn(
            'Please use OPBEAT_SECRET_TOKEN to set the opbeat secret token '
            'in the environment',
            DeprecationWarning,
        )

    # Fallback chains: explicit argument, then config, then environment,
    # then the deprecated location.
    organization_id = (organization_id
                       or config.get('ORGANIZATION_ID')
                       or os.environ.get('OPBEAT_ORGANIZATION_ID')
                       or config.get('OPBEAT_ORGANIZATION_ID'))
    app_id = (app_id
              or config.get('APP_ID')
              or os.environ.get('OPBEAT_APP_ID')
              or os.environ.get('APP_ID'))
    secret_token = (secret_token
                    or config.get('SECRET_TOKEN')
                    or os.environ.get('OPBEAT_SECRET_TOKEN')
                    or os.environ.get('SECRET_TOKEN'))

    include = set(config.get('INCLUDE_PATHS', [])) | set([app.import_name])
    return client_cls(
        include_paths=include,
        exclude_paths=config.get('EXCLUDE_PATHS'),
        servers=config.get('SERVERS'),
        hostname=config.get('HOSTNAME'),
        timeout=config.get('TIMEOUT'),
        organization_id=organization_id,
        app_id=app_id,
        secret_token=secret_token,
    )
class Opbeat(object):
    """
    Flask application for Opbeat.

    Look up configuration from ``os.environ['OPBEAT_ORGANIZATION_ID']``,
    ``os.environ.get('OPBEAT_APP_ID')`` and
    ``os.environ.get('OPBEAT_SECRET_TOKEN')``::

    >>> opbeat = Opbeat(app)

    Pass an arbitrary ORGANIZATION_ID, APP_ID and SECRET_TOKEN::

    >>> opbeat = Opbeat(app, organiation_id='1', app_id='1', secret_token='asdasdasd')

    Pass an explicit client::

    >>> opbeat = Opbeat(app, client=client)

    Automatically configure logging::

    >>> opbeat = Opbeat(app, logging=True)

    Capture an exception::

    >>> try:
    >>>     1 / 0
    >>> except ZeroDivisionError:
    >>>     opbeat.capture_exception()

    Capture a message::

    >>> opbeat.captureMessage('hello, world!')
    """

    # NOTE(review): the ``logging`` parameter shadows the stdlib ``logging``
    # module inside __init__; harmless here, but worth renaming eventually.
    def __init__(self, app=None, organization_id=None, app_id=None,
                 secret_token=None, client=None,
                 client_cls=Client, logging=False):
        """Store configuration; wire into *app* immediately if given."""
        self.organization_id = organization_id
        self.app_id = app_id
        self.secret_token = secret_token
        self.logging = logging
        self.client_cls = client_cls
        self.client = client

        if app:
            self.init_app(app)

    def handle_exception(self, *args, **kwargs):
        """Signal receiver: report an unhandled request exception to Opbeat.

        Does nothing when no client is configured or when reporting is
        disabled for debug mode.
        """
        if not self.client:
            return

        if disabled_due_to_debug(
            self.app.config.get('OPBEAT', {}),
            self.app.config.get('DEBUG', False)
        ):
            return

        self.client.capture(
            'Exception', exc_info=kwargs.get('exc_info'),
            data=get_data_from_request(request),
            extra={
                'app': self.app,
            },
        )

    def init_app(self, app):
        """Attach error reporting and transaction tracing to *app*.

        Builds a client if none was supplied, optionally installs a logging
        handler, connects the exception/request signals, and instruments
        the process for traces (unless SKIP_INSTRUMENT is set).
        """
        self.app = app
        if not self.client:
            self.client = make_client(
                self.client_cls, app,
                self.organization_id, self.app_id, self.secret_token
            )

        if self.logging:
            setup_logging(OpbeatHandler(self.client))

        signals.got_request_exception.connect(self.handle_exception, sender=app, weak=False)

        # Instrument to get traces
        skip_env_var = 'SKIP_INSTRUMENT'
        if skip_env_var in os.environ:
            logger.debug("Skipping instrumentation. %s is set.", skip_env_var)
        else:
            opbeat.instrumentation.control.instrument(self.client)
            signals.request_started.connect(self.request_started)
            signals.request_finished.connect(self.request_finished)

    def request_started(self, app):
        """Signal receiver: open a transaction when a request begins."""
        self.client.begin_transaction("transaction.flask")

    def request_finished(self, app, response):
        """Signal receiver: close the transaction, named after the URL rule."""
        rule = request.url_rule.rule if request.url_rule is not None else ""
        self.client.end_transaction(rule, response.status_code)

    def capture_exception(self, *args, **kwargs):
        """Report the current (or given) exception to Opbeat."""
        assert self.client, 'capture_exception called before application configured'
        return self.client.capture_exception(*args, **kwargs)

    def capture_message(self, *args, **kwargs):
        """Report a plain message to Opbeat."""
        assert self.client, 'capture_message called before application configured'
        return self.client.capture_message(*args, **kwargs)

    @deprecated(alternative="capture_exception()")
    def captureException(self, *args, **kwargs):
        # Deprecated camelCase alias kept for backward compatibility.
        return self.capture_exception(*args, **kwargs)

    @deprecated(alternative="capture_message()")
    def captureMessage(self, *args, **kwargs):
        # Deprecated camelCase alias kept for backward compatibility.
        return self.capture_message(*args, **kwargs)
|
Python
| 0
|
@@ -5790,27 +5790,19 @@
action(%22
-transaction
+web
.flask%22)
|
cdbf31d056f1991b5206a4f42f2ab129f800e7a7
|
Add get_maximum_transfer_length function
|
letmecreate/core/spi.py
|
letmecreate/core/spi.py
|
#!/usr/bin/env python3
"""Python binding of SPI wrapper of LetMeCreate library."""
import ctypes
_LIB = ctypes.CDLL('libletmecreate_core.so')
# SPI_SPEED
SPI_680K = 680000
SPI_1M36 = 1360000
SPI_2M73 = 2730000
SPI_5M46 = 5460000
SPI_10M93 = 10930000
SPI_21M87 = 21870000
SPI_43M75 = 43750000
def init():
    """Initialise SPI on all mikrobus.

    ALL SPI buses are configured as:
        - 8 bits per word
        - 2.73MHz
        - Mode 3
    The current SPI bus is set to MIKROBUS_1.

    Note: An exception is thrown if an error occurs during initialisation.
    """
    if _LIB.spi_init() < 0:
        raise Exception("spi init failed")
def set_mode(mikrobus_index, mode):
    """Set the spi mode of the current SPI bus.

    The SPI bus must be initialised before calling this function.

    mikrobus_index: must be 0 (MIKROBUS_1) or 1 (MIKROBUS_2)
    mode: must be 0, 1, 2 or 3.

    Note: An exception is thrown if it fails to set the mode.
    """
    status = _LIB.spi_set_mode(mikrobus_index, mode)
    if status < 0:
        raise Exception("spi set mode failed")
def set_speed(mikrobus_index, speed):
    """Set the clock speed of the current SPI bus.

    The SPI bus must be initialised before calling this function. The SPI
    driver has only seven different speeds available, defined in #SPI_SPEED.
    If you try to set a speed that is not supported by the driver, it will
    find the closest speed without exceeding it.

    For instance, if you try to set the speed to 3MHz, the actual speed will
    be set to 2.73MHz.

    mikrobus_index: must be 0 (MIKROBUS_1) or 1 (MIKROBUS_2)
    speed: Clock speed in Hz of the current SPI bus.

    Note: An exception is thrown if it fails to set the mode.
    """
    status = _LIB.spi_set_speed(mikrobus_index, speed)
    if status < 0:
        raise Exception("spi set speed failed")
def select_bus(mikrobus_index):
    """Select the SPI bus

    mikrobus_index: must be 0 (MIKROBUS_1) or 1 (MIKROBUS_2)

    Subsequent transfer() calls use the bus selected here.
    """
    _LIB.spi_select_bus(mikrobus_index)
def get_current_bus():
    """Returns the currently selected SPI bus (0 or 1, see select_bus)."""
    return _LIB.spi_get_current_bus()
def transfer(tx_data):
    """Transfers data using the current SPI bus. Returns a list of bytes.

    tx_data: A list of bytes to send.

    Note: An exception is thrown if an error occurs during the transfer.
    """
    count = len(tx_data)
    out_buf = (ctypes.c_uint8 * count)(*tx_data)
    in_buf = (ctypes.c_uint8 * count)()
    if _LIB.spi_transfer(out_buf, in_buf, count) < 0:
        raise Exception("spi transfer failed")
    return list(in_buf)
def release():
    """Release all SPI bus.

    Note: An exception is thrown if it fails to release all SPI bus.
    """
    if _LIB.spi_release() < 0:
        raise Exception("spi release failed")
|
Python
| 0
|
@@ -2649,16 +2649,429 @@
gth)%5D%0A%0A%0A
+def get_maximum_tranfer_length():%0A %22%22%22Returns maximum length of a transfer in bytes.%0A%0A Note: An exception is thrown if it fails to find the limit.%0A %22%22%22%0A transfer_length_limit = ctypes.c_uint32(0)%0A ret = _LIB.spi_get_maximum_tranfer_length(ctypes.byref(transfer_length_limit))%0A if ret %3C 0:%0A raise Exception(%22spi get maximum tranfer length failed%22)%0A return transfer_length_limit.value%0A%0A%0A
def rele
|
abd3daed5cd0c70d76bf8fa1cfdda93efcda3e70
|
Make the `now` helper timezone aware
|
knights/compat/django.py
|
knights/compat/django.py
|
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri
import datetime
from knights.library import Library
register = Library()
@register.helper
def now(fmt):
    """Return the current time formatted with *fmt*.

    Fix: use Django's ``timezone.now()`` instead of the naive
    ``datetime.datetime.now()`` so the value respects USE_TZ and the
    configured time zone instead of the server's local clock.
    """
    from django.utils import timezone
    return timezone.now().strftime(fmt)
@register.helper
def url(name, *args, **kwargs):
    """Reverse *name* to a URL, or return None if it cannot be resolved.

    Fix: narrowed the bare ``except:`` (which also swallowed SystemExit and
    KeyboardInterrupt) to ``except Exception`` -- NoReverseMatch is the
    expected failure here.
    """
    try:
        return reverse(name, args=args, kwargs=kwargs)
    except Exception:
        return None
@register.helper
def static(filename):
    """Return a static URL prefix from Django settings ('' if unavailable).

    NOTE(review): ``getattr(settings, filename, '')`` looks up *filename*
    itself as a settings attribute name and otherwise ignores it -- this
    looks like it was meant to read a fixed setting (e.g. STATIC_URL) and
    join *filename* onto it; confirm intended behavior before relying on it.
    """
    try:
        from django.conf import settings
    except ImportError:
        prefix = ''
    else:
        prefix = iri_to_uri(getattr(settings, filename, ''))
    return prefix
|
Python
| 0.000001
|
@@ -39,16 +39,50 @@
reverse%0A
+from django.utils import timezone%0A
from dja
@@ -242,24 +242,15 @@
urn
-datetime.datetim
+timezon
e.no
|
cbe4f5470fec966538f63ca9beb04838bfbf3aa3
|
change to contentnode_1 and contentnode_2 for content relationship
|
kolibri/content/admin.py
|
kolibri/content/admin.py
|
from django.contrib import admin
from .models import PrerequisiteContentRelationship, RelatedContentRelationship
class PrerequisiteRelationshipInline1(admin.TabularInline):
    """Inline editor for prerequisite relationships (this node as side 1)."""
    model = PrerequisiteContentRelationship
    fk_name = 'contentmetadata_1'
    # Fix: Django's InlineModelAdmin has no ``max`` option (the old
    # ``max = 20`` was inert and shadowed the builtin); ``max_num`` is the
    # attribute that actually caps the number of inline forms.
    max_num = 20
    extra = 0
class PrerequisiteRelationshipInline2(admin.TabularInline):
    """Inline editor for prerequisite relationships (this node as side 2)."""
    model = PrerequisiteContentRelationship
    fk_name = 'contentmetadata_2'
    # Fix: ``max`` is not an InlineModelAdmin option; use ``max_num``.
    max_num = 20
    extra = 0
class RelatedRelationshipInline1(admin.TabularInline):
    """Inline editor for related-content relationships (this node as side 1)."""
    model = RelatedContentRelationship
    fk_name = 'contentmetadata_1'
    # Fix: ``max`` is not an InlineModelAdmin option; use ``max_num``.
    max_num = 20
    extra = 0
class RelatedRelationshipInline2(admin.TabularInline):
    """Inline editor for related-content relationships (this node as side 2)."""
    model = RelatedContentRelationship
    fk_name = 'contentmetadata_2'
    # Fix: ``max`` is not an InlineModelAdmin option; use ``max_num``.
    max_num = 20
    extra = 0
class ContentMetadataAdmin(admin.ModelAdmin):
    """Admin with inlines for both directions of both relationship types."""
    inlines = (PrerequisiteRelationshipInline1, PrerequisiteRelationshipInline2, RelatedRelationshipInline1, RelatedRelationshipInline2)
|
Python
| 0.000001
|
@@ -231,32 +231,28 @@
e = 'content
-metadata
+node
_1'%0A max
@@ -393,32 +393,28 @@
e = 'content
-metadata
+node
_2'%0A max
@@ -549,24 +549,20 @@
'content
-metadata
+node
_1'%0A
@@ -705,16 +705,12 @@
tent
-metadata
+node
_2'%0A
@@ -754,16 +754,12 @@
tent
-Metadata
+Node
Admi
|
d43657286f49271a6236499bdba288925fb23087
|
update tests to v1.2.0 (#1307)
|
exercises/roman-numerals/roman_numerals_test.py
|
exercises/roman-numerals/roman_numerals_test.py
|
import unittest
import roman_numerals
# Tests adapted from `problem-specifications//canonical-data.json` @ v1.0.0
class RomanTest(unittest.TestCase):
    """Check roman_numerals.numeral() against known arabic/roman pairs."""

    numerals = {
        1: 'I',
        2: 'II',
        3: 'III',
        4: 'IV',
        5: 'V',
        6: 'VI',
        9: 'IX',
        27: 'XXVII',
        48: 'XLVIII',
        59: 'LIX',
        93: 'XCIII',
        141: 'CXLI',
        163: 'CLXIII',
        402: 'CDII',
        575: 'DLXXV',
        911: 'CMXI',
        1024: 'MXXIV',
        3000: 'MMM',
    }

    def test_numerals(self):
        """Every arabic value must convert to its expected numeral."""
        for arabic in sorted(self.numerals):
            expected = self.numerals[arabic]
            self.assertEqual(roman_numerals.numeral(arabic), expected)


if __name__ == '__main__':
    unittest.main()
|
Python
| 0
|
@@ -106,17 +106,17 @@
n%60 @ v1.
-0
+2
.0%0A%0Aclas
@@ -321,24 +321,44 @@
: 'XLVIII',%0A
+ 49: 'XLIX',%0A
59:
|
1c2c7eb9b14a8abaea22e644d8b6e4bd4a649fcb
|
move replacing img path holder logic to back-end because if we deal with it at front-end, we are dealing with a JS object
|
kolibri/content/views.py
|
kolibri/content/views.py
|
import datetime
import mimetypes
import os
import zipfile
from django.http import Http404
from django.http.response import FileResponse, HttpResponseNotModified
from django.utils.http import http_date
from django.views.generic.base import View
from .utils.paths import get_content_storage_file_path
class ZipContentView(View):
    """Serve individual files from inside a stored content zip archive."""

    def get(self, request, zipped_filename, embedded_filepath):
        """
        Handles GET requests and serves a static file from within the zip file.
        """
        # calculate the local file path to the zip file
        zipped_path = get_content_storage_file_path(zipped_filename)

        # if the zipfile does not exist on disk, return a 404
        if not os.path.exists(zipped_path):
            raise Http404('"%(filename)s" does not exist locally' % {'filename': zipped_filename})

        # if client has a cached version, use that (we can safely assume nothing has changed, due to MD5)
        if request.META.get('HTTP_IF_MODIFIED_SINCE'):
            return HttpResponseNotModified()

        with zipfile.ZipFile(zipped_path) as zf:

            # get the details about the embedded file, and ensure it exists
            try:
                info = zf.getinfo(embedded_filepath)
            except KeyError:
                raise Http404('"{}" does not exist inside "{}"'.format(embedded_filepath, zipped_filename))

            # try to guess the MIME type of the embedded file being referenced
            content_type = mimetypes.guess_type(embedded_filepath)[0] or 'application/octet-stream'

            # generate a streaming response object, pulling data from within the zip file
            # NOTE(review): the response is returned while still inside the
            # ``with`` block; FileResponse streams lazily, so reads may occur
            # after the ZipFile has been closed -- confirm the response body
            # is fully consumed (or buffered) before the archive closes.
            response = FileResponse(zf.open(info), content_type=content_type)

            # set the last-modified header to the date marked on the embedded file
            if info.date_time:
                response["Last-Modified"] = http_date(float(datetime.datetime(*info.date_time).strftime("%s")))

            # cache these resources forever; this is safe due to the MD5-naming used on content files
            response["Expires"] = "Sun, 17-Jan-2038 19:14:07 GMT"

            # set the content-length header to the size of the embedded file
            if info.file_size:
                response["Content-Length"] = info.file_size

            # ensure the browser knows not to try byte-range requests, as we don't support them here
            response["Accept-Ranges"] = "none"

            return response
class DownloadContentView(View):
    """Serve a stored content file as a forced-download attachment."""

    def get(self, request, filename, new_filename):
        """
        Handles GET requests and serves a static file as an attachment.
        """
        # locate the file in local content storage
        path = get_content_storage_file_path(filename)

        # a file missing from disk is a 404, just like an unknown URL
        if not os.path.exists(path):
            raise Http404('"%(filename)s" does not exist locally' % {'filename': filename})

        # stream the file; the content-disposition header forces a download
        response = FileResponse(open(path, 'rb'))
        response['Content-Length'] = os.path.getsize(path)
        response['Content-Disposition'] = 'attachment;'
        response['Content-Type'] = mimetypes.guess_type(filename)[0]
        return response
|
Python
| 0
|
@@ -486,32 +486,104 @@
e.%0A %22%22%22%0A%0A
+ # path placeholder%0A path_place_holder = %22($%7Baronsface%7D%22%0A%0A
# calcul
@@ -1706,24 +1706,200 @@
e zip file%0A
+ content = zf.open(info).read()%0A content_with_path = content.replace(path_place_holder, %22%5C%5Cn%5C%5Cn!%5B%5D(/zipcontent/6d6fcdfb5d80e839918a03fea8ca0c9d.perseus%22)%0A
@@ -1922,29 +1922,33 @@
esponse(
-zf.open(info)
+content_with_path
, conten
@@ -2486,30 +2486,38 @@
gth%22%5D =
-info.file_size
+len(content_with_path)
%0A%0A
|
34b2385d6a3bb7acdbcd3f894d30dfbc734bd52e
|
allow to set matplotlib backend from env MATPLOTLIB_BACKEND
|
ggplot/__init__.py
|
ggplot/__init__.py
|
from .ggplot import *
from .exampledata import *
|
Python
| 0.000001
|
@@ -1,12 +1,331 @@
+# For testing purposes we might need to set mpl backend before any%0A# other import of matplotlib.%0Adef _set_mpl_backend():%0A import os%0A import matplotlib as mpl%0A%0A env_backend = os.environ.get('MATPLOTLIB_BACKEND')%0A if env_backend:%0A # we were instructed%0A mpl.use(env_backend)%0A%0A_set_mpl_backend()%0A%0A
from .ggplot
|
4e9a530403dce47f322df471255a0fc40fd1071f
|
Change number of episodes to 60000
|
examples/tic_ql_tabular_selfplay_all.py
|
examples/tic_ql_tabular_selfplay_all.py
|
'''
The Q-learning algorithm is used to learn the state-action values for all
Tic-Tac-Toe positions by playing games against itself (self-play).
'''
from capstone.game.games import TicTacToe
from capstone.game.players import RandPlayer
from capstone.rl import Environment, GameMDP
from capstone.rl.learners import QLearningSelfPlay
from capstone.rl.policies import EGreedy, RandomPolicy
from capstone.rl.utils import EpisodicWLDPlotter
from capstone.rl.value_functions import TabularQ
# Build the Tic-Tac-Toe game and wrap it as an RL environment over its MDP.
game = TicTacToe()
env = Environment(GameMDP(game))
tabularq = TabularQ(random_state=23)
# Epsilon-greedy policy over the tabular Q-function.
# NOTE(review): ``egreedy`` is constructed but the learner below is given
# ``rand_policy`` -- confirm which behaviour policy is intended.
egreedy = EGreedy(env.actions, tabularq, epsilon=0.5, random_state=23)
rand_policy = RandomPolicy(env.actions, random_state=23)
# Q-learning via self-play; periodically plots win/loss/draw rates
# against a random opponent.
qlearning = QLearningSelfPlay(
    env=env,
    qf=tabularq,
    policy=rand_policy,
    learning_rate=0.1,
    discount_factor=0.99,
    n_episodes=3000,
    verbose=0,
    callbacks=[
        EpisodicWLDPlotter(
            game=game,
            opp_player=RandPlayer(random_state=23),
            n_matches=2000,
            period=1000,
            filename='tic_ql_tabular_selfplay_all.pdf'
        )
    ]
)
qlearning.learn()
|
Python
| 0.999996
|
@@ -845,17 +845,18 @@
pisodes=
-3
+60
000,%0A
|
f694b2a234216ae7ecc7b925799f43caca5e9a32
|
add more debug output for config file
|
preprocess.py
|
preprocess.py
|
#!/usr/bin/env python3
# Copyright 2016 Curtis Sand <curtissand@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Preprocessor script for Squadron."""
import sys
from lib.cmdline import PreprocessUI
from lib.cmdline import dprint
from lib.cmdline import vprint
from lib.config import ConfigFile
VERSION = "0.0"
def main():
    """Main method for Squadron preprocessor.

    Parses the command line, loads the config file, and dumps its
    sections/options through the debug printer.  Returns 0 on success.
    """
    ui = PreprocessUI(version=VERSION)
    options = ui.parse_cmd_line()
    dprint('cmdline args: %s' % options)

    vprint('Parsing Config File...')
    config_file = ConfigFile(options.config)
    config_file.load()
    dprint('Config Sections: %s' % config_file.parser.sections())
    for name in config_file.parser.sections():
        dprint('Options in section %s: %s' %
               (name, config_file.parser.options(name)))
    return 0
if __name__ == '__main__':
    try:
        sys.exit(main())
    except SystemExit as exc:
        # Fix: the previous handler re-exited with 0 unconditionally, which
        # discarded the status returned by main(); propagate the real code.
        sys.exit(exc.code)
    except KeyboardInterrupt:
        print('...interrupted by user, exiting.')
        sys.exit(1)
    except Exception as exc:
        # In debug mode surface the traceback; otherwise print a terse
        # message and exit with failure.
        import lib.cmdline
        if lib.cmdline.DEBUG:
            raise
        else:
            print(exc)
            sys.exit(1)
|
Python
| 0
|
@@ -1347,16 +1347,324 @@
ction)))
+%0A dprint('Enabled Tasks: %25s' %25 config_file.enabled_tasks)%0A dprint('Copy Files Jobs:')%0A for cfj in list(config_file.copy_files_jobs):%0A dprint(%22%25s:%22 %25 cfj.name)%0A dprint(%22 sources: %25s%22 %25 ' '.join(cfj.value.sources.value))%0A dprint(%22 destination: %25s%22 %25 cfj.value.destination.value)
%0A%0A re
|
292fd33a3d251b4c5773e96989e45d8e3d7c6c3b
|
Change tasks in settingfs
|
krunchr/settings/base.py
|
krunchr/settings/base.py
|
from socket import gethostname
HOSTNAME = gethostname()
HOSTNAME_SHORT = HOSTNAME.split('.')[0]
APPLICATION_ROOT = '/v1/krunchr'
DEBUG = True
RETHINKDB_HOST = 'batman.krunchr.net'
RETHINKDB_PORT = 28019
RETHINKDB_AUTH = ''
RETHINKDB_DB = 'krunchr'
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
# we must use a safe serializer in order to run celery as root
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERYD_HIJACK_ROOT_LOGGER = False
CELERY_IMPORTS = ('analyser.tasks', 'map.jobs.sum', 'utils.tasks.execute')
DISCO_FILES = '/tmp'
DISCO_NODES = '3'
|
Python
| 0.000001
|
@@ -571,24 +571,8 @@
ks',
- 'map.jobs.sum',
'ut
|
9fa72e3df775d4848336bcb0965cfd6afeaf5953
|
change cmi_and_test to invoke the targets 'all', 'tests' and 'run_tests' separately
|
scripts/devel/catkin_make_isolated_and_test.py
|
scripts/devel/catkin_make_isolated_and_test.py
|
#!/usr/bin/env python3
import argparse
import os
import sys
from ros_buildfarm.catkin_workspace import call_catkin_make_isolated
from ros_buildfarm.catkin_workspace import clean_workspace
from ros_buildfarm.catkin_workspace import ensure_workspace_exists
def main(argv=sys.argv[1:]):
    """Build a catkin workspace with tests enabled and run them.

    Parses the workspace/distro options, optionally cleans the workspace
    before and/or after, and invokes catkin_make_isolated with testing
    enabled.  Returns the invocation's return code.
    """
    parser = argparse.ArgumentParser(
        description="Invoke 'catkin_make_isolated' on a workspace while "
                    "enabling and running the tests")
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced (if available)')
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--parent-result-space',
        help='The path of the parent result space')
    parser.add_argument(
        '--clean-before',
        action='store_true',
        help='The flag if the workspace should be cleaned before the '
             'invocation')
    parser.add_argument(
        '--clean-after',
        action='store_true',
        help='The flag if the workspace should be cleaned after the '
             'invocation')
    args = parser.parse_args(argv)

    ensure_workspace_exists(args.workspace_root)

    if args.clean_before:
        clean_workspace(args.workspace_root)

    try:
        # Enable testing in CMake and force single-threaded test execution.
        test_results_dir = os.path.join(args.workspace_root, 'test_results')
        rc = call_catkin_make_isolated(
            args.rosdistro_name, args.workspace_root,
            ['--cmake-args', '-DCATKIN_ENABLE_TESTING=1',
             '-DCATKIN_SKIP_TESTING=0',
             '-DCATKIN_TEST_RESULTS_DIR=%s' % test_results_dir,
             '--catkin-make-args', '-j1', 'run_tests'],
            parent_result_space=args.parent_result_space)
    finally:
        # Clean even if the build raised, when requested.
        if args.clean_after:
            clean_workspace(args.workspace_root)

    return rc


if __name__ == '__main__':
    sys.exit(main())
|
Python
| 0.000008
|
@@ -1492,93 +1492,21 @@
-rc = call_catkin_make_isolated(%0A args.rosdistro_name, args.workspace_root,
+arguments = %5B
%0A
@@ -1514,17 +1514,16 @@
-%5B
'--cmake
@@ -1563,33 +1563,32 @@
1',%0A
-
'-DCATKIN_SKIP_T
@@ -1598,17 +1598,16 @@
ING=0',%0A
-
@@ -1669,25 +1669,24 @@
-
'--catkin-ma
@@ -1700,18 +1700,581 @@
', '-j1'
-,
+%5D%0A rc = call_catkin_make_isolated(%0A args.rosdistro_name, args.workspace_root,%0A arguments,%0A parent_result_space=args.parent_result_space)%0A if not rc:%0A rc = call_catkin_make_isolated(%0A args.rosdistro_name, args.workspace_root,%0A arguments + %5B'tests'%5D,%0A parent_result_space=args.parent_result_space)%0A if not rc:%0A rc = call_catkin_make_isolated(%0A args.rosdistro_name, args.workspace_root,%0A arguments + %5B
'run_tes
@@ -2271,32 +2271,40 @@
%5B'run_tests'%5D,%0A
+
pare
|
2d391f9e6183f06c0785cbb2c57b7a4fcf703a80
|
Bump version to 0.1a14
|
chirptext/__version__.py
|
chirptext/__version__.py
|
# -*- coding: utf-8 -*-
# chirptext's package version information
__author__ = "Le Tuan Anh"
__email__ = "tuananh.ke@gmail.com"
__copyright__ = "Copyright (c) 2012, Le Tuan Anh"
__credits__ = []
__license__ = "MIT License"
__description__ = "ChirpText is a collection of text processing tools for Python."
__url__ = "https://github.com/letuananh/chirptext"
__maintainer__ = "Le Tuan Anh"
__version_major__ = "0.1"
__version__ = "{}a13".format(__version_major__)
__version_long__ = "{} - Alpha".format(__version_major__)
__status__ = "Prototype"
|
Python
| 0
|
@@ -431,9 +431,9 @@
%7B%7Da1
-3
+4
%22.fo
|
6436a8adf4088f59c704a7d49e5f61c30d665058
|
return true in ping cli
|
lacli/server/__init__.py
|
lacli/server/__init__.py
|
from lacli.decorators import command
from lacli.command import LaBaseCommand
from twisted.python.log import startLogging, msg
from twisted.internet import reactor
from thrift.transport import TTwisted
from thrift.protocol import TBinaryProtocol
from lacli.server.interface.ClientInterface import CLI
import sys
import zope
class LaServerCommand(LaBaseCommand):
"""Run a RPC server
Usage: lacli server [--no-detach] [--port <port>]
Options:
--no-detach don't detach from terminal
--port <port> port to listen on [default: 9090]
"""
zope.interface.implements(CLI.Iface)
prompt = 'lacli:server> '
def makecmd(self, options):
cmd = ["run"]
if options['--port']:
cmd.append(options['--port'])
return " ".join(cmd)
@command(port=int)
def do_run(self, port=9090):
"""
Usage: run [<port>]
"""
reactor.listenTCP(port, TTwisted.ThriftServerFactory(
processor=CLI.Processor(self),
iprot_factory=TBinaryProtocol.TBinaryProtocolFactory()))
startLogging(sys.stderr)
msg('Running reactor')
reactor.run()
def PingCLI(self):
msg('pingCLI()')
|
Python
| 0.00219
|
@@ -1230,8 +1230,28 @@
CLI()')%0A
+ return True%0A
|
af40e69bca873a7d0060aaf3391fb8feb91bf673
|
Use correct format for custom payload keys
|
openprescribing/frontend/signals/handlers.py
|
openprescribing/frontend/signals/handlers.py
|
import logging
from allauth.account.signals import user_logged_in
from anymail.signals import tracking
from requests_futures.sessions import FuturesSession
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.conf import settings
from common.utils import google_user_id
from frontend.models import Profile
logger = logging.getLogger(__name__)
@receiver(post_save, sender=User)
def handle_user_save(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
@receiver(user_logged_in, sender=User)
def handle_user_logged_in(sender, request, user, **kwargs):
user.searchbookmark_set.update(approved=True)
user.orgbookmark_set.update(approved=True)
def send_ga_event(event):
user = User.objects.filter(email=event.recipient)
if user:
user = user[0]
session = FuturesSession()
payload = {
'v': 1,
'tid': settings.GOOGLE_TRACKING_ID,
'cid': google_user_id(user),
't': 'event',
'ec': 'email',
'ea': event.event_type,
'ua': event.user_agent,
'cm': 'email',
}
if event.metadata:
payload['dt'] = event.metadata['subject']
payload['cn'] = event.metadata['campaign_name']
payload['cs'] = event.metadata['campaign_source']
payload['dp'] = "/email/%s/%s/%s/%s" % (
event.metadata['campaign_name'],
event.metadata['campaign_source'],
event.metadata['user_id'],
event.event_type
)
else:
logger.info("No metadata found for event type %s" %
event.event_type)
logger.debug("Full event data: %s" % event.__dict__)
logger.debug("Recording event in Analytics: %s" % payload)
session.post(
'https://www.google-analytics.com/collect', data=payload)
else:
logger.error("Could not find receipient %s" % event.recipient)
@receiver(tracking)
def handle_anymail_webhook(sender, event, esp_name, **kwargs):
logger.info("Received webhook from %s: %s" % (esp_name, event.event_type))
send_ga_event(event)
|
Python
| 0.000004
|
@@ -1228,90 +1228,32 @@
-%7D%0A
- if event.metadata:%0A payload%5B
'dt'
-%5D =
+:
event.
-metadata%5B'
subject
-'%5D
+,
%0A
@@ -1265,40 +1265,20 @@
-payload%5B
'cn'
-%5D =
+:
event.
-metadata%5B'
camp
@@ -1286,18 +1286,17 @@
ign_name
-'%5D
+,
%0A
@@ -1304,40 +1304,20 @@
-payload%5B
'cs'
-%5D =
+:
event.
-metadata%5B'
camp
@@ -1327,18 +1327,17 @@
n_source
-'%5D
+,
%0A
@@ -1345,23 +1345,13 @@
-payload%5B
'dp'
-%5D =
+:
%22/e
@@ -1390,34 +1390,24 @@
event.
-metadata%5B'
campaign_nam
@@ -1407,18 +1407,16 @@
ign_name
-'%5D
,%0A
@@ -1431,26 +1431,16 @@
event.
-metadata%5B'
campaign
@@ -1446,18 +1446,16 @@
n_source
-'%5D
,%0A
@@ -1474,27 +1474,15 @@
ent.
-metadata%5B'
user_id
-'%5D
,%0A
@@ -1538,184 +1538,9 @@
-else:%0A logger.info(%22No metadata found for event type %25s%22 %25%0A event.event_type)%0A logger.debug(%22Full event data: %25s%22 %25 event.__dict__)
+%7D
%0A
@@ -1940,16 +1940,73 @@
_type))%0A
+ logger.debug(%22Full event data: %25s%22 %25 event.__dict__)%0A
send
|
52a6ea1e7dd4333b9db6a0bbd53b8ae0b39a1f6d
|
Add __doc__ to module functions
|
Designs/redundant.py
|
Designs/redundant.py
|
'''Due to the needs arising from completing the project on time, I have defined redundant.py
which will hold replacement modules as I migrate from file based application to lists only web application. This modules
so far will offer the capabilities of registration, creating a shopping list and adding items into
a shopping list'''
global account
account=[]
def register(username,email,password):
account.append(username)
account.append(email)
account.append(password)
return account
global shopping_list_container
shopping_list_container=[]#contain shopping lists only
def create(list_name):
#list_name=[]
shopping_list_container.append(list_name)
return shopping_list_container#list of dictionaries
def list_update(nameoflist,item):
nameoflist.append(item)
shopping_list_container.append(nameoflist)
global itemsdictionary
itemsdictionary={}
def create1(slist):
itemsdictionary.update(slist)
global shared_shopping_list_container
shared_shopping_list_container=[]
def create3(list_name):
#list_name=[]
shared_shopping_list_container.append(list_name)
return shared_shopping_list_container#list of dictionaries
global shareditemsdictionary
shareditemsdictionary={}
def create2(slist):
shareditemsdictionary.update(slist)
|
Python
| 0.000025
|
@@ -391,16 +391,44 @@
sword):%0A
+ '''registration list'''%0A
acco
@@ -444,24 +444,24 @@
d(username)%0A
-
account.
@@ -625,32 +625,79 @@
ate(list_name):%0A
+ '''container of names of shopping lists'''%0A
#list_na
@@ -846,16 +846,62 @@
,item):%0A
+ '''adding item to a given name of list'''%0A
%0A
@@ -1031,24 +1031,99 @@
te1(slist):%0A
+ '''update shopping lists with key (names) and items(as dictionaris)'''%0A
itemsdic
@@ -1237,24 +1237,108 @@
list_name):%0A
+ '''container for the shared lists. In future may be integrated with facebook'''%0A
#lis
@@ -1472,18 +1472,16 @@
naries%0A%0A
-%0A%0A
global s
@@ -1518,32 +1518,32 @@
msdictionary=%7B%7D%0A
-
def create2(slis
@@ -1542,24 +1542,61 @@
te2(slist):%0A
+ '''updating shared dictionary'''%0A
sharedit
|
965ac6a04871d45bcec25ccaabe80197b1c8e104
|
Fix flake8 errors.
|
st2actions/st2actions/runners/actionchainrunner.py
|
st2actions/st2actions/runners/actionchainrunner.py
|
import ast
import eventlet
import jinja2
import json
import six
import uuid
from oslo.config import cfg
from st2actions.runners import ActionRunner
from st2common import log as logging
from st2common.exceptions import actionrunner as runnerexceptions
from st2common.models.api import action
from st2common.services import action as action_service
from st2common.util import action_db as action_db_util
LOG = logging.getLogger(__name__)
class ActionChain(object):
class Node(object):
def __init__(self, name, action_name, params):
self.name = name
self.action_name = action_name
self.params = params
class Link(object):
def __init__(self, head, tail, condition):
self.head = head
self.tail = tail
self.condition = condition
def __init__(self, chainspec):
chain = chainspec.get('chain', [])
self.default = chainspec.get('default', '')
self.nodes = {}
self.links = {}
for node in chain:
node_name = node['name']
self.nodes[node_name] = ActionChain.Node(
node_name, node['action'], node['params'])
self.links[node_name] = []
on_success = node.get('on-success', None)
if on_success:
self.links[node_name].append(ActionChain.Link(node_name, on_success, 'on-success'))
on_failure = node.get('on-failure', None)
if on_failure:
self.links[node_name].append(ActionChain.Link(node_name, on_failure, 'on-failure'))
def get_next_node(self, curr_node_name=None, condition='on-success'):
if not curr_node_name:
return self.nodes.get(self.default, None)
links = self.links.get(curr_node_name, None)
if not links:
return None
for link in links:
if link.condition == condition:
return self.nodes.get(link.tail, None)
return None
class ActionChainRunner(ActionRunner):
def __init__(self, id):
self.id = id
def pre_run(self):
chainspec_file = self.entry_point
LOG.debug('Reading action chain from %s for action %s.', chainspec_file,
self.action)
try:
with open(chainspec_file, 'r') as fd:
chainspec = json.load(fd)
self.action_chain = ActionChain(chainspec)
except Exception as e:
LOG.exception('Failed to instantiate ActionChain.')
raise runnerexceptions.ActionRunnerPreRunError(e.message)
def run(self, action_parameters):
action_node = self.action_chain.get_next_node()
results = {}
while action_node:
actionexec = None
fail = False
try:
resolved_params = ActionChainRunner._resolve_params(action_node, action_parameters,
results)
actionexec = ActionChainRunner._run_action(action_node.action_name, resolved_params)
except:
LOG.exception('Failure in running action %s.', action_node.name)
else:
# for now append all successful results
results[action_node.name] = actionexec.result
finally:
if not actionexec or actionexec.status == action.ACTIONEXEC_STATUS_ERROR:
fail = True
action_node = self.action_chain.get_next_node(action_node.name, 'on-failure')
elif actionexec.status == action.ACTIONEXEC_STATUS_COMPLETE:
action_node = self.action_chain.get_next_node(action_node.name, 'on-success')
self.container_service.report_result(results)
return not fail
@staticmethod
def _resolve_params(action_node, original_parameters, results):
# setup context with original parameters and the intermediate results.
context = {}
context.update(original_parameters)
context.update(results)
env = jinja2.Environment(undefined=jinja2.StrictUndefined)
rendered_params = {}
for k, v in six.iteritems(action_node.params):
# jinja2 works with string so transform list and dict to strings.
reverse_json_dumps = False
if isinstance(v, dict) or isinstance(v, list):
v = json.dumps(v)
reverse_json_dumps = True
else:
v = str(v)
rendered_v = env.from_string(v).render(context)
# no change therefore no templatization so pick params from original to retain
# original type
if rendered_v == v:
rendered_params[k] = action_node.params[k]
continue
if reverse_json_dumps:
rendered_v = json.loads(rendered_v)
rendered_params[k] = rendered_v
return rendered_params
@staticmethod
def _run_action(action_name, params, wait_for_completion=True):
execution = action.ActionExecutionAPI(**{'action': {'name': action_name}})
execution.parameters = ActionChainRunner._cast_params(action_name, params)
execution = action_service.schedule(execution)
while (wait_for_completion and
execution.status != action.ACTIONEXEC_STATUS_COMPLETE and
execution.status != action.ACTIONEXEC_STATUS_ERROR):
eventlet.sleep(1)
execution = action_db_util.get_actionexec_by_id(execution.id)
return execution
@staticmethod
def _cast_params(action_name, params):
casts = {
'array': (lambda x: json.loads(x) if isinstance(x, str) or isinstance(x, unicode)
else x),
'boolean': (lambda x: ast.literal_eval(x.capitalize())
if isinstance(x, str) or isinstance(x, unicode) else x),
'integer': int,
'number': float,
'object': (lambda x: json.loads(x) if isinstance(x, str) or isinstance(x, unicode)
else x),
'string': str
}
action_db = action_db_util.get_action_by_name(action_name)
action_parameters_schema = action_db.parameters
runnertype_db = action_db_util.get_runnertype_by_name(action_db.runner_type['name'])
runner_parameters_schema = runnertype_db.parameters
# combine into 1 list of parameter schemas
parameters_schema = {}
if runner_parameters_schema:
parameters_schema.update(runner_parameters_schema)
if action_parameters_schema:
parameters_schema.update(action_parameters_schema)
# cast each param individually
for k, v in six.iteritems(params):
parameter_schema = parameters_schema.get(k, None)
if not parameter_schema:
continue
parameter_type = parameter_schema.get('type', None)
if not parameter_type:
continue
cast = casts.get(parameter_type, None)
if not cast:
continue
params[k] = cast(v)
return params
def get_runner():
return ActionChainRunner(str(uuid.uuid4()))
|
Python
| 0
|
@@ -74,37 +74,8 @@
id%0A%0A
-from oslo.config import cfg%0A%0A
from
|
9f52714e696879e46ebf98164827ebf0cc4cd666
|
Return to homepage after logout
|
opendebates/registration_urls.py
|
opendebates/registration_urls.py
|
"""
URLconf for registration and activation, using django-registration's
one-step backend.
If the default behavior of these views is acceptable to you, simply
use a line like this in your root URLconf to set up the default URLs
for registration::
(r'^accounts/', include('registration.backends.simple.urls')),
This will also automatically set up the views in
``django.contrib.auth`` at sensible default locations.
If you'd like to customize registration behavior, feel free to set up
your own URL patterns for these views instead.
"""
from django.conf.urls import include
from django.conf.urls import patterns
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from .forms import OpenDebatesAuthenticationForm
from .views import OpenDebatesRegistrationView
urlpatterns = patterns('',
url(r'^register/$',
OpenDebatesRegistrationView.as_view(),
name='registration_register'),
# url(r'^register/closed/$',
# TemplateView.as_view(template_name='registration/registration_closed.html'),
# name='registration_disallowed'),
url(r'^register/complete/$',
'opendebates.views.registration_complete', name="registration_complete"),
url(r'^login/$',
'django.contrib.auth.views.login',
{
'template_name': 'registration/login.html',
'authentication_form': OpenDebatesAuthenticationForm,
},
name='auth_login'),
# override the default urls
url(r'^password/change/$',
auth_views.password_change,
name='password_change'),
url(r'^password/change/done/$',
auth_views.password_change_done,
name='password_change_done'),
url(r'^password/reset/$',
auth_views.password_reset,
{'html_email_template_name': "registration/password_reset_email.html",
'email_template_name': "registration/password_reset_email.txt",
'template_name': "registration/password_reset_form.html"},
name='password_reset'),
url(r'^password/reset/done/$',
auth_views.password_reset_done,
name='password_reset_done'),
url(r'^password/reset/complete/$',
auth_views.password_reset_complete,
name='password_reset_complete'),
url(r'^password/reset/confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$',
auth_views.password_reset_confirm,
name='password_reset_confirm'),
(r'', include('registration.auth_urls')),
)
|
Python
| 0.000001
|
@@ -3108,24 +3108,265 @@
confirm'),%0A%0A
+ url(r'%5Elogout/$',%0A auth_views.logout,%0A %7B'template_name': 'registration/logout.html',%0A 'next_page': %22list_ideas%22%7D),%0A %0A
|
daf4cb0f7c273be107cef871e9a67f52f674495f
|
Fix test failure from e26de49a6104.
|
fixlib/engine.py
|
fixlib/engine.py
|
# Copyright (C) 2010 KenTyde BV
# All rights reserved.
#
# This software is licensed as described in the file LICENSE,
# which you should have received as part of this distribution.
from datetime import datetime
import fix42
import asyncore
class Engine(asyncore.dispatcher):
def __init__(self, sock):
asyncore.dispatcher.__init__(self, sock)
self.channels = []
self.closed = False
@property
def next(self):
return self.store.last[1] + 1
def handle_close(self):
self.closed = True
self.hook('close')
self.close()
def hook(self, hook, data=None):
for fun in self.hooks.get(hook, []):
fun(hook, data)
def handle_read(self):
bits = [self.recv(8192)]
while len(bits[-1]) == 8192:
bits.append(self.recv(8192))
raw = ''.join(bits)
self.hook('recv', raw)
msgs = fix42.parse(raw)
for msg in msgs:
self.process(msg)
def writable(self):
return self.buffer
def handle_write(self):
sent = self.send(self.buffer[0])
if len(self.buffer[0]) == sent:
self.buffer.pop(0)
else:
self.buffer[0] = self.buffer[0][sent:]
def queue(self, msg):
if 'MsgSeqNum' not in msg:
msg['MsgSeqNum'] = self.next
if 'SendingTime' not in msg:
msg['SendingTime'] = datetime.utcnow()
msg['SenderCompID'] = self.parties[0]
msg['TargetCompID'] = self.parties[1]
self.store.save('out', msg)
self.hook('write', msg)
raw = fix42.construct(msg)
self.hook('send', raw)
self.buffer.append(raw)
def resend(self, req):
start, end = req['BeginSeqNo'], req['EndSeqNo']
fill, cur = None, start
for i in range(start, end + 1):
msg = self.store.get('out', i)
if msg is None or msg['MsgType'] in fix42.IGNORE:
fill = i
continue
if fill is not None:
self.queue({
'MsgType': 'Sequence Reset',
'MsgSeqNum': cur,
'GapFillFlag': True,
'NewSeqNo': fill + 1,
})
fill = None
msg.pop('_id')
msg.pop('_rev')
msg['PossDupFlag'] = True
self.queue(msg)
cur += 1
if fill is not None:
self.queue({
'MsgType': 'Sequence Reset',
'MsgSeqNum': cur,
'GapFillFlag': True,
'NewSeqNo': fill + 1,
})
def process(self, msg):
type = 'admin' if msg['MsgType'] in fix42.ADMIN else 'app'
self.hook(type, msg)
if msg['MsgSeqNum'] > self.store.last[0] + 1:
rsp = {'MsgType': 'Resend Request', 'EndSeqNo': 0}
rsp['BeginSeqNo'] = self.store.last[0] + 1
rsp['EndSeqNo'] = msg['MsgSeqNum'] - 1
self.queue(rsp)
self.store.save('in', msg)
if msg['MsgType'] == 'Test Request':
rsp = {'MsgType': 'HeartBeat', 'TestReqID': msg['TestReqID']}
self.queue(rsp)
elif msg['MsgType'] == 'HeartBeat':
self.queue({'MsgType': 'HeartBeat'})
elif msg['MsgType'] == 'Resend Request':
self.resend(msg)
class Initiator(Engine):
def __init__(self, sock, store, parties):
Engine.__init__(self, sock)
self.parties = parties
self.store = store
self.buffer = []
def logon(self, hbi, em, reset=False, login=None):
req = {'MsgType': 'Logon', 'HeartBtInt': hbi, 'EncryptMethod': em}
if reset:
req.update({'ResetSeqNumFlag': True, 'MsgSeqNum': 1})
if login:
req.update({'Username': login[0], 'Password': login[1]})
self.queue(req)
class AcceptorServer(asyncore.dispatcher):
def __init__(self, sock, store):
asyncore.dispatcher.__init__(self, sock)
self.store = store
self.hooks = {}
def handle_accept(self):
client = self.accept()
a = Acceptor(client[0], self.store)
a.hooks = self.hooks
class Acceptor(Engine):
def __init__(self, sock, store):
Engine.__init__(self, sock)
self.store = store
self.buffer = []
self.hooks = {}
self.parties = None
def process(self, msg):
if msg['MsgType'] == 'Logon':
self.parties = msg['TargetCompID'], msg['SenderCompID']
rsp = {
'MsgType': 'Logon',
'HeartBtInt': msg['HeartBtInt'],
'EncryptMethod': msg['EncryptMethod'],
}
if msg.get('ResetSeqNumFlag'):
rsp.update({'ResetSeqNumFlag': True})
self.queue(rsp)
Engine.process(self, msg)
|
Python
| 0
|
@@ -2925,16 +2925,34 @@
er = %5B%5D%0A
+%09%09self.hooks = %7B%7D%0A
%09%0A%09def l
|
111cdf1496074e25b764e042fa0ab1b7b0e2a2b7
|
Add agriculture import to calendar registry
|
pandas_market_calendars/calendar_registry.py
|
pandas_market_calendars/calendar_registry.py
|
from .market_calendar import MarketCalendar
from .exchange_calendar_asx import ASXExchangeCalendar
from .exchange_calendar_bmf import BMFExchangeCalendar
from .exchange_calendar_cfe import CFEExchangeCalendar
from .exchange_calendar_cme import CMEExchangeCalendar
from .exchange_calendar_eurex import EUREXExchangeCalendar
from .exchange_calendar_hkex import HKEXExchangeCalendar
from .exchange_calendar_ice import ICEExchangeCalendar
from .exchange_calendar_jpx import JPXExchangeCalendar
from .exchange_calendar_lse import LSEExchangeCalendar
from .exchange_calendar_nyse import NYSEExchangeCalendar
from .exchange_calendar_ose import OSEExchangeCalendar
from .exchange_calendar_six import SIXExchangeCalendar
from .exchange_calendar_sse import SSEExchangeCalendar
from .exchange_calendar_tsx import TSXExchangeCalendar
def get_calendar(name, open_time=None, close_time=None):
"""
Retrieves an instance of an MarketCalendar whose name is given.
:param name: The name of the MarketCalendar to be retrieved.
:param open_time: Market open time override as datetime.time object. If None then default is used.
:param close_time: Market close time override as datetime.time object. If None then default is used.
:return: MarketCalendar of the desired calendar.
"""
return MarketCalendar.factory(name, open_time=open_time, close_time=close_time)
def get_calendar_names():
"""All Market Calendar names and aliases that can be used in "factory"
:return: list(str)
"""
return MarketCalendar.calendar_names()
|
Python
| 0
|
@@ -249,32 +249,110 @@
xchangeCalendar%0A
+from .exchange_calendar_cme_agriculture import CMEAgricultureExchangeCalendar%0A
from .exchange_c
|
1534c3c47fea71db2cf4f9f224c2e5ff5a8632e2
|
Remove audio file after playing, not while
|
tests/test.py
|
tests/test.py
|
from time import sleep
import sys
import os
sys.path.insert(0, os.path.realpath("../../swood"))
import swood
def find_program(prog):
for path in os.environ["PATH"].split(os.pathsep):
vlc_location = os.path.join(path.strip('"'), prog)
if os.path.isfile(fpath):
return vlc_location, args
return None
def play_audio(clip):
import subprocess
if os.name == "nt":
if os.path.isfile("C:/Program Files (x86)/VideoLAN/VLC/vlc.exe"):
return subprocess.Popen(["C:/Program Files (x86)/VideoLAN/VLC/vlc.exe", clip, "vlc://quit"])
elif find_program("vlc.exe"):
return subprocess.Popen([find_program("vlc.exe"), clip, "vlc://quit"])
elif os.path.isfile("C:/Program Files (x86)/Windows Media Player/wmplayer.exe"):
return subprocess.Popen(["C:/Program Files (x86)/Windows Media Player/wmplayer.exe", clip, "/Play", "/Close"])
elif find_program("wmplayer.exe"):
return subprocess.Popen([find_program("wmplayer.exe"), clip, "/Play", "/Close"])
else:
raise FileNotFoundError("Can't find an audio player.")
running_player = None
def run(midi, *args, play=False):
global running_player
print("~~~~~~~~~~ Testing '{}' ~~~~~~~~~~".format(midi))
out = "outputs/" + midi + ".wav"
swood.run_cmd(["samples/doot.wav", "midis/" + midi + ".mid", out, "--no-pbar", *args])
if play:
if not os.path.isfile(out):
return
if running_player:
os.remove(out)
running_player.wait()
running_player = play_audio(out)
if sys.argv[1] == "playall":
try:
run("beethoven", play=True)
run("dummy", play=True)
run("pitchbend", play=True)
finally:
import glob
for wav in glob.iglob("outputs/*.wav"):
os.remove(wav)
elif sys.argv[1] == "all":
run("beethoven")
run("dummy")
run("pitchbend")
elif sys.argv[1] == "bend":
run("pitchbend")
|
Python
| 0.000001
|
@@ -1515,21 +1515,28 @@
-os.remove(out
+running_player.wait(
)%0A
@@ -1537,32 +1537,42 @@
t()%0A
+os.remove(
running_player.w
@@ -1562,37 +1562,39 @@
(running_player.
-wait(
+args%5B1%5D
)%0A runnin
|
c9c27a51d8c4469f3167d6e5449b31723379241e
|
fix test for PY3
|
tests/test.py
|
tests/test.py
|
from easyprocess import Proc
from nose.tools import eq_, ok_
from unittest import TestCase
import os.path
d = os.path.dirname(__file__)
example1_py = os.path.join(d, 'example1.py')
example2_py = os.path.join(d, 'example2.py')
example3_py = os.path.join(d, 'example3.py')
def test_1_call():
import example1
eq_(example1.f(3), 3)
eq_('description' in example1.f.__doc__, True)
eq_(example1.f.__name__, 'f')
def test_2_call():
import example2
eq_(example2.f(5, 1), 6)
eq_(example2.f.__doc__, None)
eq_(example2.f.__name__, 'f')
def test_3_call():
import example3
eq_(example3.f(), 7)
eq_(example3.f.__doc__, None)
eq_(example3.f.__name__, 'f')
def test_1_cli():
cmd = 'python %s 5' % example1_py
p = Proc(cmd).call()
eq_(p.return_code, 0)
eq_(p.stdout, '')
eq_(p.stderr, '')
cmd = 'python %s 5 --two 7 --debug' % example1_py
p = Proc(cmd).call()
eq_(p.return_code, 0)
eq_(p.stdout, '')
eq_(p.stderr, '')
cmd = 'python %s 5 --three -t 2 --debug' % example1_py
p = Proc(cmd).call()
eq_(p.return_code, 0)
eq_(p.stdout, '')
eq_(p.stderr, '')
cmd = 'python %s 5 -t x' % example1_py
p = Proc(cmd).call()
eq_(p.return_code > 0, 1)
eq_(p.stdout, '')
eq_(p.stderr != '', 1)
cmd = 'python %s -t 1 5 --debug' % example1_py
p = Proc(cmd).call()
eq_(p.return_code, 0)
eq_(p.stdout, '')
eq_(p.stderr, '')
def test_2_cli():
cmd = 'python %s 5 2' % example2_py
p = Proc(cmd).call()
eq_(p.return_code, 0)
eq_(p.stdout, '')
eq_(p.stderr, '')
cmd = 'python %s --debug 5 2' % example2_py
p = Proc(cmd).call()
eq_(p.return_code, 0)
eq_(p.stdout, '')
ok_('root - DEBUG - 5' in p.stderr)
def test_3_cli():
cmd = 'python %s ' % example3_py
p = Proc(cmd).call()
eq_(p.return_code, 0)
eq_(p.stdout, '')
eq_(p.stderr, '')
def test_1_ver():
cmd = 'python %s --version' % example1_py
p = Proc(cmd).call()
eq_(p.stdout, '')
eq_(p.stderr, '3.2')
eq_(p.return_code, 0)
def test_2_ver():
cmd = 'python %s --version' % example2_py
p = Proc(cmd).call()
eq_(p.stdout, '')
eq_(p.stderr, '1.2')
eq_(p.return_code, 0)
def test_3_ver():
cmd = 'python %s --version' % example3_py
p = Proc(cmd).call()
eq_(p.stdout, '')
ok_(p.stderr)
ok_(p.return_code!=0)
def test_1_help():
cmd = 'python %s --help' % example1_py
p = Proc(cmd).call()
eq_(p.stderr, '')
eq_(p.return_code, 0)
eq_('one' in p.stdout, 1)
eq_('--two' in p.stdout, 1)
eq_('-t' in p.stdout, 1)
eq_('--three' in p.stdout, 1)
def test_2_help():
cmd = 'python %s --help' % example2_py
p = Proc(cmd).call()
eq_(p.stderr, '')
eq_(p.return_code, 0)
def test_3_help():
cmd = 'python %s --help' % example3_py
p = Proc(cmd).call()
eq_(p.stderr, '')
eq_(p.return_code, 0)
|
Python
| 0.000001
|
@@ -98,16 +98,27 @@
os.path
+%0Aimport sys
%0A%0Ad = os
@@ -277,16 +277,47 @@
3.py')%0A%0A
+PY3 = sys.version_info%5B0%5D %3E= 3%0A
%0Adef tes
@@ -2035,32 +2035,113 @@
roc(cmd).call()%0A
+ if PY3:%0A eq_(p.stderr, '')%0A eq_(p.stdout, '3.2')%0A else:%0A
eq_(p.stdout
@@ -2138,32 +2138,36 @@
_(p.stdout, '')%0A
+
eq_(p.stderr
@@ -2283,32 +2283,113 @@
roc(cmd).call()%0A
+ if PY3:%0A eq_(p.stderr, '')%0A eq_(p.stdout, '1.2')%0A else:%0A
eq_(p.stdout
@@ -2386,32 +2386,36 @@
_(p.stdout, '')%0A
+
eq_(p.stderr
|
788d20dc049d0f14760e3ff1012d8b3e0edc041a
|
Use six.reraise
|
flubber/tasks.py
|
flubber/tasks.py
|
#
# This file is part of flubber. See the NOTICE for more information.
#
import flubber
from flubber.event import Event
from flubber._tasklet import tasklet, get_current, TaskletExit
__all__ = ['Task', 'TaskExit', 'spawn', 'sleep', 'task']
def sleep(seconds=0):
"""Yield control to another eligible coroutine until at least *seconds* have
elapsed.
*seconds* may be specified as an integer, or a float if fractional seconds
are desired.
"""
loop = flubber.current.loop
current = get_current()
assert loop.tasklet is not current
timer = loop.call_later(seconds, current.switch)
try:
loop.switch()
finally:
timer.cancel()
def spawn(func, *args, **kwargs):
"""Create a task to run ``func(*args, **kwargs)``. Returns a
:class:`Task` objec.
Execution control returns immediately to the caller; the created task
is merely scheduled to be run at the next available opportunity.
Use :func:`spawn_later` to arrange for tasks to be spawned
after a finite delay.
"""
t = Task(target=func, args=args, kwargs=kwargs)
t.start()
return t
def task(func):
"""Decorator to run the decorated function as a Task
"""
def task_wrapper(*args, **kwargs):
return spawn(func, *args, **kwargs)
return task_wrapper
TaskExit = TaskletExit
# Helper to generate new thread names
_counter = 0
def _newname(template="Task-%d"):
global _counter
_counter = _counter + 1
return template % _counter
class Task(tasklet):
def __init__(self, target=None, name=None, args=(), kwargs={}):
super(Task, self).__init__(parent=flubber.current.loop.tasklet)
self._name = str(name or _newname())
self._target = target
self._args = args
self._kwargs = kwargs
self._started = False
self._exit_event = Event()
def start(self):
if self._started:
raise RuntimeError('tasks can only be started once')
self._started = True
flubber.current.loop.call_soon(self.switch)
def run_(self):
if self._target:
self._target(*self._args, **self._kwargs)
def join(self, timeout=None):
"""Wait for this Task to end. If a timeout is given, after the time expires the function
will return anyway."""
return self._exit_event.wait(timeout)
def kill(self, *throw_args):
"""Terminates the current task by raising an exception into it.
Whatever that task might be doing; be it waiting for I/O or another
primitive, it sees an exception as soon as it yields control.
By default, this exception is TaskExit, but a specific exception
may be specified. *throw_args* should be the same as the arguments to
raise; either an exception instance or an exc_info tuple.
"""
if self.dead:
return
if not self:
# task hasn't started yet and therefore throw won't work
def just_raise(*a, **kw):
if throw_args:
raise throw_args[0], throw_args[1], throw_args[2]
else:
raise TaskExit()
self.run_ = just_raise
return
flubber.current.loop.call_soon(self.throw, *throw_args)
def __repr__(self):
status = "initial"
if self._started:
status = "started"
if self.dead:
status = "dead"
if self._exit_event.is_set():
status = "ended"
return "<%s(%s, %s)>" % (self.__class__.__name__, self._name, status)
@property
def name(self):
return self._name
# internal
def switch(self, *args, **kwargs):
current = flubber.current.task
if current is not flubber.current.loop.tasklet:
raise RuntimeError('only the loop tasklet can switch to a Task')
return super(Task, self).switch(*args, **kwargs)
def throw(self, *args):
current = flubber.current.task
if current is not flubber.current.loop.tasklet:
raise RuntimeError('only the loop tasklet can throw to a Task')
return super(Task, self).throw(*args)
def run(self):
try:
self.run_()
finally:
del self._target, self._args, self._kwargs
self._exit_event.set()
|
Python
| 0.000132
|
@@ -83,16 +83,40 @@
lubber%0A%0A
+from flubber import six%0A
from flu
@@ -3098,22 +3098,28 @@
+six.re
raise
-
+(
throw_ar
@@ -3153,16 +3153,17 @@
_args%5B2%5D
+)
%0A
|
aba93ed6b72fabcad1aeead10702dae40d72ac1c
|
Add docstring to reporting info
|
file_transfer/datamover/transporters.py
|
file_transfer/datamover/transporters.py
|
import os
from datetime import datetime
from .connectors import FTPConnector, LocalConnector
from .s3enram import S3EnramHandler
class Porter:
    """Base class for file-transfer bookkeeping and reporting.

    Tracks which files were moved successfully (``transferred``) and which
    were not (``stalled``), and can append a summary of a transfer run to a
    local log file. Subclasses implement :meth:`transfer`.
    """

    def __init__(self):
        """Initialise empty transferred/stalled bookkeeping lists."""
        self.transferred = []
        self.stalled = []

    def transfer(self):
        # Subclasses must override this with the actual transfer logic.
        # (The original ``raise 'Not implemented'`` raised a string, which
        # is a TypeError on Python 3.)
        raise NotImplementedError

    def log_transfer(self, succes, filename, verbose=True):
        """store the filename in stalled or transferred report list

        :param succes: whether the transfer of this file succeeded
        :type succes: bool
        :param filename: name of the transferred file
        :param verbose: if True, also print the outcome to stdout
        """
        if succes:
            self.transferred.append(filename)
            if verbose:
                print("{} is succesfully transferred "
                      "to S3 bucket".format(filename))
        else:
            self.stalled.append(filename)
            if verbose:
                print("{} is not transferred to S3 bucket".format(filename))

    def report(self, reset_file=False, transfertype="Baltrad to S3"):
        """report about the transferred and stalled files

        :param reset_file: if True, a new log file is created and any
            existing one is overwritten; if False, the report is appended
        :param transfertype: text describing the transfer type, used in
            the header of the report section
        """
        if reset_file:
            file_handler = "w"
        else:
            file_handler = "a"

        with open('./log_file_transfer', file_handler) as outfile:
            outfile.write("-" * 55 + "\n")
            outfile.write("Data transfer at {} from {}:\n".format(
                datetime.now().strftime("%Y-%m-%d %H:%M"), transfertype))
            outfile.write("-" * 55 + "\n")
            outfile.write("\n")
            outfile.write("Files not transferred:\n")
            outfile.write("\n".join(self.stalled))
            outfile.write("\n\n")
            outfile.write("Files succesfully transferred:\n")
            outfile.write("\n".join(self.transferred))
            outfile.write("\n\n\n")
class BaltradToS3(Porter):
    # Pulls profile files from the Baltrad FTP server and pushes them to an
    # S3 bucket, recording outcomes via Porter.log_transfer.

    def __init__(self, ftp_url, ftp_username, ftp_pwd, bucket_name):
        """Port files from Baltrad server to S3

        :param ftp_url: url of the FTP
        :param ftp_username: username of the FTP
        :param ftp_pwd: password of the FTP username
        :param bucket_name: name of the S3 bucket
        :type bucket_name: string
        """
        Porter.__init__(self)
        self.ftp = FTPConnector(ftp_url, ftp_username, ftp_pwd)
        self.s3 = S3EnramHandler(bucket_name)

    def transfer(self, name_match="_vp_", overwrite=False,
                 limit=None, verbose=False):
        """Transfer all current Baltrad files to s3 with the given name_match
        included

        :param name_match: string that should be contained in the file name,
        default _vp_ (bird profile data)
        :param overwrite: If True, overwrite the existing file on the bucket
        :type overwrite: bool
        :param limit: for debugging/testing purposes only, limit the total
        number of transfers
        :param verbose: Make transfer description more extended
        """
        for j, filename in enumerate(self.ftp.list_files(
                name_match=name_match)):
            # get the files from ftp:
            # NOTE(review): reaches into FTPConnector's private ``_ftp``
            # attribute — consider exposing a public download method on the
            # connector instead.
            with open(filename, 'bw') as f:
                self.ftp._ftp.retrbinary('RETR ' + filename, f.write)
            # NOTE(review): LocalToS3 calls ``upload_file_enram`` while this
            # calls ``upload_enram_file`` — confirm which name S3EnramHandler
            # actually exposes.
            upload_succes = self.s3.upload_enram_file(filename,
                                                      overwrite=overwrite)
            self.log_transfer(upload_succes, filename, verbose)
            # the local copy is only a temporary staging file
            os.remove(filename)
            # j is zero-based, so stop once ``limit`` files were handled
            if isinstance(limit, int) and j >= limit-1:
                break
class LocalToS3(Porter):
    """Push bird-profile files from the local file system to an S3 bucket."""

    def __init__(self, bucket_name, filepath):
        """Port files from local file system to S3

        :param bucket_name: name of the S3 bucket
        :type bucket_name: string
        :param filepath: main project directory to write files to
        """
        Porter.__init__(self)
        self.local = LocalConnector(filepath)
        self.s3 = S3EnramHandler(bucket_name)

    def transfer(self, name_match="_vp_", overwrite=False,
                 limit=None, verbose=False):
        """transfer all profiles in folder to s3

        :param name_match: string that should be contained in the file name,
        default _vp_ (bird profile data)
        :param overwrite: If True, overwrite the existing file on the bucket
        :type overwrite: bool
        :param limit: for debugging/testing purposes only, limit the total
        number of transfers
        :param verbose: Make transfer description more extended
        """
        handled = 0
        for full_path in self.local.list_files(name_match, paths=True):
            # Upload the file, then record success/failure on the Porter
            # bookkeeping lists using only the base file name.
            succeeded = self.s3.upload_file_enram(full_path,
                                                  overwrite=overwrite)
            base_name = os.path.split(full_path)[-1]
            self.log_transfer(succeeded, base_name, verbose)
            handled += 1
            # Optional debug cap on the number of transfers.
            if isinstance(limit, int) and handled >= limit:
                break
|
Python
| 0
|
@@ -909,16 +909,282 @@
ed files
+%0A%0A :param reset_file: if True, a new file is created and an existing%0A log file is deleted; if False, text appends%0A :param transfertype: Additional text to define the transfer type, %0A provided in the header of the transfer section%0A
%22%22%22%0A
|
fda1544249585a9a65136af24f92b976a19938d7
|
Fix bug when not using translation
|
cbpos/translator.py
|
cbpos/translator.py
|
import cbpos

import gettext

# Default locale configuration: translation enabled, no preferred language
# list, fall back to untranslated strings when no catalog is found, and no
# explicit codeset.
cbpos.config.set_default('locale', 'use', True)
cbpos.config.set_default('locale', 'languages', list())
cbpos.config.set_default('locale', 'fallback', True)
cbpos.config.set_default('locale', 'codeset', '')
class TranslatorBuilder(object):
    """
    Helper class to create a GlobalTranslator and install it in the
    cbpos namespace with the gettext translators for the respective
    modules.
    """
    def __init__(self, localedir=None, languages=None, class_=None, fallback=None, codeset=None):
        """Store builder-wide defaults used when :meth:`add` is called
        without module-specific overrides."""
        self.tr = GlobalTranslator()
        self.localedir = localedir
        self.languages = languages
        self.class_ = class_
        self.fallback = fallback
        self.codeset = codeset

    def add(self, module_name, domain=None, localedir=None, languages=None, class_=None, fallback=None, codeset=None):
        """
        Create and register a module translator to the global translator
        given the appropriate parameters for the gettext translator.

        Every parameter defaults to None so that the builder-wide value
        from __init__ is used unless explicitly overridden.  (The previous
        ``fallback=True`` default made ``self.fallback`` unreachable.)
        """
        module_tr = ModuleTranslator(domain=domain if domain is not None else self.default_domain(module_name),
                                     localedir=localedir if localedir is not None else self.localedir,
                                     languages=languages if languages is not None else self.languages,
                                     class_=class_ if class_ is not None else self.class_,
                                     fallback=fallback if fallback is not None else self.fallback,
                                     codeset=codeset if codeset is not None else self.codeset)
        self.register(module_name, module_tr)

    def add_main(self):
        """
        Create and register a default translator to the global translator
        using the default parameters passed on creation of the class.
        """
        main_tr = ModuleTranslator(domain="coinbox_main",
                                   localedir=self.localedir,
                                   languages=self.languages,
                                   class_=self.class_,
                                   fallback=self.fallback,
                                   codeset=self.codeset)
        self.register_main(main_tr)

    def install(self):
        """
        Install the global translator object to the main package as "cbpos.tr"
        """
        self.add_main()
        cbpos.tr = self.tr

    def default_domain(self, module_name):
        """
        Returns the default gettext domain name for modules
        """
        return 'coinbox_mod_%s' % (module_name,)

    def register(self, module_name, translator):
        """
        Adds a translator entry to the global translator for a module under "tr.{mod_name}_"
        """
        setattr(self.tr, module_name + '_', translator)

    def register_main(self, translator):
        """
        Adds a translator entry to the global translator for the main translator under "tr._"
        """
        setattr(self.tr, '_', translator)
class DummyTranslatorBuilder(TranslatorBuilder):
    """
    A dummy translator builder which generates a global translator that only returns the same input.
    """
    def dummy_gettext(self, message):
        """Identity "translation": return the message unchanged."""
        # Defined as a real method on purpose. The previous
        # ``dummy_gettext = lambda message: message`` class attribute became
        # a *bound method* when accessed as ``self.dummy_gettext``, so the
        # registered callable swallowed the caller's message into ``self``
        # and returned the builder instance instead of the text.
        return message

    def add(self, module_name, domain=None, localedir=None, languages=None, class_=None, fallback=True, codeset=None):
        # No gettext catalog is loaded; every module shares the identity
        # translator. The gettext-related parameters are accepted only for
        # signature compatibility with TranslatorBuilder.add.
        self.register(module_name, self.dummy_gettext)

    def add_main(self):
        # The main translator is the same identity function.
        self.register_main(self.dummy_gettext)
class GlobalTranslator(object):
    """Empty namespace object; TranslatorBuilder attaches module translators
    to it via setattr (``<module>_`` in register, ``_`` in register_main)."""
    pass
class ModuleTranslator(object):
    """
    A proxy to the appropriate gettext function call for translation.
    """
    def __init__(self, domain, localedir=None, languages=None, class_=None, fallback=None, codeset=None):
        # Parameters are passed positionally to gettext.translation(); see
        # the stdlib gettext documentation for their meaning.
        self.translation = gettext.translation(domain, localedir, languages, class_, fallback, codeset)
    def __call__(self, message):
        # NOTE(review): ``ugettext`` exists only on Python 2 translation
        # objects; on Python 3 this would need ``.gettext`` — confirm the
        # supported interpreter version.
        return self.translation.ugettext(message)
|
Python
| 0
|
@@ -3231,16 +3231,20 @@
%22%22%22%0A
+def
dummy_ge
@@ -3252,26 +3252,39 @@
text
- = lambda message:
+(self, message):%0A return
mes
|
f2350b4be2e88f282e7a49cafebb7e8e7c37efd9
|
Bump version
|
YaDiskClient/__init__.py
|
YaDiskClient/__init__.py
|
"""
Client for Yandex.Disk.
"""
__version__ = '0.5.1'
|
Python
| 0
|
@@ -44,11 +44,11 @@
= '
-0.5
+1.0
.1'%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.