| commit (string, len 40) | subject (string, len 1-3.25k) | old_file (string, len 4-311) | new_file (string, len 4-311) | old_contents (string, len 0-26.3k) | lang (string, 3 classes) | proba (float64, 0-1) | diff (string, len 0-7.82k) |
|---|---|---|---|---|---|---|---|
0d609e1639ca9bf4a08e6cf765e17ad5d6e36bda
|
Update path in ProvisioningTest.py
|
resource/csdk/security/provisioning/sample/provisioningTest.py
|
resource/csdk/security/provisioning/sample/provisioningTest.py
|
#!/usr/bin/python -W

import subprocess
import os
import time
import sys
import shutil
import platform
import argparse
import textwrap

# Note: to see the return value of a process
#  - in Windows type: echo %errorlevel%
#  - in Linux type: echo $?

# Resets the state of sampleserver_justworks and autoprovisioningclient so that
# state from one test is not present during subsequent tests
def cleanup(iotivity_base_path, exe_path):
    # Copy fresh oic_svr_db_server_justworks.dat
    dat_file_src = os.path.join(iotivity_base_path, 'resource', 'csdk', 'security', 'provisioning', 'sample', 'oic_svr_db_server_justworks.dat')
    dat_file_dest = exe_path
    shutil.copy(dat_file_src, dat_file_dest)

    # Copy fresh oic_svr_db_client.dat
    dat_file_src = os.path.join(iotivity_base_path, 'resource', 'examples', 'oic_svr_db_client.dat')
    dat_file_dest = exe_path
    shutil.copy(dat_file_src, dat_file_dest)

    # Delete provisioning DB file
    db_file = os.path.join(exe_path, 'oic_autoprvn_mng.db')
    if os.path.isfile(db_file):
        os.unlink(db_file)

# Print the environment variables (useful for debugging)
def print_environment():
    for key in os.environ.keys():
        print("%30s %s \n" % (key, os.environ[key]))
    print('current PATH is %s' % sys.path)

### main ###

# Number of unit tests in autoprovisioningclient
NUM_TESTS = 7

usage = '''
Run end-to-end certificate tests between autoprovisioningclient and sampleserver_justworks

Usage Notes
- script assumes it's being run from the root of iotivity, e.g.:
  t:\\iotivity\\resource\\csdk\\security\\provisioning\\sample\\provisioningTest.py
- I have added
  t:\\iotivity\\out\\windows\\amd64\\debug\\resource\\csdk
  t:\\iotivity\\out\\windows\\amd64\\debug
  to my PATH
- The discovery timeout in autoprovisioning client may be a bit aggressive, 3 seconds
'''

parser = argparse.ArgumentParser(
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description=usage
)
parser.add_argument('--arch', nargs='?', choices=['amd64', 'x86', 'arm'], help='Architecture, one of x86, amd64 or arm. Defaults to amd64.', default='amd64')
parser.add_argument('--build', nargs='?', choices=['debug', 'release'], help='Build type, one of debug or release. Defaults to debug.', default='debug')
parser.add_argument('--onetest', nargs='?', help='Run a single test, specified by number (1, ..., ' + str(NUM_TESTS) + '). By default all tests are run.')
args = parser.parse_args()

iotivity_base_path = os.getcwd()

os_name = platform.system()
if os_name == 'Windows':
    os_name = 'windows'
elif os_name == 'Linux':
    os_name = 'linux'

exe_path = os.path.join(iotivity_base_path, 'out', os_name, args.arch, args.build, 'resource', 'csdk', 'security', 'provisioning', 'sample')

# Set PATH so octbstack.dll is found
cwd = os.getcwd()
sys.path.append(os.path.join(cwd, exe_path))
sys.path.append(os.path.join(cwd, 'out', os_name, args.arch, args.build))

# Work in the output dir with the test binaries
os.chdir(exe_path)

output_text = ""
num_failures = 0
test_range = range(1, NUM_TESTS + 1)  # default to running all tests

if args.onetest:
    try:
        if int(args.onetest) > NUM_TESTS or int(args.onetest) < 1:
            print('Argument to --onetest out of range')
            sys.exit(-1)
        test_range = range(int(args.onetest), int(args.onetest) + 1)
    except ValueError:
        print('invalid argument to --onetest')
        sys.exit(-1)

for i in test_range:
    print('\nRunning test %d...\n' % i)

    # Clear state from previous test
    cleanup(iotivity_base_path, exe_path)

    # Start the device/server with a non-blocking call
    # note: Popen can take file descriptors to redirect the process's stdin, stdout, stderr
    try:
        server_process = subprocess.Popen('sampleserver_justworks')
    except:
        print('Failed to start sampleserver_justworks: %s' % sys.exc_info()[0])
        sys.exit(-1)

    # Run the auto provisioning client with the test number as argument, block until it returns
    return_code = subprocess.call(["autoprovisioningclient", str(i)])
    if return_code != 0:
        num_failures += 1
        print("Test %d failed" % i)
        output_text += "Test %d failed\n" % i
    else:
        print("Test %d passed" % i)

    server_process.kill()

print("\n------------------------------------")
print(" Test Results: %d of %d tests passed" % (len(test_range) - num_failures, len(test_range)))
print("------------------------------------")
print(output_text)
print('\n')
|
Python
| 0
|
@@ -1690,32 +1690,39 @@
 \\out\\windows\\
+win32\\
 amd64\\debug\\re
@@ -1770,16 +1770,23 @@
 indows\\
+win32\\
 amd64\\d
@@ -2607,28 +2607,33 @@
 ws':\n    os_
-name
+directory
 = 'windows'
@@ -2631,16 +2631,22 @@
 'windows
+\win32
 '\nelif o
@@ -2671,20 +2671,25 @@
 \n    os_
-name
+directory
 = 'linu
@@ -2738,36 +2738,41 @@
 path, 'out', os_
-name
+directory
 , args.arch, arg
@@ -2983,20 +2983,25 @@
 ut', os_
-name
+directory
 , args.a
|
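Decoded, the hunks above splice a `win32\\` directory into the two example PATH entries and rename `os_name` to `os_directory`, which carries the extra path segment on Windows. A sketch of the patched platform block, reconstructed from old_contents plus the hunks (the row does not store the verbatim new file):

os_directory = platform.system()
if os_directory == 'Windows':
    os_directory = 'windows\win32'
elif os_directory == 'Linux':
    os_directory = 'linux'

exe_path = os.path.join(iotivity_base_path, 'out', os_directory, args.arch, args.build,
                        'resource', 'csdk', 'security', 'provisioning', 'sample')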
e34d437fb9ede1c5a547bbabe99978207e2a389b
|
Make paths manipulation stuff private
|
sugar/env.py
|
sugar/env.py
|
import os
import sys
import pwd

try:
    from sugar.__uninstalled__ import *
except ImportError:
    from sugar.__installed__ import *

import sugar.setup

def add_to_python_path(path):
    sys.path.insert(0, path)
    if os.environ.has_key('PYTHONPATH'):
        old_path = os.environ['PYTHONPATH']
        os.environ['PYTHONPATH'] = path + ':' + old_path
    else:
        os.environ['PYTHONPATH'] = path

def add_to_bin_path(path):
    if os.environ.has_key('PATH'):
        old_path = os.environ['PATH']
        os.environ['PATH'] = path + ':' + old_path
    else:
        os.environ['PATH'] = path

def setup():
    for path in sugar_python_path:
        add_to_python_path(path)

    for path in sugar_bin_path:
        add_to_bin_path(path)

    if sugar_source_dir:
        source = os.path.join(sugar_source_dir, 'activities')
        runner = os.path.join(sugar_source_dir, 'shell/sugar-activity-factory')
        sugar.setup.setup_activities(source, sugar_activities_dir, runner)

        bin = os.path.join(sugar_source_dir, 'shell/sugar-presence-service')
        sugar.setup.write_service('org.laptop.Presence', bin,
                                  sugar_activities_dir)

def get_profile_path():
    if os.environ.has_key('SUGAR_PROFILE'):
        profile_id = os.environ['SUGAR_PROFILE']
    else:
        profile_id = 'default'

    path = os.path.expanduser('~/.sugar')
    return os.path.join(path, profile_id)

def get_data_dir():
    return sugar_data_dir

def get_activities_dir():
    return sugar_activities_dir

def get_dbus_config():
    return sugar_dbus_config
|
Python
| 0
|
@@ -152,34 +152,50 @@
 def 
-add_to_python_path(
+setup():\n\tfor path in sugar_python_
 path
-)
 :\n
+\t
 \tsys
@@ -216,16 +216,17 @@
 , path)\n
+\t
 \tif os.e
@@ -247,32 +247,33 @@
 ('PYTHONPATH'):\n
+\t
 \t\told_path = os.
@@ -288,32 +288,33 @@
 'PYTHONPATH']\n\t\t
+\t
 os.environ['PYTH
@@ -347,23 +347,25 @@
 d_path \n
+\t
 \telse:\n
+\t
 \t\tos.env
@@ -395,18 +395,26 @@
 th\n\n
-def add_to
+\tfor path in sugar
 _bin
@@ -422,16 +422,11 @@
 path
-(path)
 :\n
+\t
 \tif
@@ -453,16 +453,17 @@
 PATH'):\n
+\t
 \t\told_pa
@@ -488,16 +488,17 @@
 ATH']\n\t\t
+\t
 os.envir
@@ -532,23 +532,25 @@
 ld_path\n
+\t
 \telse:\n
+\t
 \t\tos.env
@@ -574,135 +574,8 @@
 th\n\n
-def setup():\n\tfor path in sugar_python_path:\n\t\tadd_to_python_path(path)\n\n\tfor path in sugar_bin_path:\n\t\tadd_to_bin_path(path)\n\n
 \tif
|
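Decoded, the hunks inline the two helpers into `setup()` itself, making the path manipulation private as the subject says. A reconstruction of the resulting body (indentation shown as spaces here; the file itself uses tabs, as the \t fragments in the diff show):

def setup():
    for path in sugar_python_path:
        sys.path.insert(0, path)
        if os.environ.has_key('PYTHONPATH'):
            old_path = os.environ['PYTHONPATH']
            os.environ['PYTHONPATH'] = path + ':' + old_path
        else:
            os.environ['PYTHONPATH'] = path

    for path in sugar_bin_path:
        if os.environ.has_key('PATH'):
            old_path = os.environ['PATH']
            os.environ['PATH'] = path + ':' + old_path
        else:
            os.environ['PATH'] = path

    if sugar_source_dir:
        # ... unchanged remainder of setup() ...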
0e2d9b496ab12d512e56041d9f4ffbadf7fab4ab
|
Remove unused method
|
sugar/env.py
|
sugar/env.py
|
# Copyright (C) 2006, Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import sys
import pwd

try:
    from sugar.__uninstalled__ import *
except ImportError:
    from sugar.__installed__ import *

def get_profile_path():
    if os.environ.has_key('SUGAR_PROFILE'):
        profile_id = os.environ['SUGAR_PROFILE']
    else:
        profile_id = 'default'

    path = os.path.join(os.path.expanduser('~/.sugar'), profile_id)
    if not os.path.isdir(path):
        try:
            os.makedirs(path)
        except OSError, exc:
            print "Could not create user directory."

    return path

def get_data_dir():
    return sugar_data_dir

def get_activities_dir():
    return sugar_activities_dir

def get_activity_info_dir():
    return sugar_activity_info_dir

def get_services_dir():
    return sugar_services_dir

def get_dbus_config():
    return sugar_dbus_config

def get_shell_bin_dir():
    return sugar_shell_bin_dir

# http://standards.freedesktop.org/basedir-spec/basedir-spec-0.6.html
def get_data_dirs():
    if os.environ.has_key('XDG_DATA_DIRS'):
        return os.environ['XDG_DATA_DIRS'].split(':')
    else:
        return [ '/usr/local/share/', '/usr/share/' ]

_dbus_version = None
def get_dbus_version():
    global _dbus_version
    if _dbus_version == None:
        f = os.popen('dbus-daemon --version')
        version_line = f.readline()
        if version_line:
            splitted_line = version_line.split()
            _dbus_version = splitted_line[len(splitted_line) - 1]
        f.close()
    return _dbus_version
|
Python
| 0.000006
|
@@ -1789,319 +1789,4 @@
 ' ]\n
-\n_dbus_version = None\ndef get_dbus_version():\n\tglobal _dbus_version\n\tif _dbus_version == None:\n\t\tf = os.popen('dbus-daemon --version')\n\t\tversion_line = f.readline()\n\t\tif version_line:\n\t\t\tsplitted_line = version_line.split()\n\t\t\t_dbus_version = splitted_line[len(splitted_line) - 1]\n\t\tf.close()\n\treturn _dbus_version\n
|
40afa5ccedd4181a01a7ffc85c39c11a1561e7b6
|
Drop memo comment
|
devito/core/gpu_openacc.py
|
devito/core/gpu_openacc.py
|
from functools import partial

import cgen as c

from devito.core.gpu_openmp import (DeviceOpenMPNoopOperator, DeviceOpenMPIteration,
                                    DeviceOmpizer, DeviceOpenMPDataManager)
from devito.exceptions import InvalidOperator
from devito.logger import warning
from devito.passes.iet import optimize_halospots, mpiize, hoist_prodders
from devito.tools import as_tuple, timed_pass

__all__ = ['DeviceOpenACCNoopOperator', 'DeviceOpenACCOperator',
           'DeviceOpenACCCustomOperator']


# TODO: currently inheriting from the OpenMP Operators. Ideally, we should/could
# abstract things away so as to have a separate, language-agnostic superclass


class DeviceOpenACCIteration(DeviceOpenMPIteration):

    @classmethod
    def _make_construct(cls, **kwargs):
        return 'acc parallel loop'


class DeviceAccizer(DeviceOmpizer):

    lang = {
        'atomic': c.Pragma('acc atomic update'),
        'map-enter-to': lambda i, j:
            c.Pragma('acc enter data copyin(%s%s)' % (i, j)),
        # 'map-enter-alloc': lambda i, j:
        #     c.Pragma('omp target enter data map(alloc: %s%s)' % (i, j)),
        'map-update': lambda i, j:
            c.Pragma('acc exit data copyout(%s%s)' % (i, j)),
        'map-release': lambda i, j:
            c.Pragma('acc exit data delete(%s%s)' % (i, j)),
        'map-exit-delete': lambda i, j:
            c.Pragma('acc exit data delete(%s%s)' % (i, j)),
    }

    _Iteration = DeviceOpenACCIteration

    def _make_parallel(self, iet):
        iet, metadata = super(DeviceAccizer, self)._make_parallel(iet)
        metadata['includes'] = ['openacc.h']
        return iet, metadata


class DeviceOpenACCDataManager(DeviceOpenMPDataManager):
    _Parallelizer = DeviceAccizer


class DeviceOpenACCNoopOperator(DeviceOpenMPNoopOperator):

    @classmethod
    @timed_pass(name='specializing.IET')
    def _specialize_iet(cls, graph, **kwargs):
        options = kwargs['options']

        # Distributed-memory parallelism
        if options['mpi']:
            mpiize(graph, mode=options['mpi'])

        # GPU parallelism via OpenACC offloading
        DeviceAccizer().make_parallel(graph)

        # Symbol definitions
        data_manager = DeviceOpenACCDataManager()
        data_manager.place_ondevice(graph, efuncs=list(graph.efuncs.values()))
        data_manager.place_definitions(graph)
        data_manager.place_casts(graph)

        return graph


class DeviceOpenACCOperator(DeviceOpenACCNoopOperator):

    @classmethod
    @timed_pass(name='specializing.IET')
    def _specialize_iet(cls, graph, **kwargs):
        options = kwargs['options']

        # Distributed-memory parallelism
        optimize_halospots(graph)
        if options['mpi']:
            mpiize(graph, mode=options['mpi'])

        # GPU parallelism via OpenACC offloading
        DeviceAccizer().make_parallel(graph)

        # Misc optimizations
        hoist_prodders(graph)

        # Symbol definitions
        data_manager = DeviceOpenACCDataManager()
        data_manager.place_ondevice(graph, efuncs=list(graph.efuncs.values()))
        data_manager.place_definitions(graph)
        data_manager.place_casts(graph)

        return graph


class DeviceOpenACCCustomOperator(DeviceOpenACCOperator):

    _known_passes = ('optcomms', 'openacc', 'mpi', 'prodders')
    _known_passes_disabled = ('blocking', 'openmp', 'denormals', 'wrapping', 'simd')
    assert not (set(_known_passes) & set(_known_passes_disabled))

    @classmethod
    def _make_passes_mapper(cls, **kwargs):
        options = kwargs['options']

        accizer = DeviceAccizer()

        return {
            'optcomms': partial(optimize_halospots),
            'openacc': partial(accizer.make_parallel),
            'mpi': partial(mpiize, mode=options['mpi']),
            'prodders': partial(hoist_prodders)
        }

    @classmethod
    def _build(cls, expressions, **kwargs):
        # Sanity check
        passes = as_tuple(kwargs['mode'])
        for i in passes:
            if i not in cls._known_passes:
                if i in cls._known_passes_disabled:
                    warning("Got explicit pass `%s`, but it's unsupported on an "
                            "Operator of type `%s`" % (i, str(cls)))
                else:
                    raise InvalidOperator("Unknown pass `%s`" % i)

        return super(DeviceOpenACCCustomOperator, cls)._build(expressions, **kwargs)

    @classmethod
    @timed_pass(name='specializing.IET')
    def _specialize_iet(cls, graph, **kwargs):
        options = kwargs['options']
        passes = as_tuple(kwargs['mode'])

        # Fetch passes to be called
        passes_mapper = cls._make_passes_mapper(**kwargs)

        # Call passes
        for i in passes:
            try:
                passes_mapper[i](graph)
            except KeyError:
                pass

        # Force-call `mpi` if requested via global option
        if 'mpi' not in passes and options['mpi']:
            passes_mapper['mpi'](graph)

        # GPU parallelism via OpenACC offloading
        if 'openacc' not in passes:
            passes_mapper['openacc'](graph)

        # Symbol definitions
        data_manager = DeviceOpenACCDataManager()
        data_manager.place_ondevice(graph, efuncs=list(graph.efuncs.values()))
        data_manager.place_definitions(graph)
        data_manager.place_casts(graph)

        return graph
|
Python
| 0.000001
|
@@ -1023,124 +1023,8 @@
 )),\n
-        # 'map-enter-alloc': lambda i, j:\n        #     c.Pragma('omp target enter data map(alloc: %s%s)' % (i, j)),\n
|
29b0f18a3b7dcc6e0123889c1c845d7511677c96
|
fix indentation
|
squad/run.py
|
squad/run.py
|
import os
import sys

from squad.version import __version__
from squad.manage import main as manage

__usage__ = """usage: squad [OPTIONS]

Options:

-f, --fast      Fast startup: skip potentially slow operations, such as
                running database migrations and compiling static assets
-h, --help      show this help message and exit
-v, --version   show program's version number and exit

ALL other options are passed as-is to gunicorn. See gunicorn(1),
gunicorn3(1), or http://docs.gunicorn.org/ for details.

gunicorn options:\
"""

def usage():
    print(__usage__)
    sys.stdout.flush()
    os.system(r'%s -m gunicorn.app.wsgiapp --help | sed -e "/^\S/d"' % sys.executable)

def main():
    argv = sys.argv
    fast = False
    for f in ['--fast', '-f']:
        if f in argv:
            argv.remove(f)
            fast = True

    if '--help' in argv or '-h' in argv:
        usage()
        return

    if '--version' in argv or '-v' in argv:
        print('squad (version %s)' % __version__)
        return

    os.environ.setdefault("ENV", "production")

    if not fast:
        sys.argv = ['squad-admin', 'migrate']
        manage()
        sys.argv = ['squad-admin', 'collectstatic', '--no-input', '-v', '0']
        manage()

    argv = [sys.executable, '-m', 'gunicorn.app.wsgiapp', 'squad.wsgi'] + argv[1:]
    os.execv(sys.executable, argv)

if __name__ == "__main__":
    main()
|
Python
| 0.000096
|
@@ -143,16 +143,18 @@
 tions:\n\n
+
 -f, --
@@ -167,18 +167,16 @@
-
 Fast sta
@@ -303,16 +303,18 @@
 assets\n
+
 -h, --
@@ -323,26 +323,24 @@
 lp
-
 show this he
@@ -359,16 +359,18 @@
 nd exit\n
+
 -v, --
@@ -379,26 +379,24 @@
 rsion
-
 show program
@@ -423,16 +423,18 @@
 d exit\n\n
+
 ALL ot
@@ -492,16 +492,18 @@
 orn(1),\n
+
 gunico
|
5fe859230c5a07825e27c38c0a16de4ff9e18456
|
Return the decorated function.
|
kazoo/recipe/watchers.py
|
kazoo/recipe/watchers.py
|
"""Child and Data watching higher level API's
"""
import logging
from kazoo.client import KazooState
log = logging.getLogger(__name__)
class DataWatch(object):
"""Watches a node for data updates and calls the specified
function each time it changes
The function will also be called the very first time its
registered to get the data.
Returning `False` from the registered function will disable
future data change calls.
Example with client:
.. code-block:: python
@client.DataWatch('/path/to/watch')
def my_func(data, stat):
print "Data is %s" % data
print "Version is %s" % stat.version
# Above function is called immediately and prints
"""
def __init__(self, client, path, func=None,
allow_session_lost=True):
"""Create a children watcher for a path
:param client: A zookeeper client
:type client: :class:`~kazoo.client.KazooClient`
:param path: The path to watch for children on
:type path: str
:param func: Function to call initially and every time the
children change. `func` will be called with a
tuple, the value of the node and a
:class:`~kazoo.client.ZnodeStat` instance
:type func: callable
:param allow_session_lost: Whether the watch should be
re-registered if the zookeeper
session is lost.
:type allow_session_lost: bool
The path must already exist for the children watcher to
run.
"""
self._client = client
self._path = path
self._func = func
self._stopped = False
self._watch_established = False
self._allow_session_lost = allow_session_lost
self._run_lock = client._handler.lock_object()
self._prior_data = ()
# Register our session listener if we're going to resume
# across session losses
if func:
if allow_session_lost:
self._client.add_listener(self._session_watcher)
self._get_data()
def __call__(self, func):
"""Callable version for use as a decorator
:param func: Function to call initially and every time the
children change. `func` will be called with a
tuple, the value of the node and a
:class:`~kazoo.client.ZnodeStat` instance
:type func: callable
"""
self._func = func
if self._allow_session_lost:
self._client.add_listener(self._session_watcher)
self._get_data()
def _get_data(self):
with self._run_lock: # Ensure this runs one at a time
if self._stopped:
return
data, stat = self._client.retry(self._client.get,
self._path, self._watcher)
if not self._watch_established:
self._watch_established = True
# If we already had data, and it hasn't changed, this is a
# session re-establishment and nothing changed, don't call the
# func
if self._prior_data and \
self._prior_data[1].mzxid == stat.mzxid:
return
self._prior_data = data, stat
try:
if self._func(data, stat) is False:
self._stopped = True
except Exception as exc:
log.exception(exc)
raise
def _watcher(self, event):
self._get_data()
def _session_watcher(self, state):
if state == KazooState.LOST:
self._watch_established = False
elif state == KazooState.CONNECTED and \
not self._watch_established and not self._stopped:
self._get_data()
class ChildrenWatch(object):
"""Watches a node for children updates and calls the specified
function each time it changes
The function will also be called the very first time its
registered to get children.
Returning `False` from the registered function will disable
future children change calls.
Example with client:
.. code-block:: python
@client.ChildrenWatch('/path/to/watch')
def my_func(children):
print "Children are %s" % children
# Above function is called immediately and prints children
"""
def __init__(self, client, path, func=None,
allow_session_lost=True):
"""Create a children watcher for a path
:param client: A zookeeper client
:type client: :class:`~kazoo.client.KazooClient`
:param path: The path to watch for children on
:type path: str
:param func: Function to call initially and every time the
children change. `func` will be called with a
single argument, the list of children.
:type func: callable
:param allow_session_lost: Whether the watch should be
re-registered if the zookeeper
session is lost.
:type allow_session_lost: bool
The path must already exist for the children watcher to
run.
"""
self._client = client
self._path = path
self._func = func
self._stopped = False
self._watch_established = False
self._allow_session_lost = allow_session_lost
self._run_lock = client._handler.lock_object()
self._prior_children = None
# Register our session listener if we're going to resume
# across session losses
if func:
if allow_session_lost:
self._client.add_listener(self._session_watcher)
self._get_children()
def __call__(self, func):
"""Callable version for use as a decorator
:param func: Function to call initially and every time the
children change. `func` will be called with a
single argument, the list of children.
:type func: callable
"""
self._func = func
if self._allow_session_lost:
self._client.add_listener(self._session_watcher)
self._get_children()
def _get_children(self):
with self._run_lock: # Ensure this runs one at a time
if self._stopped:
return
children = self._client.retry(self._client.get_children,
self._path, self._watcher)
if not self._watch_established:
self._watch_established = True
if self._prior_children is not None and \
self._prior_children == children:
return
self._prior_children = children
try:
if self._func(children) is False:
self._stopped = True
except Exception as exc:
log.exception(exc)
raise
def _watcher(self, event):
self._get_children()
def _session_watcher(self, state):
if state == KazooState.LOST:
self._watch_established = False
elif state == KazooState.CONNECTED and \
not self._watch_established and not self._stopped:
self._get_children()
|
Python
| 0.001501
|
@@ -2696,32 +2696,52 @@
 self._get_data()
+\n        return func
 \n\n    def _get_d
@@ -6411,32 +6411,52 @@
 ._get_children()
+\n        return func
 \n\n    def _get_c
|
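Decoded, both hunks make `__call__` hand back the function it wraps, so using `DataWatch`/`ChildrenWatch` as a decorator no longer rebinds the decorated name to None. The patched `DataWatch.__call__` would end like this (reconstruction from old_contents plus the first hunk; `ChildrenWatch.__call__` gets the identical change):

    def __call__(self, func):
        # docstring elided
        self._func = func

        if self._allow_session_lost:
            self._client.add_listener(self._session_watcher)
        self._get_data()
        return func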
0e0096e664997ffa935273ba66b46a1e943a685a
|
add json support to dump_lol
|
python/tools/dump_lol.py
|
python/tools/dump_lol.py
|
#!/usr/bin/python
import argparse
from l20n.format.lol.parser import Parser
import pyast


def read_file(filename, charset='utf-8', errors='strict'):
    with open(filename, 'rb') as f:
        return f.read().decode(charset, errors)


def dump_lol(path):
    source = read_file(path)
    p = Parser()
    lol = p.parse(source)
    print(pyast.dump(lol))

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Dump LOL\'s AST.',
        prog="dump_lol")
    parser.add_argument('path', type=str,
                        help='path to lol file')
    args = parser.parse_args()
    dump_lol(args.path)
|
Python
| 0.000001
|
@@ -83,16 +83,42 @@
 rt pyast
+.dump.raw, pyast.dump.json
 \n\ndef re
@@ -272,16 +272,19 @@
 lol(path
+, t
 ):\n    s
@@ -358,19 +358,102 @@
 
-print(pyast
+if t == 'raw':\n        print(pyast.dump.raw.dump(lol))\n    else:\n        print(pyast.dump.json
 .dum
@@ -682,16 +682,227 @@
 file')\n
+    parser.add_argument('--type', '-t',\n                        type=str,\n                        choices=('json', 'raw'),\n                        default='raw',\n                        help='path to lol file')\n
 args
@@ -942,14 +942,25 @@
 ol(args.path
+, args.type
 )\n
|
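Applying the decoded hunks to old_contents yields roughly the following (a reconstruction; the row stores only the fragment diff):

import pyast.dump.raw, pyast.dump.json

def dump_lol(path, t):
    source = read_file(path)
    p = Parser()
    lol = p.parse(source)
    if t == 'raw':
        print(pyast.dump.raw.dump(lol))
    else:
        print(pyast.dump.json.dump(lol))

plus a new argparse option, whose help string ('path to lol file') looks copy-pasted in the original commit:

    parser.add_argument('--type', '-t',
                        type=str,
                        choices=('json', 'raw'),
                        default='raw',
                        help='path to lol file')
    ...
    dump_lol(args.path, args.type)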
9e7137c241684d450e8ec62fc365fd21bd20b38d
|
Fix gunicorn socket path
|
docker/usr/local/etc/gunicorn/pixel.py
|
docker/usr/local/etc/gunicorn/pixel.py
|
# Gunicorn-django settings
bind = ['unix:/app/run/gunicorn.sock']
graceful_timeout = 90
loglevel = 'error'
name = 'pixel'
python_path = '/app/pixel'
timeout = 90
workers = 3
|
Python
| 0.000001
|
@@ -39,16 +39,22 @@
ix:/app/
+pixel/
run/guni
|
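The single hunk splices `pixel/` into the socket path; the patched config line would read:

bind = ['unix:/app/pixel/run/gunicorn.sock']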
2934b9d8de31c65fcc19bceacd16070856ca51b7
|
Remove unused import of mock.
|
analog/tests/test_formats.py
|
analog/tests/test_formats.py
|
"""Test the analog.formats module."""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import datetime
try:
from unittest import mock
except ImportError:
import mock
import pytest
from analog.exceptions import InvalidFormatExpressionError
from analog.formats import LogFormat, NGINX
def test_predefined_valid_nginx():
"""The predefined ``NGINX`` ``LogFormat`` is valid."""
# NGINX is a LogFormat instance and registered as such
assert isinstance(NGINX, LogFormat)
assert 'nginx' in LogFormat.all_formats()
# all required match groups are available
match_groups = NGINX.pattern.groupindex.keys()
for required in LogFormat._required_attributes:
assert required in match_groups
# timestamp conversion is working
now = datetime.datetime.now().replace(microsecond=0)
now_str = now.strftime(NGINX.time_format)
now_parsed = datetime.datetime.strptime(now_str, NGINX.time_format)
assert now == now_parsed
# try matching a log entry
log_line = ('123.123.123.123 - test_client [16/Jan/2014:13:30:30 +0000] '
'"POST /auth/token HTTP/1.1" 200 174 "-" '
'"OAuthClient 0.2.3" "-" 0.633 0.633')
match = NGINX.pattern.search(log_line)
log_entry = NGINX.entry(match)
# all entry attributes are correctly populated
assert log_entry.remote_addr == '123.123.123.123'
assert log_entry.remote_user == 'test_client'
assert log_entry.timestamp == '16/Jan/2014:13:30:30 +0000'
assert log_entry.verb == 'POST'
assert log_entry.path == '/auth/token'
assert log_entry.status == '200'
assert log_entry.body_bytes_sent == '174'
assert log_entry.http_referer == '-'
assert log_entry.http_user_agent == 'OAuthClient 0.2.3'
assert log_entry.http_x_forwarded_for == '-'
assert log_entry.request_time == '0.633'
assert log_entry.upstream_response_time == '0.633'
def test_custom_logformat_missing_groups():
"""Custom ``LogFormat`` patterns must include all required match groups."""
pattern_regex = r'(?P<some_group>.*)'
time_format = '%d/%b/%Y:%H:%M:%S +0000'
with pytest.raises(InvalidFormatExpressionError) as exc:
LogFormat('invalid', pattern_regex, time_format)
assert ('analog.exceptions.InvalidFormatExpressionError: '
'Format pattern must at least define the groups: {0}'.format(
', '.join(LogFormat._required_attributes))) in str(exc)
def test_custom_logformat_invalid_regex():
"""Custom ``LogFormat`` patterns must be valid regular expressions."""
pattern_regex = r'(?P<incomplete)'
time_format = '%d/%b/%Y:%H:%M:%S +0000'
with pytest.raises(InvalidFormatExpressionError) as exc:
LogFormat('invalid', pattern_regex, time_format)
assert 'Invalid regex in format.' in str(exc)
|
Python
| 0
|
@@ -159,79 +159,8 @@
 time
-\ntry:\n    from unittest import mock\nexcept ImportError:\n    import mock
 \n\nim
|
d288a9c2433a3771e163700b44c51124da3ec338
|
Fix date value from last modified to created.
|
post.py
|
post.py
|
# -*- coding: utf-8 -*-
import codecs
import re
import subprocess
import markdown
from jinja2 import Markup


class Post:
    _pattern = r"\[\[(.*)\|(.*)\]\]|\[\[(.*)\]\]"

    def __init__(self, post_id, working_dir=None):
        self.post_id = post_id
        self.title = post_id.replace('-', ' ')
        filename = u"{0}.md".format(post_id)
        path = u"{0}/{1}.md".format(working_dir, post_id)
        f = codecs.open(path, 'r', "utf-8")
        self.content = f.read()
        f.close()
        self.read_categories()
        self.replace_links()
        self.author, self.date = self.read_author_date(filename, working_dir)
        # markdown to html
        self.content_markup = Markup(markdown.markdown(self.content))

    def read_author_date(self, filename, working_dir):
        cmd = u"git log --date=iso -1 {0}".format(filename)
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=working_dir, shell=True)
        p.wait()
        if p.returncode == 0:
            for match in re.finditer(r"\nAuthor:[\s]*(.*?)[\s]*<[^/]*>\nDate:[\s]*(.*?)\n", p.stdout.read(), re.U):
                return match.group(1), match.group(2)
        return "", ""

    # TODO fix regex workaround.
    def read_categories(self):
        def sub(match):
            if match.group(1):
                return match.group(1)
            else:
                return match.group(3)

        lines = self.content.splitlines()
        if len(lines) >= 2:
            candidate = lines.pop()
            hr = lines.pop()
            hr = "".join(hr.split())
            # markdown horizontal rules (gollum log convention: category separator)
            if hr.startswith(u'---') or hr.startswith(u'***') or hr.startswith(u'___'):
                tags = re.sub(Post._pattern, sub, candidate, 0, re.U)
                self.tags = tags.split(',')
                self.content = "\n".join(lines)
            else:
                self.tags = []

    def replace_links(self):
        def sub(match):
            if match.group(1):
                return u"[{1}]({0}/{2})".format(u"/posts", match.group(1), match.group(2))
            else:
                return u"[{1}]({0}/{2})".format(u"/posts", match.group(3), match.group(3))

        self.content = re.sub(Post._pattern, sub, self.content, 0, re.U)
|
Python
| 0
|
@@ -811,16 +811,32 @@
git log
+--diff-filter=A
--date=i
|
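The hunk adds `--diff-filter=A` to the `git log` invocation, so the command reports the commit that added the file (its creation date) rather than the last commit touching it, matching the subject. Patched line, per the hunk:

        cmd = u"git log --diff-filter=A --date=iso -1 {0}".format(filename)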
343973ded2d15bbb9a0c2385db1e9fbd9bda2363
|
Fix return value of send() on Python 2
|
pexpect/popen_spawn.py
|
pexpect/popen_spawn.py
|
"""Spawn interface using subprocess.Popen
"""
import os
import threading
import subprocess
import sys
import time
import signal
import shlex
try:
from queue import Queue, Empty # Python 3
except ImportError:
from Queue import Queue, Empty # Python 2
from .spawnbase import SpawnBase, PY3
from .exceptions import EOF
class PopenSpawn(SpawnBase):
if PY3:
crlf = '\n'.encode('ascii')
else:
crlf = '\n'
def __init__(self, cmd, timeout=30, maxread=2000, searchwindowsize=None,
logfile=None, cwd=None, env=None, encoding=None,
codec_errors='strict'):
super(PopenSpawn, self).__init__(timeout=timeout, maxread=maxread,
searchwindowsize=searchwindowsize, logfile=logfile,
encoding=encoding, codec_errors=codec_errors)
kwargs = dict(bufsize=0, stdin=subprocess.PIPE,
stderr=subprocess.STDOUT, stdout=subprocess.PIPE,
cwd=cwd, env=env)
if sys.platform == 'win32':
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
kwargs['startupinfo'] = startupinfo
kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP
if not isinstance(cmd, (list, tuple)):
cmd = shlex.split(cmd)
self.proc = subprocess.Popen(cmd, **kwargs)
self.closed = False
self._buf = ''
self._read_queue = Queue()
self._read_thread = threading.Thread(target=self._read_incoming)
self._read_thread.setDaemon(True)
self._read_thread.start()
def read_nonblocking(self, size, timeout):
if self.closed:
raise ValueError('I/O operation on closed file.')
elif self.flag_eof:
self.closed = True
raise EOF('End Of File (EOF).')
if timeout == -1:
timeout = self.timeout
elif timeout is None:
timeout = 1e6
t0 = time.time()
buf = self.string_type()
while (time.time() - t0) < timeout and size and len(buf) < size:
try:
incoming = self._read_queue.get_nowait()
except Empty:
break
else:
if incoming is None:
self.flag_eof = True
raise EOF('End of File')
buf += self._decoder.decode(incoming, final=False)
if len(buf) > size:
self.buffer = buf[size:]
buf = buf[:size]
self._log(buf, 'read')
return buf
def _read_incoming(self):
"""Run in a thread to move output from a pipe to a queue."""
fileno = self.proc.stdout.fileno()
while 1:
buf = b''
try:
buf = os.read(fileno, 1024)
except OSError as e:
self._log(e, 'read')
if not buf:
self._read_queue.put(None)
return
self._read_queue.put(buf)
time.sleep(0.001)
def write(self, s):
'''This is similar to send() except that there is no return value.
'''
self.send(s)
def writelines(self, sequence):
'''This calls write() for each element in the sequence.
The sequence can be any iterable object producing strings, typically a
list of strings. This does not add line separators. There is no return
value.
'''
for s in sequence:
self.send(s)
def _send(self, s):
return self.proc.stdin.write(s)
def send(self, s):
s = self._coerce_send_string(s)
self._log(s, 'send')
return self._send(s)
def sendline(self, s=''):
'''Wraps send(), sending string ``s`` to child process, with os.linesep
automatically appended. Returns number of bytes written. '''
n = self.send(s)
return n + self.send(self.linesep)
def wait(self):
status = self.proc.wait()
if status >= 0:
self.exitstatus = status
self.signalstatus = None
else:
self.exitstatus = None
self.signalstatus = -status
self.terminated = True
return status
def kill(self, sig):
if sys.platform == 'win32':
if sig in [signal.SIGINT, signal.CTRL_C_EVENT]:
sig = signal.CTRL_C_EVENT
elif sig in [signal.SIGBREAK, signal.CTRL_BREAK_EVENT]:
sig = signal.CTRL_BREAK_EVENT
else:
sig = signal.SIGTERM
os.kill(self.proc.pid, sig)
def sendeof(self):
self.proc.stdin.close()
|
Python
| 0.000941
|
@@ -3556,17 +3556,16 @@
 def
-_
 send(sel
@@ -3563,32 +3563,171 @@
 send(self, s):\n
+        s = self._coerce_send_string(s)\n        self._log(s, 'send')\n\n        b = self._encoder.encode(s, final=False)\n        if PY3:\n
 return s
@@ -3751,83 +3751,186 @@
 ite(
-s)\n\n    def send(self, s):\n        s = self._coerce_send_string(s)\n
+b)\n        else:\n            # On Python 2, .write() returns None, so we return the length of\n            # bytes written ourselves. This assumes they all got written.\n
 self
@@ -3925,38 +3925,47 @@
+
 self.
-_log(s, 'send')\n\n
+proc.stdin.write(b)\n
@@ -3975,20 +3975,13 @@
 urn
-self._send(s
+len(b
 )\n\n
|
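Applying the decoded hunks to old_contents, `_send` disappears and `send` gains the Python 2 fallback; the reconstructed method:

    def send(self, s):
        s = self._coerce_send_string(s)
        self._log(s, 'send')

        b = self._encoder.encode(s, final=False)
        if PY3:
            return self.proc.stdin.write(b)
        else:
            # On Python 2, .write() returns None, so we return the length of
            # bytes written ourselves. This assumes they all got written.
            self.proc.stdin.write(b)
            return len(b)

With this, `sendline` can again sum the two `send` return values instead of failing on None on Python 2.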
8bbead8f6414a41579086a97a6cb4ac52fab2a39
|
fix a typo in a table config
|
twirl.py
|
twirl.py
|
#!/usr/bin/env python
# Tai Sakuma <sakuma@cern.ch>
import os, sys
import argparse

import ROOT

import AlphaTwirl
import Framework
import Scribbler

ROOT.gROOT.SetBatch(1)

##__________________________________________________________________||
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help = "the path to the input file")
parser.add_argument("-p", "--process", default = 1, type = int, help = "number of processes to run in parallel")
parser.add_argument('-o', '--outdir', default = os.path.join('tbl', 'out'))
parser.add_argument('-q', '--quiet', action = 'store_true', default = False, help = 'quiet mode')
parser.add_argument('-n', '--nevents', default = -1, type = int, help = 'maximum number of events to process for each component')
parser.add_argument('--max-events-per-process', default = -1, type = int, help = 'maximum number of events per process')
parser.add_argument('--force', action = 'store_true', default = False, help = 'recreate all output files')
args = parser.parse_args()

##__________________________________________________________________||
def main():

    reader_collector_pairs = [ ]

    #
    # configure scribblers
    #
    NullCollector = AlphaTwirl.Loop.NullCollector
    reader_collector_pairs.extend([
        (Scribbler.EventAuxiliary(), NullCollector()),
        (Scribbler.MET(), NullCollector()),
        (Scribbler.GenParticle(), NullCollector()),
        (Scribbler.HFPreRecHit(), NullCollector()),
        # (Scribbler.Scratch(), NullCollector()),
    ])

    #
    # configure tables
    #
    Binning = AlphaTwirl.Binning.Binning
    Echo = AlphaTwirl.Binning.Echo
    Round = AlphaTwirl.Binning.Round
    RoundLog = AlphaTwirl.Binning.RoundLog
    Combine = AlphaTwirl.Binning.Combine
    echo = Echo(nextFunc = None)
    tblcfg = [
        dict(keyAttrNames = ('run', ), binnings = (echo, )),
        dict(keyAttrNames = ('lumi', ), binnings = (echo, )),
        dict(keyAttrNames = ('eventId', ), binnings = (echo, )),
        dict(keyAttrNames = ('pfMet', ), binnings = (Round(10, 0), )),
        dict(keyAttrNames = ('genParticle_pdgId', ), keyIndices = ('*', ), binnings = (echo, ), keyOutColumnNames = ('gen_pdg', )),
        dict(keyAttrNames = ('genParticle_eta', ), keyIndices = ('*', ), binnings = (Round(0.1, 0), ), keyOutColumnNames = ('gen_eta', )),
        dict(keyAttrNames = ('genParticle_pdgId', 'genParticle_eta'), keyIndices = ('*', '\\0'), binnings = (echo, Round(0.1, 0)), keyOutColumnNames = ('gen_pdg', 'gen_eta')),
        dict(keyAttrNames = ('genParticle_phi', ), keyIndices = ('*', ), binnings = (Round(0.1, 0), ), keyOutColumnNames = ('gen_phi', )),
        dict(keyAttrNames = ('genParticle_energy', ), keyIndices = ('*', ), binnings = (Round(0.1, 0), ), keyOutColumnNames = ('gen_energy', )),
        dict(
            keyAttrNames = ('hfrechit_ieta', 'hfrechit_iphi', 'hfrechit_QIE10_index'),
            keyIndices = ('(*)', '\\1', '\\1'),
            binnings = (echo, echo, echo),
            valAttrNames = ('hfrechit_QIE10_energy', ),
            valIndices = ('\\1', ),
            keyOutColumnNames = ('ieta', 'iphi', 'idxQIE10'),
            valOutColumnNames = ('energy', ),
            summaryClass = AlphaTwirl.Summary.Sum,
            outFile = True,
        ),
    ]

    # complete table configs
    tableConfigCompleter = AlphaTwirl.Configure.TableConfigCompleter(
        defaultSummaryClass = AlphaTwirl.Summary.Count,
        defaultOutDir = args.outdir
    )
    tblcfg = [tableConfigCompleter.complete(c) for c in tblcfg]

    # do not recreate tables that already exist unless the force option is used
    if not args.force:
        tblcfg = [c for c in tblcfg if c['outFile'] and not os.path.exists(c['outFilePath'])]

    reader_collector_pairs.extend(
        [AlphaTwirl.Configure.build_counter_collector_pair(c) for c in tblcfg]
    )

    #
    # configure data sets
    #
    dataset = Framework.Dataset('root3', [args.input])

    #
    # run
    #
    fw = Framework.Framework(
        quiet = args.quiet,
        process = args.process,
        max_events_per_dataset = args.nevents,
        max_events_per_process = args.max_events_per_process
    )
    fw.run(
        dataset = dataset,
        reader_collector_pairs = reader_collector_pairs
    )

##__________________________________________________________________||
if __name__ == '__main__':
    main()
|
Python
| 0.00129
|
@@ -2448,17 +2448,19 @@
 ces = ('
-*
+(*)
 ', '\\0'
|
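The hunk turns `'*'` into `'(*)'` in the two-key table config, the entry containing `'\\0'`, matching the `'(*)'`/`'\\1'` convention the HFPreRecHit table already uses. Patched entry (reconstruction):

        dict(keyAttrNames = ('genParticle_pdgId', 'genParticle_eta'), keyIndices = ('(*)', '\\0'), binnings = (echo, Round(0.1, 0)), keyOutColumnNames = ('gen_pdg', 'gen_eta')),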
9f45f8e59d08b1276be7bdc3d4c88cf616ad581a
|
add fix for header without GN or PE in sprot
|
src/sprot.py
|
src/sprot.py
|
#!/usr/bin/env python

import sys
from src.annotation import Annotation

# this function takes an ipr file and returns a list of annotations. 2 types of annotations are retrieved based on the following keys: "name" and "product"
def read_sprot(blast_file, gff_file, fasta_file):
    # retrieve relevant information from files
    fasta_info = get_fasta_info(fasta_file)
    gff_info = get_gff_info(gff_file)
    blast_info = get_blast_info(blast_file)

    sprot_list = []
    for mrna, dbxref in blast_info.items():  # blast_info maps mrna's to dbxrefs
        if dbxref not in fasta_info:  # these two if's shouldn't occur but just in case...
            print(mrna+" has dbxref "+dbxref+" that's not in the fasta. Skipping...")
            continue
        if mrna not in gff_info:
            print(mrna+" not in gff. Skipping...")
            continue
        # fasta_info maps dbxrefs to products and names
        product = fasta_info[dbxref][0]
        gene_name = fasta_info[dbxref][1]
        # gff_info maps mrna's to the parent gene id's
        gene_id = gff_info[mrna]
        # add annotations to annotation list
        sprot_list.append(Annotation(gene_id, "name", gene_name))
        sprot_list.append(Annotation(mrna, "product", product))
    return sprot_list

# this function reads a fasta file and returns a dictionary mapping dbxrefs to the 2-tuple (product, gene name)
def get_fasta_info(fasta_file):
    dbxrefs = {}
    for line in fasta_file:
        if line[0] == '>':  # if we have a header line
            words = line.split(" ")  # we break the line by spaces to get "words"
            ref = words[0][1:]  # we are assuming the first "word" is the dbxref with no spaces. the "[1:]" is to get rid of the starting '>'
            i = 0
            # loop through the words till we find "OS=" (which we assume exists). We are assuming all the words between here and the ref is the product.
            while words[i].find("OS=") == -1:
                i += 1
            product = " ".join(words[1:i])
            # loop through the words till we find "GN=" or "PE=". We are assuming "PE=" comes immediately after "GN=" so if we hit "PE=" first, then the gene name doesn't exist. We also assume the gene name is one word
            while (words[i].find("GN=") == -1 and words[i].find("PE=") == -1) or (i+1 < len(words)):
                i += 1
            if not words[i].find("GN=") == -1:  # if gene name exists
                name = words[i][3:]  # the "[3:]" is to get rid of the "GN=" in the beginning
            else:  # if gene name doesn't exist, use part of the dbxref for the name
                name = ref.split("|")[2].split("_")[0]
            # add to dictionary
            dbxrefs[ref] = (product, name)
    return dbxrefs

# this function reads a blast file and returns a dictionary mapping mrna's to dbxrefs. This function assumes the file is tab-separated and has mrna in the 0th and dbxref in the 1st column
def get_blast_info(blast_file):
    mrna_dbxrefs = {}
    for line in blast_file:
        columns = line.split("\t")
        mrna = columns[0]
        ref = columns[1]
        if mrna not in mrna_dbxrefs:
            mrna_dbxrefs[mrna] = ref
    return mrna_dbxrefs

# this function reads a gff file and returns a dictionary mapping mrna id's to its parent gene id
def get_gff_info(gff_file):
    mrna_genes = {}
    for i, line in enumerate(gff_file):
        columns = line.split("\t")
        if len(columns) > 1 and columns[2] == "mRNA":  # if this is an "mRNA row"
            mrna_id = ""
            parent_gene = ""
            for attribute in columns[8].strip().split(";"):
                split = attribute.split("=")
                key, val = split[0], split[1]
                if key == "ID":
                    mrna_id = val
                elif key == "Parent":
                    parent_gene = val
            if not mrna_id or not parent_gene:
                print("Failed to get mRNA info at line "+str(i)+" of GFF because it is missing the ID and/or Parent attributes")
                continue
            mrna_genes[mrna_id] = parent_gene
    return mrna_genes
|
Python
| 0
|
@@ -2323,18 +2323,19 @@
 == -1)
-or
+and
 (i+1 <
|
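The hunk flips `or` to `and`: with `or`, the scan ignored the GN=/PE= tags until it reached the end of the word list and could run the index past it when neither tag exists; with `and`, it stops at the first GN=/PE= tag or safely at the last word. Patched loop (reconstruction):

            while (words[i].find("GN=") == -1 and words[i].find("PE=") == -1) and (i+1 < len(words)):
                i += 1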
b829a4b8e53dc84703e03aba662b21cf1faa0a29
|
Update annual_emissions.py
|
cea/plots/optimization/annual_emissions.py
|
cea/plots/optimization/annual_emissions.py
|
from __future__ import division
from __future__ import print_function

import plotly.graph_objs as go

import cea.plots.optimization
from cea.plots.variable_naming import NAMING, COLOR

__author__ = "Daren Thomas"
__copyright__ = "Copyright 2019, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Jimeno A. Fonseca", "Daren Thomas"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "cea@arch.ethz.ch"
__status__ = "Production"


class AnnualEmissionsPlot(cea.plots.optimization.GenerationPlotBase):
    """Implement the "CAPEX vs. OPEX of centralized system in generation X" plot"""
    name = "Annual emissions"

    expected_parameters = {
        'generation': 'plots-optimization:generation',
        'scenario-name': 'general:scenario-name',
    }

    def __init__(self, project, parameters, cache):
        super(AnnualEmissionsPlot, self).__init__(project, parameters, cache)
        self.analysis_fields = ["GHG_sys_connected_tonCO2",
                                "GHG_sys_disconnected_tonCO2",
                                ]
        self.input_files = [(self.locator.get_optimization_generation_total_performance, [self.generation])]

    @property
    def title(self):
        return "Annual emissions for generation #%s" % self.generation

    @property
    def output_path(self):
        return self.locator.get_timeseries_plots_file(
            'gen{generation}_annualized_costs'.format(generation=self.generation),
            self.category_name)

    @property
    def layout(self):
        return go.Layout(barmode='relative',
                         yaxis=dict(title='Annual emissions [ton CO2-eq/year]'))

    def calc_graph(self):
        self.multi_criteria = False  # TODO: add capabilities to plot multicriteria in this plot too
        data = self.process_generation_total_performance()
        graph = []
        for field in self.analysis_fields:
            y = data[field].values
            flag_for_unused_technologies = all(v == 0 for v in y)
            if not flag_for_unused_technologies:
                trace = go.Bar(x=data['individual_name'], y=y, name=NAMING[field],
                               marker=dict(color=COLOR[field]))
                graph.append(trace)

        return graph


def main():
    """Test this plot"""
    import cea.config
    import cea.plots.cache
    config = cea.config.Configuration()
    cache = cea.plots.cache.NullPlotCache()
    locator = cea.inputlocator.InputLocator(config.scenario)
    # cache = cea.plots.cache.PlotCache(config.project)
    AnnualEmissionsPlot(config.project,
                        {'buildings': None,
                         'scenario-name': config.scenario_name,
                         'generation': config.plots_optimization.generation},
                        cache).plot(auto_open=True)


if __name__ == '__main__':
    main()
|
Python
| 0
|
@@ -1856,16 +1856,23 @@
 formance
+_pareto
 ()\n
|
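The hunk only appends `_pareto` to the accessor name; the patched call in `calc_graph` would read:

        data = self.process_generation_total_performance_pareto()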
4d1a33462e73111f2507c4fd1e990af2952ad3df
|
Fix serializer tests
|
demo/tests/serializers/tests_validations.py
|
demo/tests/serializers/tests_validations.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.test import TestCase

from formidable.models import Formidable
from formidable.serializers.validation import (
    MinLengthSerializer, RegexpSerializer,
    ValidationSerializer
)


class ValidationSerializerTest(TestCase):

    def setUp(self):
        super(ValidationSerializerTest, self).setUp()
        self.form = Formidable.objects.create(
            label='test', description='test'
        )
        self.text = self.form.fields.create(
            type_id='text', slug='input-text', label='name',
        )

    def test_int_value(self):
        data = {'field_id': self.text.id, 'value': 5, 'type': 'minlength'}
        serializer = MinLengthSerializer(data=data)
        self.assertTrue(serializer.is_valid())

    def test_non_int_value(self):
        data = {'field_id': self.text.id, 'value': 'test', 'type': 'minlength'}
        serializer = MinLengthSerializer(data=data)
        self.assertFalse(serializer.is_valid())

    def test_regexp_value(self):
        data = {
            'field_id': self.text.id, 'value': '\w+ly', 'type': 'minlength'
        }
        serializer = RegexpSerializer(data=data)
        self.assertTrue(serializer.is_valid())

    def test_invalid_regexp_value(self):
        data = {
            'field_id': self.text.id, 'value': '\w+ly(', 'type': 'minlength'
        }
        serializer = RegexpSerializer(data=data)
        self.assertFalse(serializer.is_valid())

    def test_update_validations(self):
        list_serializer = ValidationSerializer(many=True)
        self.text.validations.create(
            value='5', type='minlength'
        )
        list_serializer.update(
            self.text.validations,
            [{'type': 'minlength', 'value': '12'}],
            self.text
        )
        self.assertEquals(self.text.validations.count(), 1)
        validation = self.text.validations.first()
        self.assertEquals(validation.value, '12')
|
Python
| 0.000003
|
@@ -295,16 +295,34 @@
 stCase):
+\n    increment = 0
 \n\n    de
@@ -340,62 +340,8 @@
 f):\n
-        super(ValidationSerializerTest, self).setUp()\n
@@ -451,20 +451,54 @@
 self.
-text
+increment += 1\n        self.text_field
 = self.
@@ -544,16 +544,28 @@
 ='text',
+\n
 slug='i
@@ -573,18 +573,56 @@
 put-text
-',
+-{}'.format(self.increment),\n
 label='
@@ -628,16 +628,37 @@
 'name',\n
+            order=1,\n
@@ -698,32 +698,45 @@
 data = {
+\n
 'field_id': self
@@ -744,44 +744,84 @@
 text
-.id, 'value': 5, 'type': 'minlength'
+_field.id,\n            'type': 'MINLENGTH',\n            'value': 5,\n
 }\n
@@ -968,16 +968,29 @@
 data = {
+\n
 'field_i
@@ -1006,49 +1006,89 @@
 text
-.id, 'value': 'test', 'type': 'minlength'
+_field.id,\n            'type': 'MINLENGTH',\n            'value': 'test',\n
 }\n
@@ -1273,50 +1273,78 @@
 text
-.id, 'value': '\w+ly', 'type': 'minlength'
+_field.id,\n            'type': 'REGEXP',\n            'value': '\w+ly',
 \n
@@ -1542,51 +1542,79 @@
 text
-.id, 'value': '\w+ly(', 'type': 'minlength'
+_field.id,\n            'type': 'REGEXP',\n            'value': '\w+ly(',
 \n
@@ -1824,32 +1824,38 @@
 self.text
+_field
 .validations.cre
@@ -1875,35 +1875,48 @@
-value='5', type='minlength'
+type='MINLENGTH',\n            value='5',
 \n
@@ -1967,32 +1967,38 @@
 self.text
+_field
 .validations,\n
@@ -2011,30 +2011,92 @@
-[{'type': 'minlength',
+self.text_field,\n            [{\n                'type': 'MINLENGTH',\n
 'va
@@ -2109,11 +2109,8 @@
 '12'
-}],
 \n
@@ -2118,25 +2118,19 @@
-self.text
+}],
 \n
@@ -2159,32 +2159,38 @@
 Equals(self.text
+_field
 .validations.cou
@@ -2228,16 +2228,22 @@
 elf.text
+_field
 .validat
|
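Reconstructed from the decoded hunks, the patched test class drops the `super().setUp()` call and creates a uniquely-slugged field per test; the remaining hunks rename `self.text` to `self.text_field` and upper-case the type strings to 'MINLENGTH'/'REGEXP':

class ValidationSerializerTest(TestCase):
    increment = 0

    def setUp(self):
        self.form = Formidable.objects.create(
            label='test', description='test'
        )
        self.increment += 1
        self.text_field = self.form.fields.create(
            type_id='text',
            slug='input-text-{}'.format(self.increment),
            label='name',
            order=1,
        )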
74a9cfe1206e3314890af165e5c8193c687844a0
|
Add files via upload
|
post.py
|
post.py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 4 16:23:07 2017
@author: mojod
"""
import random
import tweepy
consumer_key='L3MsyCOoqgSPc4jzZV8wero0d'
consumer_secret='ZCOI3x1f8GZ9c2cJ8kPYyyBW4gRX4MJBbyHijGE1UObnAow6ka'
access_token='3789452353-dmM75KVaDGqIPz6ZtzP8b5Q6VkvzQQo9Sn34ZOZ'
access_token_secret='JvYlzlqM3AHj7IXSMMoIgS0A8auqzI1KyMjKnJeT3gn8w'
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
f = open("speech.txt")
sentence = f.read().split('.')
status = random.choice(sentence) + "."
print(status)
api.update_status(status)
f.close
|
Python
| 0
|
@@ -89,9 +89,8 @@
 \r\n\r\n
-\t
 \r\nim
@@ -116,16 +116,29 @@
 tweepy\r
+\nimport time\r
 \n\r\nconsu
@@ -551,16 +551,33 @@
 .txt")\r\n
+while True:\r\n
 sentence
@@ -600,16 +600,20 @@
 t('.')\r\n
+
 status =
@@ -648,27 +648,12 @@
 ."\r\n
-\r\nprint(status)\r\n\r\n
+
 api.
@@ -679,11 +679,26 @@
 s)\r\n
-f.close
+    time.sleep(3600)\r\n
|
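Decoded, the commit adds `import time` and wraps the posting steps in an hourly loop (a reconstruction; note that `f` is still opened only once, so `f.read()` returns an empty string after the first pass):

import time
...
f = open("speech.txt")
while True:
    sentence = f.read().split('.')
    status = random.choice(sentence) + "."
    api.update_status(status)
    time.sleep(3600)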
f922671cf3f29ea55ac9077fd3579da5a7504f25
|
Add typecheck to SigmodCrossEntropy
|
chainer/functions/sigmoid_cross_entropy.py
|
chainer/functions/sigmoid_cross_entropy.py
|
import numpy

from chainer import cuda
from chainer import function
from chainer.functions import sigmoid


class SigmoidCrossEntropy(function.Function):

    """Sigmoid activation followed by a sigmoid cross entropy loss."""

    def __init__(self, use_cudnn=True):
        self.use_cudnn = use_cudnn

    def forward_cpu(self, inputs):
        x, t = inputs
        assert x.shape == t.shape
        self.y, = sigmoid.Sigmoid().forward_cpu((x,))
        # stable computation of the cross entropy.
        loss = -numpy.sum(
            x * (t - (x >= 0)) - numpy.log1p(numpy.exp(-numpy.abs(x))))
        return numpy.array(loss / t.shape[0], dtype=numpy.float32),

    def forward_gpu(self, inputs):
        x, t = inputs
        self.y, = sigmoid.Sigmoid(self.use_cudnn).forward_gpu((x,))
        loss = -cuda.reduce(
            'int* t, float* x',
            'x[i] * (t[i] - (x[i] >= 0)) - log1pf(expf(-fabsf(x[i])))',
            'a+b', '0', 'sigmoid_crossent_fwd', numpy.float32)(t, x)
        return loss / t.shape[0],

    def backward_cpu(self, inputs, grad_outputs):
        t, gloss = inputs[1], grad_outputs[0]
        gx = gloss * (self.y - t) / t.shape[0]
        return gx, None

    def backward_gpu(self, inputs, grad_outputs):
        t, gloss = inputs[1], grad_outputs[0]
        gx = cuda.empty_like(self.y)
        coeff = gloss / t.shape[0]
        cuda.elementwise(
            'float* gx, const float* y, const int* t, const float* coeff',
            'gx[i] = *coeff * (y[i] - t[i])',
            'sigmoid_crossent_bwd')(gx, self.y, t, coeff)
        return gx, None


def sigmoid_cross_entropy(x, t, use_cudnn=True):
    """Computes cross entropy loss for sigmoid activations.

    Args:
        x (Variable): A variable object holding a matrix whose (i, j)-th
            element indicates the unnormalized log probability of the j-th unit
            at the i-th example.
        t (Variable): A variable object holding an int32 vector of groundtruth
            binary labels.

    Returns:
        Variable: A variable object holding a scalar array of the cross entropy
            loss.

    .. note::

       This function is differentiable only by ``x``.

    """
    return SigmoidCrossEntropy(use_cudnn)(x, t)
|
Python
| 0
|
@@ -99,16 +99,53 @@
 sigmoid\n
+from chainer.utils import type_check\n
 \n\nclass
@@ -333,16 +333,306 @@
 _cudnn\n\n
+    def check_type_forward(self, in_types):\n        type_check.expect(in_types.size() == 2)\n\n        x_type, t_type = in_types\n        type_check.expect(\n            x_type.dtype == numpy.float32,\n            t_type.dtype == numpy.int32,\n            x_type.shape == t_type.shape\n        )\n\n
     def
@@ -684,42 +684,8 @@
 uts\n
-        assert x.shape == t.shape\n
|
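The added hunk is self-contained; decoded, the new import and method read:

from chainer.utils import type_check

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 2)

        x_type, t_type = in_types
        type_check.expect(
            x_type.dtype == numpy.float32,
            t_type.dtype == numpy.int32,
            x_type.shape == t_type.shape
        )

It replaces the `assert x.shape == t.shape` in `forward_cpu`, moving shape and dtype validation into chainer's type-checking hook.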
85d2c012bfaeeb04fa8dd31cd05a04a8dc43c14e
|
Add tests that have and get of nonterms raise exceptions
|
tests/grammar_term-nonterm_test/NonterminalsInvalidTest.py
|
tests/grammar_term-nonterm_test/NonterminalsInvalidTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy.RawGrammar import RawGrammar
class NonterminalsInvalidTest(TestCase):
    pass


if __name__ == '__main__':
    main()
|
Python
| 0
|
@@ -195,68 +195,1579 @@
 mmar
-\r\n\r\n\r\nclass NonterminalsInvalidTest(TestCase):\r\n    pass\r\n\r\n
+ as Grammar\r\nfrom grammpy import Nonterminal\r\nfrom grammpy.exceptions import NotNonterminalException\r\n\r\n\r\nclass TempClass(Nonterminal):\r\n    pass\r\n\r\n\r\nclass NonterminalsInvalidTest(TestCase):\r\n    def test_invalidAddNumber(self):\r\n        gr = Grammar()\r\n        with self.assertRaises(NotNonterminalException):\r\n            gr.add_nonterm(0)\r\n\r\n    def test_invalidAddString(self):\r\n        gr = Grammar()\r\n        with self.assertRaises(NotNonterminalException):\r\n            gr.add_nonterm("string")\r\n\r\n    def test_invalidAddAfterCorrectAdd(self):\r\n        gr = Grammar()\r\n        gr.add_nonterm(TempClass)\r\n        with self.assertRaises(NotNonterminalException):\r\n            gr.add_nonterm("asdf")\r\n\r\n    def test_invalidAddInArray(self):\r\n        gr = Grammar()\r\n        with self.assertRaises(NotNonterminalException):\r\n            gr.add_nonterm([TempClass, "asdf"])\r\n\r\n    def test_invalidHaveNumber(self):\r\n        gr = Grammar()\r\n        with self.assertRaises(NotNonterminalException):\r\n            gr.have_nonterm(0)\r\n\r\n    def test_invalidHaveString(self):\r\n        gr = Grammar()\r\n        with self.assertRaises(NotNonterminalException):\r\n            gr.have_nonterm("string")\r\n\r\n    def test_invalidHaveAfterCorrectAdd(self):\r\n        gr = Grammar()\r\n        gr.add_nonterm(TempClass)\r\n        with self.assertRaises(NotNonterminalException):\r\n            gr.have_nonterm("asdf")\r\n\r\n    def test_invalidHaveInArray(self):\r\n        gr = Grammar()\r\n        with self.assertRaises(NotNonterminalException):\r\n            gr.have_nonterm([TempClass, "asdf"])
 \r\n\r\n
|
25737b0d0389d0ccbd12d01f9076a889891f0a22
|
Update XENIFACE and XENVIF
|
manifestspecific.py
|
manifestspecific.py
|
# Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
build_tar_source_files = {
"xenbus" : "http://xenbus-build.uk.xensource.com:8080/job/XENBUS.git/13/artifact/xenbus.tar",
"xenvif" : "http://xenvif-build.uk.xensource.com:8080/job/XENVIF.git/16/artifact/xenvif.tar",
"xennet" : "http://xennet-build.uk.xensource.com:8080/job/XENNET.git/7/artifact/xennet.tar",
"xeniface" : "http://xeniface-build.uk.xensource.com:8080/job/XENIFACE.git/5/artifact/xeniface.tar",
"xenvbd" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVBD.git/8/artifact/xenvbd.tar",
"xenguestagent" : "http://xeniface-build.uk.xensource.com:8080/job/guest%20agent.git/33/artifact/xenguestagent.tar",
"xenvss" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVSS.git/6/artifact/xenvss.tar",
}
all_drivers_signed = False
|
Python
| 0
|
@@ -1621,17 +1621,17 @@
IF.git/1
-6
+8
/artifac
@@ -1835,9 +1835,9 @@
git/
-5
+7
/art
|
b6a09c80d349adc91e2a05de8864b75bcb4b71dc
|
Put whqled xenvif #56 into trunk
|
manifestspecific.py
|
manifestspecific.py
|
# Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
secureserver = r'\\10.80.13.10\distfiles\distfiles\WindowsBuilds'
localserver = r'\\camos.uk.xensource.com\build\windowsbuilds\WindowsBuilds'
build_tar_source_files = {
"xenbus" : r'standard-lcm\10\xenbus-7-2-0-47.tar',
"xenvif" : r'xenvif.git\56\xenvif.tar',
"xennet" : r'standard-lcm\13\xennet-7-2-0-14.tar',
"xeniface" : r'standard-lcm\12\xeniface-7-2-0-14.tar',
"xenvbd" : r'standard-lcm\14\xenvbd-7-2-0-40.tar',
"xenguestagent" : r'standard-lcm\11\xenguestagent-34.tar',
"xenvss" : r'standard-lcm\16\xenvss-7.tar',
}
all_drivers_signed = False
|
Python
| 0
|
@@ -1675,16 +1675,21 @@
 nvif.git
+.whql
 \56\xenv
@@ -1690,16 +1690,25 @@
 6\xenvif
+-7-2-0-56
 .tar',\r\n
@@ -2038,11 +2038,10 @@
 d =
-Fals
+Tru
 e\r\n
|
5d749f1d3e69ce233bd5ac81b39e535c0d02a954
|
Move back to last merged tools versions, to overcome buildnumber issue
|
manifestspecific.py
|
manifestspecific.py
|
# Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
build_tar_source_files = {
"xenbus" : "http://xenbus-build.uk.xensource.com:8080/job/XENBUS.git/35/artifact/xenbus.tar",
"xenvif" : "http://xenvif-build.uk.xensource.com:8080/job/XENVIF.git/37/artifact/xenvif.tar",
"xennet" : "http://xennet-build.uk.xensource.com:8080/job/XENNET.git/14/artifact/xennet.tar",
"xeniface" : "http://xeniface-build.uk.xensource.com:8080/job/XENIFACE.git/14/artifact/xeniface.tar",
"xenvbd" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVBD.git/18/artifact/xenvbd.tar",
"xenguestagent" : "http://xeniface-build.uk.xensource.com:8080/job/guest%20agent.git/34/artifact/xenguestagent.tar",
"xenvss" : "http://xenvbd-build.uk.xensource.com:8080/job/XENVSS.git/7/artifact/xenvss.tar",
}
all_drivers_signed = False
|
Python
| 0
|
@@ -1521,10 +1521,10 @@
git/
-35
+18
/art
@@ -1624,10 +1624,10 @@
git/
-37
+24
/art
@@ -1720,25 +1720,25 @@
XENNET.git/1
-4
+2
/artifact/xe
@@ -1933,26 +1933,25 @@
/XENVBD.git/
-18
+9
/artifact/xe
|
bd3d60cdfbc4cb8ed7810eb433b7ccb8f802f235
|
Move post.py to the new wiki helper.
|
post.py
|
post.py
|
#!/usr/bin/python
# Read irc logs from our private channel and post them to our wiki
import json
import os
import re
import sys
import textwrap
from simplemediawiki import MediaWiki
with open(os.path.expanduser('~/.mediawiki'), 'r') as f:
conf = json.loads(f.read())
wiki = MediaWiki(conf['url'])
day_re = re.compile('^--- Day changed (.*)$')
human_re = re.compile('.*<([^>]+)>.*')
days = []
def make_wiki_login_call(packet):
packet.update({'lgname': conf['username'],
'lgpassword': conf['password']})
return wiki.call(packet)
def post_page(title, text):
page_token = wiki.call({'action': 'query',
'prop': 'info',
'titles': title,
'intoken': 'edit'})
pages = page_token['query']['pages']
page_id = pages.keys()[0]
response = wiki.call({'action': 'edit',
'minor': True,
'bot': True,
'title': title,
'text': json.dumps(text).replace('\\n', '\n')[1:-1],
'token': pages[page_id]['edittoken']})
if not 'nochange' in response['edit']:
print 'Modified %s' % title
days.append(title)
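# Example (hypothetical title/text): post_page('rcbau irc log for
# Mon Jan 01 2013', text) edits the wiki page in place, and the title
# is recorded in `days` only when the edit actually changed the page.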
if __name__ == '__main__':
login = make_wiki_login_call({'action': 'login'})
token = make_wiki_login_call({'action': 'login',
'lgtoken': login['login']['token']})
day = None
content = []
with open(os.path.expanduser(conf['logpath']), 'r') as f:
l = f.readline()
while l:
if l.startswith('--- '):
m = day_re.match(l)
if m:
if content:
post_page('rcbau irc log for %s' % day,
''.join(content))
content = []
day = m.group(1)
elif day:
lines = textwrap.wrap(l.rstrip(), 120)
m = human_re.match(l)
if m and len(m.group(1)) > 1:
content.append(' \'\'\'%s\'\'\'\n'
% '\n \'\'\' '.join(lines))
elif l[7] == '*':
content.append(' \'\'%s\'\'\n'
% '\n \'\' '.join(lines))
else:
content.append(' %s\n' % '\n '.join(lines))
l = f.readline()
if day and content:
post_page('rcbau irc log for %s' % day,
''.join(content))
if days:
post_page('rcbau irc log index',
'* [[%s]]' % ']]\n* [['.join(days))
|
Python
| 0
|
@@ -143,43 +143,16 @@
rap%0A
-%0Afrom simplemediawiki
import
-MediaW
+w
iki%0A
@@ -247,39 +247,8 @@
))%0A%0A
-wiki = MediaWiki(conf%5B'url'%5D)%0A%0A
day_
@@ -333,1084 +333,101 @@
')%0A%0A
-days = %5B%5D%0A%0A%0Adef make_wiki_login_call(packet):%0A packet.update(%7B'lgname': conf%5B'username'%5D,%0A 'lgpassword': conf%5B'password'%5D%7D)%0A return wiki.call(packet)%0A%0Adef post_page(title, text):%0A page_token = wiki.call(%7B'action': 'query',%0A 'prop': 'info',%0A 'titles': title,%0A 'intoken': 'edit'%7D)%0A pages = page_token%5B'query'%5D%5B'pages'%5D%0A page_id = pages.keys()%5B0%5D%0A%0A response = wiki.call(%7B'action': 'edit',%0A 'minor': True,%0A 'bot': True,%0A 'title': title,%0A 'text': json.dumps(text).replace('%5C%5Cn', '%5Cn')%5B1:-1%5D,%0A 'token': pages%5Bpage_id%5D%5B'edittoken'%5D%7D)%0A if not 'nochange' in response%5B'edit'%5D:%0A print 'Modified %25s' %25 title%0A days.append(title)%0A%0A%0Aif __name__ == '__main__':%0A login = make_wiki_login_call(%7B'action': 'login'%7D)%0A token = make_wiki_login_call(%7B'action': 'login',%0A 'lgtoken': login%5B'login'%5D%5B'token'%5D%7D
+%0Aif __name__ == '__main__':%0A w = wiki.Wiki(conf%5B'url'%5D, conf%5B'username'%5D, conf%5B'password'%5D
)%0A%0A
@@ -440,16 +440,30 @@
= None%0A
+ days = %5B%5D%0A
cont
@@ -467,24 +467,25 @@
ontent = %5B%5D%0A
+%0A
with ope
@@ -725,34 +725,32 @@
-post_page(
+title =
'rcbau irc l
@@ -757,33 +757,32 @@
og for %25s' %25 day
-,
%0A
@@ -795,34 +795,87 @@
- ''.join(content)
+w.post_page(title, ''.join(content))%0A days.append(title
)%0A
@@ -1535,34 +1535,32 @@
nt:%0A
-post_page(
+title =
'rcbau irc l
@@ -1575,17 +1575,16 @@
s' %25 day
-,
%0A
@@ -1584,25 +1584,34 @@
-
+w.post_page(title,
''.join
@@ -1620,16 +1620,43 @@
ontent))
+%0A days.append(title)
%0A%0A if
@@ -1670,16 +1670,18 @@
+w.
post_pag
@@ -1705,16 +1705,18 @@
index',%0A
+
|
516c18a74f1b606b03ab07091cb0004e75c0a49b
|
Fix kate plugin
|
kate_plugin.py
|
kate_plugin.py
|
"""
isort/kate_plugin.py
Provides a simple kate plugin that enables the use of isort to sort Python imports
in the currently open kate file.
Copyright (C) 2013 Timothy Edmund Crosley
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from PyKDE4.ktexteditor import KTextEditor
import kate
from isort import SortImports
@kate.action(text="Sort Imports", shortcut="Ctrl+[", menu="Python")
def sortImports():
document = kate.activeDocument()
document.setText(SortImports(file_contents=document.text()).output)
document.activeView().setCursorPosition(KTextEditor.Cursor(0, 0))
|
Python
| 0
|
@@ -927,16 +927,46 @@
A.%0A%22%22%22%0A%0A
+from isort import SortImports%0A
from PyK
@@ -1017,38 +1017,8 @@
ate%0A
-from isort import SortImports%0A
%0A%0A@k
|
ea251f4c40119108f7969af945caf6da7c11c7f7
|
read binary
|
kitchensink/rpc/views.py
|
kitchensink/rpc/views.py
|
import logging
from os.path import exists
import traceback
from flask import request, current_app, jsonify, send_file
from rq.job import Status
from .app import rpcblueprint
from ..serialization import pack_result, pack_results
from .. import settings
logger = logging.getLogger(__name__)
# we assume that you set rpc.rpc to some instance of an RPC object
def make_json(jsonstring, status_code=200, headers={}):
"""like jsonify, except accepts string, so we can do our own custom
json serialization. should move this to continuumweb later
"""
return current_app.response_class(response=jsonstring,
status=status_code,
headers=headers,
mimetype='application/json')
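# Example (hypothetical payload): make_json('{"ok": true}') returns a
# 200 JSON response whose body is the raw string, bypassing jsonify()'s
# own serialization step.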
### job endpoints
@rpcblueprint.route("/call/<rpcname>/", methods=['POST'])
def call(rpcname):
msg = request.data
rpc = rpcblueprint.rpcs[rpcname]
result = rpc.call(msg)
return current_app.response_class(response=result,
status=200,
                                      mimetype='application/octet-stream')
@rpcblueprint.route("/status/<job_id>/")
def status(job_id):
timeout = request.values.get('timeout')
if timeout:
timeout = float(timeout)
metadata, value = rpcblueprint.task_queue.status(job_id, timeout=timeout)
result = pack_result(metadata, value, fmt=metadata['result_fmt'])
return current_app.response_class(response=result,
status=200,
mimetype='application/octet-stream')
@rpcblueprint.route("/cancel/<job_id>/")
def cancel(job_id):
rpcblueprint.task_queue.cancel(job_id)
return "success"
@rpcblueprint.route("/bulkstatus/")
def bulk_status():
timeout = request.values.get('timeout', 1)
job_ids = request.values.get('job_ids').split(",")
if timeout:
timeout = int(timeout)
metadata_data_pairs = rpcblueprint.task_queue.bulkstatus(job_ids, timeout=timeout)
fmt = [x[0]['result_fmt'] for x in metadata_data_pairs]
result = pack_results(metadata_data_pairs, fmt=fmt)
return current_app.response_class(response=result,
status=200,
mimetype='application/octet-stream')
### Data endpoints
@rpcblueprint.route("/data/<path:path>/", methods=['GET'])
def get_data(path):
#check auth here if we're doing auth
offset = request.values.get('offset')
length = request.values.get('length')
if offset is not None and length is not None:
local_path = settings.catalog.get_file_path(path, unfinished=True)
offset = int(offset)
length = int(length)
if not exists(local_path):
data = b""
else:
with open(local_path, "r") as f:
f.seek(offset)
data = f.read(length)
logger.info("sending %s of %s", len(data), path)
return current_app.response_class(response=data,
status=200,
mimetype='application/octet-stream')
else:
local_path = settings.catalog.get_file_path(path)
logger.info("sending %s", path)
return send_file(local_path)
@rpcblueprint.route("/data/<path:path>/", methods=['POST'])
def put_data(path):
#check auth here if we're doing auth
fstorage = request.files['data']
try:
settings.catalog.write(fstorage, path, is_new=True)
return jsonify(success=True)
except Exception as e:
exc_info = traceback.format_exc()
return jsonify(error=exc_info)
@rpcblueprint.route("/chunkeddata/<path:path>/", methods=['GET'])
def get_chunked_data(path):
#check auth here if we're doing auth
offset = int(request.values['offset'])
    length = int(request.values['length'])
    local_path = settings.catalog.get_file_path(path)
    with open(local_path, "rb") as f:
        f.seek(offset)
        data = f.read(settings.chunk_size)
return current_app.response_class(response=data,
status=200,
                                      mimetype='application/octet-stream')
|
Python
| 0.999982
|
@@ -2867,16 +2867,17 @@
path, %22r
+b
%22) as f:
|
f2dfbfbee1cd87f2e6f499b78eae1a8ca39dd529
|
create a category form
|
qanda/qanda_app/forms.py
|
qanda/qanda_app/forms.py
|
from django.forms import ModelForm, Textarea, TextInput, Select
from models import Question, Answer, Reply
from django import forms
from django.conf import settings
from django.utils.translation import ugettext as _
class QuestionForm(ModelForm):
def __init__(self, *args, **kwargs):
super(QuestionForm, self).__init__(*args, **kwargs)
self.fields['category'].required = False
class Meta:
model = Question
fields = ('title', 'category', 'text', 'tags')
widgets = {
'title': TextInput(attrs={'size': 220, 'class':'span8'}),
'category' : Select(attrs={'class':'span7'},),
'text': Textarea(attrs={'cols': 120, 'rows': 16, 'class':'span8'}),
# 'tags': TextInput(attrs={'size': 220, 'class':'span8'}),
}
class QuestionCloseForm(forms.Form):
message = forms.CharField(max_length=511, widget=Textarea(attrs={'rows': 10, 'id':'questionCloseForm',}), initial=_("This question is closed by the administrators. ")+getattr(settings, 'ROOT_URL', 'http://127.0.0.1:8000/qanda'))
class AnswerForm(ModelForm):
class Meta:
model = Answer
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 10, 'class':'span8'}),
}
class ReplyForm(ModelForm):
class Meta:
model = Reply
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 2, 'class':'span7'}),
}
class SubscriptionForm(forms.Form):
subscribed = forms.BooleanField(initial=False)
|
Python
| 0.000911
|
@@ -99,16 +99,26 @@
r, Reply
+, Category
%0Afrom dj
@@ -1000,16 +1000,313 @@
nda'))%0A%0A
+class CategoryForm(ModelForm):%0A%09class Meta:%0A%09%09model = Category%0A%09%09fields = ('name', 'about', )%0A%09%09widgets = %7B%0A%09%09%09'name' : TextInput(attrs=%7B'size': 180, 'class':'span6', 'id':'addCategoryName',%7D),%0A%09%09%09'about' : Textarea(attrs=%7B'rows': 6, 'cols':80, 'class':'span6', 'id':'addCategoryAbout',%7D),%0A%09%09%7D%0A%09%0A
class An
|
5d69fa2a169274c65bfd047199a2df9c88f188e3
|
use the taggit widget in question form
|
qanda/qanda_app/forms.py
|
qanda/qanda_app/forms.py
|
from django.forms import ModelForm, Textarea, TextInput, Select
from models import Question, Answer, Reply
from django import forms
class QuestionForm(ModelForm):
def __init__(self, *args, **kwargs):
super(QuestionForm, self).__init__(*args, **kwargs)
self.fields['category'].required = False
class Meta:
model = Question
fields = ('title', 'category', 'text', 'tags')
widgets = {
'title': TextInput(attrs={'size': 220, 'class':'span8'}),
'category' : Select(attrs={'class':'span7'},),
'text': Textarea(attrs={'cols': 120, 'rows': 20, 'class':'span8'}),
'tags': TextInput(attrs={'size': 220, 'class':'span8'}),
}
class AnswerForm(ModelForm):
class Meta:
model = Answer
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 10, 'class':'span8'}),
}
class ReplyForm(ModelForm):
class Meta:
model = Reply
fields = ( 'text', )
widgets = {
'text': Textarea(attrs={'cols': 120, 'rows': 2, 'class':'span7'}),
}
class SubscriptionForm(forms.Form):
subscribed = forms.BooleanField(initial=False)
|
Python
| 0.000001
|
@@ -574,16 +574,18 @@
'%7D),%0A%09%09%09
+#
'tags':
|
174a374a685829ede49236f820122b442b9ec920
|
Fix taichi_dynamic example (#4767)
|
python/taichi/examples/features/sparse/taichi_dynamic.py
|
python/taichi/examples/features/sparse/taichi_dynamic.py
|
import taichi as ti
x = ti.field(ti.i32)
l = ti.field(ti.i32)
n = 16
ti.init()
ti.root.dense(ti.i, n).dynamic(ti.j, n).place(x)
ti.root.dense(ti.i, n).place(l)
@ti.kernel
def make_lists():
for i in range(n):
for j in range(i):
ti.append(x.parent(), i, j * j)
l[i] = ti.length(x.parent(), i)
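# Note: x.parent() is the dynamic SNode along ti.j, so ti.append grows
# row i one element at a time and ti.length reports how many elements
# row i currently holds.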
make_lists()
for i in range(n):
assert l[i] == i
for j in range(n):
assert x[i, j] == (j * j if j < i else 0)
|
Python
| 0
|
@@ -14,16 +14,27 @@
as ti%0A%0A
+ti.init()%0A%0A
x = ti.f
@@ -75,27 +75,16 @@
n = 16%0A%0A
-ti.init()%0A%0A
ti.root.
|
d57161b9449faa1218e4dab55fe4b2bd6f0c3436
|
Remove unused code and get rid of flake8 errors
|
utils.py
|
utils.py
|
import json
import os
import time
import uuid
from google.appengine.api import urlfetch
from models import Profile
def getUserId(user, id_type="email"):
if id_type == "email":
return user.email()
if id_type == "oauth":
"""A workaround implementation for getting userid."""
auth = os.getenv('HTTP_AUTHORIZATION')
bearer, token = auth.split()
token_type = 'id_token'
if 'OAUTH_USER_ID' in os.environ:
token_type = 'access_token'
url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?%s=%s'
% (token_type, token))
user = {}
wait = 1
for i in range(3):
resp = urlfetch.fetch(url)
if resp.status_code == 200:
user = json.loads(resp.content)
break
elif resp.status_code == 400 and 'invalid_token' in resp.content:
url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?%s=%s'
% ('access_token', token))
else:
time.sleep(wait)
wait = wait + i
return user.get('user_id', '')
if id_type == "custom":
        # implement your own user_id creation and getting algorithm
# this is just a sample that queries datastore for an existing profile
# and generates an id if profile does not exist for an email
profile = Conference.query(Conference.mainEmail == user.email())
if profile:
return profile.id()
else:
return str(uuid.uuid1().get_hex())
|
Python
| 0
|
@@ -30,20 +30,8 @@
time
-%0Aimport uuid
%0A%0Afr
@@ -74,34 +74,8 @@
tch%0A
-from models import Profile
%0A%0Ade
@@ -1104,435 +1104,4 @@
'')%0A
-%0A if id_type == %22custom%22:%0A # implement your own user_id creation and getting algorythm%0A # this is just a sample that queries datastore for an existing profile%0A # and generates an id if profile does not exist for an email%0A profile = Conference.query(Conference.mainEmail == user.email())%0A if profile:%0A return profile.id()%0A else:%0A return str(uuid.uuid1().get_hex())%0A
|
a4ff022e9cbacca75febfcb3eaceea462078b721
|
bump the default timeout..
|
pssh.py
|
pssh.py
|
#!/usr/bin/env kpython
# Parallel SSH to a list of nodes, returned from search-ec2-tags.py
# (must be in your path).
#
# Waits for nodes to respond, then outputs their stdout,stderr color coded.
#
# ./pssh.py --query 'ec2_tag' 'command_to_run'
#
# Options:
# -h, --help show this help message and exit
# --query=QUERY the string to pass search-ec2-tags.py
# --host=HOSTS comma-sep list of hosts to ssh to
# --no-color disable or enable color
# --keep-ssh-warnings disable the removing of SSH warnings from stderr output
# --connect-timeout ssh ConnectTimeout option
# --timeout amount of time to wait, before killing the ssh
#
import sys
import time
import subprocess
from optparse import OptionParser
def hilite(string, options, color='white', bold=False):
if options.no_color:
return string
attr = []
if color == 'green':
attr.append('32') # green
elif color == 'red':
attr.append('41') # red
else:
attr.append('37') # white
if bold:
attr.append('1')
return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), string)
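# Example (assuming options.no_color is falsy):
#   hilite('ok', options, 'green')  ->  '\x1b[32mok\x1b[0m'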
def remove_ssh_warnings(stderr, options):
if options.keep_ssh_warnings:
return stderr
output = str(stderr).splitlines()
if len(output) == 0:
return None
if len(output) == 1:
return output[0]
if stderr[0].startswith('@'):
# 8 lines for a DNS spoofing warning
if 'POSSIBLE DNS SPOOFING' in output[1]:
output = output[8:]
# 13 lines for a remote host identification changed warning
if 'REMOTE HOST IDENTIFICATION' in output[1]:
output = output[13:]
if len(output) == 0:
return None
if len(output) == 1:
return output[0]
return '\n'.join(output)
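# Example: a 13-line "REMOTE HOST IDENTIFICATION HAS CHANGED" banner is
# dropped wholesale, so only the command's real stderr (if any) remains.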
def query(string):
stdout, stderr = subprocess.Popen(['kpython', '/usr/local/bin/search-ec2-tags.py'] + string.split(),
stderr=subprocess.PIPE, stdout=subprocess.PIPE).communicate()
print "matched the following hosts: %s" % ', '.join(stdout.splitlines())
if stderr:
return None
return stdout.splitlines()
if __name__ == '__main__':
parser = OptionParser(usage=__doc__)
parser.add_option("--query", help='the string to pass search-ec2-tags.py', default=False)
parser.add_option("--host", help='comma-sep list of hosts to ssh to', default=False)
parser.add_option("--timeout", help='amount of time to wait before killing the ssh',
default=60)
parser.add_option("--connect-timeout", help='ssh ConnectTimeout option',
default=10)
parser.add_option("--no-color", action="store_true", help="disable or enable color",
default=False)
parser.add_option("--keep-ssh-warnings", action="store_true",
help="disable the removing of SSH warnings from stderr output",
default=False)
(options, args) = parser.parse_args()
procs = []
command = args[0]
hosts = ['ops-dev005.krxd.net', 'ops-dev001.krxd.net']
if options.query:
hosts = query(options.query)
if not hosts:
print hilite("Sorry, search-ec2-tags.py returned an error:\n %s" % hosts, options, 'red')
sys.exit(1)
if options.host:
        hosts = [host.strip() for host in options.host.split(',')]
for host in hosts:
proc = subprocess.Popen("ssh -oStrictHostKeyChecking=no -oConnectTimeout=%s %s '%s'" %
(options.connect_timeout, host, command), shell=True,
stderr=subprocess.PIPE, stdout=subprocess.PIPE)
procs.append(proc)
index = 0
ticks = 0
while 1:
# nothing has returned, the first iteration, I bet.
if ticks < 2:
time.sleep(1)
ticks += 1
host = hosts[index]
proc = procs[index]
if proc.poll() is not None:
stdout, stderr = proc.communicate()
print "[%s]" % hilite(host, options, bold=True)
if stdout:
print "STDOUT: \n%s" % hilite(stdout, options, 'green', False)
stderr = remove_ssh_warnings(stderr, options)
if stderr:
print "STDERR: \n%s" % hilite(stderr, options, 'red', False)
del procs[index]
del hosts[index]
elif ticks > 2:
print "waiting on these hosts, still: %s" % ', '.join(hosts)
time.sleep(1)
if len(procs) > index + 1:
index += 1
elif len(procs) == 0:
break
else:
index = 0
if ticks > options.timeout:
[bad.terminate() for bad in procs]
print hilite("\nSorry, the following hosts took too long, and I gave up: %s\n" % ','.join(hosts), options, 'red')
break
|
Python
| 0
|
@@ -2552,9 +2552,10 @@
ult=
-6
+12
0)%0A
|
f6d7707abcd80524857386d96495cc79795cd5d5
|
use htmlparser to get a word meaning in yahoo dictionary
|
ydict.py
|
ydict.py
|
import urllib.request
from html.parser import HTMLParser
class DictParser(HTMLParser):
# def __init__(self):
# super.__init__()
def handle_starttag(self, tag, attrs):
print("Encountered a start tag:", tag)
def handle_endtag(self, tag):
print("Encountered an end tag :", tag)
def handle_data(self, data):
print("Encountered some data :", data)
class ydict:
def __init__(self):
self.url = "http://tw.dictionary.search.yahoo.com/search?p="
def search(self, word):
opener = urllib.request.FancyURLopener({})
f = opener.open(self.url+word)
content = f.read()
dp = DictParser()
dp.feed(content.decode('UTF-8'))
# print(content.decode('UTF-8'))
|
Python
| 0.000006
|
@@ -84,18 +84,16 @@
er):%0A
- #
def __i
@@ -108,18 +108,16 @@
lf):%0A
- #
sup
@@ -118,16 +118,18 @@
super
+()
.__init_
@@ -140,255 +140,1939 @@
-def handle_starttag(self, tag, attrs):%0A print(%22Encountered a start tag:%22, tag)%0A def handle_endtag(self, tag):%0A print(%22Encountered an end tag :%22, tag)%0A def handle_data(self, data):%0A print(%22Encountered some data :%22, data)
+ self.content = False%0A # self.query_string = None%0A self.li_counter = 0%0A self.ignore_flag = False%0A def handle_starttag(self, tag, attrs):%0A if self.content and tag == %22li%22 and len(attrs) == 0:%0A self.li_counter += 1%0A elif len(attrs) == 0:%0A pass%0A elif tag == %22li%22 and attrs%5B0%5D%5B1%5D == %22first%22:%0A print(%22first%22 + self.li_counter.__str__())%0A self.li_counter = 0%0A self.content = True%0A elif tag == %22li%22 and attrs%5B0%5D%5B1%5D == %22last%22:%0A print(%22last%22 + self.li_counter.__str__())%0A self.content = False%0A elif tag == %22span%22 and len(attrs) %3E 2:%0A if attrs%5B1%5D%5B1%5D == %22iconStyle%22:%0A self.ignore_flag = True%0A pass%0A # if self.content and (tag == %22a%22 or tag == %22span%22 or tag == %22b%22):%0A # print(%22Start tag: %22 + tag)%0A # for a in attrs:%0A # print(%22, %22 + str(a))%0A # print(%22Encountered a start tag:%22, tag)%0A def handle_endtag(self, tag):%0A pass%0A # if self.content and tag == %22li%22 and len(attrs) == 0:%0A # self.li_counter -= 1%0A # print(%22Encountered an end tag :%22, tag)%0A def handle_data(self, data):%0A if self.content == False or self.ignore_flag:%0A if self.ignore_flag:%0A self.ignore_flag = False%0A elif self.li_counter == 0:%0A print(%22query word:%22 + data)%0A elif self.li_counter == 1:%0A print(%22content:%22 + data)%0A elif self.li_counter == 2:%0A print(%22forms:%22 + data)%0A elif self.li_counter == 3:%0A print(%22sym:%22 + data)%0A # if data == self.query_string:%0A # self.content = True%0A # if self.content and (self.lasttag == %22a%22 or self.lasttag == %22span%22 or self.lasttag == %22b%22):%0A # print(%22data :%22, data)%0A # def set_query_str(self, query_str):%0A # self.query_string = query_str
%0A%0Acl
@@ -2346,16 +2346,51 @@
arser()%0A
+ # dp.set_query_str(%22test%22)%0A
|
0545539a6d3df83af57f973a82cff2961cbe32ec
|
Test db login
|
km3pipe/tests/test_db.py
|
km3pipe/tests/test_db.py
|
# coding=utf-8
# Filename: test_db.py
# pylint: disable=C0111,E1003,R0904,C0103,R0201,C0102
from __future__ import division, absolute_import, print_function
from km3pipe.testing import TestCase
from km3pipe.db import DOMContainer
__author__ = "Tamas Gal"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Tamas Gal"
__email__ = "tgal@km3net.de"
__status__ = "Development"
DET_ID = 'det_id1'
JSON_DOMS = [{'DOMId': 1, 'Floor': 10, 'CLBUPI': '100', 'DetOID': DET_ID},
{'DOMId': 2, 'Floor': 20, 'CLBUPI': '200', 'DetOID': DET_ID},
{'DOMId': 3, 'Floor': 30, 'CLBUPI': '300', 'DetOID': DET_ID},
{'DOMId': 4, 'Floor': 40, 'CLBUPI': '400', 'DetOID': 'det_id2'}]
class TestDBManager(TestCase):
pass
class TestDOMContainer(TestCase):
def test_init(self):
DOMContainer(None)
def setUp(self):
self.dc = DOMContainer(JSON_DOMS)
def test_ids_returns_dom_ids(self):
self.assertListEqual([1, 2, 3], self.dc.ids(DET_ID))
def test_json_list_lookup(self):
lookup = self.dc._json_list_lookup('DOMId', 1, 'Floor', DET_ID)
self.assertEqual(10, lookup)
def test_clbupi2floor(self):
self.assertEqual(10, self.dc.clbupi2floor('100', DET_ID))
self.assertEqual(20, self.dc.clbupi2floor('200', DET_ID))
self.assertEqual(30, self.dc.clbupi2floor('300', DET_ID))
def test_clbupi2domid(self):
self.assertEqual(1, self.dc.clbupi2domid('100', DET_ID))
self.assertEqual(2, self.dc.clbupi2domid('200', DET_ID))
self.assertEqual(3, self.dc.clbupi2domid('300', DET_ID))
|
Python
| 0.000001
|
@@ -189,16 +189,27 @@
TestCase
+, MagicMock
%0A%0Afrom k
@@ -226,27 +226,73 @@
import D
-OMContainer
+BManager, DOMContainer%0Afrom km3pipe.logger import logging
%0A%0A__auth
@@ -507,17 +507,16 @@
pment%22%0A%0A
-%0A
DET_ID =
@@ -830,16 +830,47 @@
id2'%7D%5D%0A%0A
+log = logging.getLogger('db')%0A%0A
%0Aclass T
@@ -897,16 +897,341 @@
e):%0A
+%0A
-pass
+def test_login_called_on_init_when_credentials_are_provided(self):%0A user = 'user'%0A pwd = 'god'%0A%0A DBManager.login = MagicMock()%0A db = DBManager(username=user, password=pwd)%0A self.assertEqual(1, DBManager.login.call_count)%0A self.assertTupleEqual((user, pwd), DBManager.login.call_args%5B0%5D)
%0A%0A%0Ac
|
2dd94619f75b05746293fa87eaed73fc52a5ba71
|
return all schedules from worker method and trim the list from the main method. also improve logging as a result
|
classtime/scheduling/schedule_generator.py
|
classtime/scheduling/schedule_generator.py
|
import multiprocessing
from angular_flask.logging import logging
logging = logging.getLogger(__name__) # pylint: disable=C0103
import classtime
import heapq
from .schedule import Schedule
CANDIDATE_POOL_SIZE = 120
"""Number of schedules to keep in consideration at any one time"""
WORKERS = 16
"""Maximum number of worker processes to spawn"""
WORKLOAD_SIZE = CANDIDATE_POOL_SIZE / WORKERS
"""Number of candidate schedules to give to each worker process"""
def find_schedules(schedule_params, num_requested):
"""
    :param dict schedule_params: parameters to build the schedule with.
        Check :ref:`api/generate-schedules <api-generate-schedules>`
        for available parameters.
    :param int num_requested: maximum number of schedules to return
"""
if 'term' not in schedule_params:
logging.error("Schedule generation call did not specify <term>")
term = schedule_params.get('term', '')
institution = schedule_params.get('institution', 'ualberta')
cal = classtime.get_calendar(institution)
if 'courses' not in schedule_params:
logging.error("Schedule generation call did not specify <courses>")
course_ids = schedule_params.get('courses', list())
busy_times = schedule_params.get('busy-times', list())
logging.info('Received schedule request')
schedules = _generate_schedules(cal,
term, course_ids, busy_times,
num_requested)
if len(schedules) == 0:
logging.error('No schedules found for q={}'.format(
schedule_params))
else:
logging.info('Returning {} schedules from request q={}'.format(
len(schedules), schedule_params))
return schedules
def _generate_schedules(cal, term, course_ids, busy_times, num_requested):
"""Generate a finite number of schedules
:param int num_requested: maximum number of schedules to return.
Upper limit is CANDIDATE_POOL_SIZE.
Will only return valid schedules, even if that means returning
less than the requested number.
:returns: the best possible schedules, sorted by ScheduleScorer
scoring functions
:rtype: list of :ref:`schedule objects <api-schedule-object>`
"""
def _log_scheduling_component(num, component, pace):
logging.debug('({symbol}/{num}) Scheduling {name}'.format(
symbol=Schedule.SYMBOLS[pace],
num=num,
name=' '.join(component[0].get('asString').split()[:-1])))
components = cal.get_components(term, course_ids)
components = sorted(components, key=len)
candidates = [Schedule(busy_times=busy_times)]
for pace, component in enumerate(components):
_log_scheduling_component(len(components), component, pace)
candidates = _add_component(candidates, component, pace)
candidates = [candidate for candidate in candidates
if len(candidate.sections) == len(components)]
logging.debug('Generated {} schedules'.format(len(candidates)))
return sorted(candidates, reverse=True)[:num_requested]
def _add_component(candidates, component, pace):
"""
Schedule generation algorithm
1. Pick a schedule candidate from the list.
2. Pick a section ("A2") for a component ("LAB") of a course ("CHEM")
2b. If the section conflicts with the schedule, throw it out
2c. Otherwise, add it to the schedule.
3. Do 2 for all section options ("A3") for the component ("LAB").
4. Do 3 for all components ("LAB") of a course ("CHEM")
5. Do 4 for all schedule candidates
6. Do battle royale with the schedules. Only keep the best.
7. Add the next component using (1->6).
8. Repeat until all courses are scheduled.
"""
def _candidate_battle_royale(candidates, component, pace, heap_size, out_q):
"""Put the `heap_size` best candidates onto the `out_q`
:param list candidates: candidate schedules
:param list component: sections to consider. Exactly one is added to any
given schedule.
:param int pace: the number of components which should already have been
added to a schedule. If a schedule has less than this, it can never
be a complete schedule. Therefore, time should not be wasted on it.
:param int heap_size: number of candidate schedules which should escape
alive
:param multiprocessing.Queue out_q: a multiprocessing Queue to put
results onto.
        :returns: nothing; the surviving candidate schedules are put
            onto `out_q`
"""
for candidate in candidates[:]:
if _is_hopeless(candidate, pace):
continue
for section in component:
if candidate.has_dependency_conflict(section):
continue
if candidate.conflicts(section):
continue
_add_candidates(candidates,
candidate.clone().add_section(section),
heap_size)
out_q.put(candidates)
return
out_q = multiprocessing.Queue()
procs = list()
for chunk in _chunks(candidates):
proc = multiprocessing.Process(
target=_candidate_battle_royale,
args=(chunk, component, pace,
WORKLOAD_SIZE+1, out_q))
procs.append(proc)
proc.start()
candidates = list()
for _ in range(len(procs)):
candidates.extend(out_q.get())
candidates = candidates[:CANDIDATE_POOL_SIZE]
for proc in procs:
proc.join()
return candidates
def _add_candidates(candidates, candidate, heap_size):
discard = heapq.heapreplace(candidates, candidate)
if len(candidates) < heap_size:
heapq.heappush(candidates, discard)
def _is_hopeless(candidate, sections_chosen):
return len(candidate.sections) < sections_chosen
# http://stackoverflow.com/questions/312443/how-do-you-split-a-list-into-evenly-sized-chunks-in-python
def _chunks(full_list, chunk_size=None):
""" Yield successive n-sized chunks from l.
"""
if chunk_size is None:
chunk_size = WORKLOAD_SIZE
for i in xrange(0, len(full_list), chunk_size):
yield full_list[i:i+chunk_size]
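# Example (not executed): under Python 2, list(_chunks(range(10), 3))
# yields [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]].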
|
Python
| 0.000002
|
@@ -1396,31 +1396,8 @@
imes
-,%0A num_requested
)%0A
@@ -1556,16 +1556,19 @@
rning %7B%7D
+/%7B%7D
schedul
@@ -1613,23 +1613,83 @@
-len(schedules),
+min(num_requested, len(schedules)),%0A len(schedules),%0A
sch
@@ -1723,16 +1723,32 @@
chedules
+%5B:num_requested%5D
%0A%0Adef _g
@@ -1798,31 +1798,16 @@
sy_times
-, num_requested
):%0A %22
@@ -3067,32 +3067,16 @@
se=True)
-%5B:num_requested%5D
%0A%0Adef _a
|
bd23202dca2ac26c324aa036d9b9b95092cc43b8
|
fix joined options parsing.
|
wa/framework/entrypoint.py
|
wa/framework/entrypoint.py
|
# Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import argparse
import logging
import os
import subprocess
import warnings
from wa.framework import pluginloader
from wa.framework.command import init_argument_parser
from wa.framework.configuration import settings
from wa.framework.configuration.execution import ConfigManager
from wa.framework.host import init_user_directory
from wa.framework.exception import WAError, DevlibError, ConfigError
from wa.utils import log
from wa.utils.doc import format_body
from wa.utils.misc import get_traceback
warnings.filterwarnings(action='ignore', category=UserWarning, module='zope')
logger = logging.getLogger('command_line')
def load_commands(subparsers):
commands = {}
for command in pluginloader.list_commands():
commands[command.name] = pluginloader.get_command(command.name,
subparsers=subparsers)
return commands
def main():
if not os.path.exists(settings.user_directory):
init_user_directory()
try:
description = ("Execute automated workloads on a remote device and process "
"the resulting output.\n\nUse \"wa <subcommand> -h\" to see "
"help for individual subcommands.")
parser = argparse.ArgumentParser(description=format_body(description, 80),
prog='wa',
formatter_class=argparse.RawDescriptionHelpFormatter,
)
init_argument_parser(parser)
# load_commands will trigger plugin enumeration, and we want logging
# to be enabled for that, which requires the verbosity setting; however
        # full argument parse cannot be completed until the commands are loaded; so
        # parse just the base args for now so we can get verbosity.
args, _ = parser.parse_known_args()
settings.set("verbosity", args.verbose)
log.init(settings.verbosity)
# each command will add its own subparser
commands = load_commands(parser.add_subparsers(dest='command'))
args = parser.parse_args()
config = ConfigManager()
config.load_config_file(settings.user_config_file)
for config_file in args.config:
if not os.path.exists(config_file):
raise ConfigError("Config file {} not found".format(config_file))
config.load_config_file(config_file)
command = commands[args.command]
sys.exit(command.execute(config, args))
except KeyboardInterrupt:
logging.info('Got CTRL-C. Aborting.')
sys.exit(3)
except Exception as e: # pylint: disable=broad-except
if not getattr(e, 'logged', None):
log.log_error(e, logger)
sys.exit(2)
|
Python
| 0.000005
|
@@ -1491,16 +1491,610 @@
mands%0A%0A%0A
+# ArgumentParser.parse_known_args() does not correctly deal with concatenated%0A# single character options. See https://bugs.python.org/issue16142 for the%0A# description of the issue (with a fix attached since 2013!). To get around%0A# this problem, this will pre-process sys.argv to detect such joined options%0A# and split them.%0Adef split_joined_options(argv):%0A output = %5B%5D%0A for part in argv:%0A if len(part) %3E 1 and part%5B0%5D == '-' and part%5B1%5D != '-':%0A for c in part%5B1:%5D:%0A output.append('-' + c)%0A else:%0A output.append(part)%0A return output%0A%0A%0A
def main
@@ -3039,16 +3039,66 @@
bosity.%0A
+ argv = split_joined_options(sys.argv%5B1:%5D)%0A
@@ -3131,16 +3131,20 @@
wn_args(
+argv
)%0A
@@ -3380,16 +3380,20 @@
se_args(
+argv
)%0A%0A
|
685db5867dcb86e462e46a923b154e09e5f64d20
|
fix aux refresh queue
|
queue_pub_refresh_aux.py
|
queue_pub_refresh_aux.py
|
import argparse
import logging
import os
from time import sleep
from time import time
from sqlalchemy import orm, text
from app import db
from app import logger
from queue_main import DbQueue
from util import elapsed
from util import safe_commit
from pub import Pub # foul magic
import endpoint # magic
import pmh_record # more magic
class DbQueuePubRefreshAux(DbQueue):
def table_name(self, job_type):
return 'pub_refresh_queue_aux'
def process_name(self, job_type):
return 'run_aux_pub_refresh'
def worker_run(self, **kwargs):
chunk_size = kwargs.get("chunk", 100)
limit = kwargs.get("limit", None)
queue_no = kwargs.get("queue", 0)
if limit is None:
limit = float("inf")
index = 0
num_updated = 0
start_time = time()
while num_updated < limit:
new_loop_start_time = time()
objects = self.fetch_queue_chunk(chunk_size, queue_no)
if not objects:
sleep(5)
continue
for o in objects:
o.refresh()
finish_batch_text = u'''
update {queue_table}
set finished = now(), started = null, priority = null
where id = any(:ids)'''.format(queue_table=self.table_name(None))
finish_batch_command = text(finish_batch_text).bindparams(
ids=[o.id for o in objects]
)
db.session.execute(finish_batch_command)
commit_start_time = time()
safe_commit(db) or logger.info(u"COMMIT fail")
logger.info(u"commit took {} seconds".format(elapsed(commit_start_time, 2)))
index += 1
num_updated += chunk_size
self.print_update(new_loop_start_time, len(objects), limit, start_time, index)
def fetch_queue_chunk(self, chunk_size, queue_no):
logger.info(u"looking for new jobs")
text_query_pattern = u'''
with refresh_queue as (
select id
from {queue_table}
where
queue_no = {queue_no}
and started is null
order by
priority desc,
rand
limit {chunk_size}
for update skip locked
)
update {queue_table} queue_rows_to_update
set started = now()
from refresh_queue
where refresh_queue.id = queue_rows_to_update.id
returning refresh_queue.id;
'''
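        # Note: "for update skip locked" lets concurrent workers claim
        # disjoint chunks -- rows already locked by one worker are
        # silently skipped by the others instead of blocking.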
text_query = text_query_pattern.format(
chunk_size=chunk_size,
queue_table=self.table_name(None),
queue_no=queue_no
)
logger.info(u"the queue query is:\n{}".format(text_query))
job_time = time()
row_list = db.engine.execute(text(text_query).execution_options(autocommit=True)).fetchall()
object_ids = [row[0] for row in row_list]
logger.info(u"got {} ids, took {} seconds".format(len(object_ids), elapsed(job_time)))
job_time = time()
q = db.session.query(Pub).options(
orm.undefer('*')
).filter(Pub.id.in_(object_ids))
objects = q.all()
logger.info(u"got pub objects in {} seconds".format(elapsed(job_time)))
return objects
if __name__ == "__main__":
if os.getenv('OADOI_LOG_SQL'):
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
db.session.configure()
parser = argparse.ArgumentParser(description="Run stuff.")
parser.add_argument('--run', default=False, action='store_true', help="to run the queue")
parser.add_argument('--limit', "-l", nargs="?", type=int, help="how many jobs to do")
parser.add_argument('--chunk', "-ch", nargs="?", default=1, type=int, help="how many to take off db at once")
parser.add_argument('--queue', "-q", nargs="?", default=0, type=int, help="which queue to run")
parser.add_argument('--dynos', default=None, type=int, help="don't use this option")
parser.add_argument('--reset', default=False, action='store_true', help="don't use this option")
parser.add_argument('--status', default=False, action='store_true', help="don't use this option")
parser.add_argument('--logs', default=False, action='store_true', help="don't use this option")
parser.add_argument('--monitor', default=False, action='store_true', help="don't use this option")
parser.add_argument('--kick', default=False, action='store_true', help="don't use this option")
parser.add_argument('--id', nargs="?", type=str, help="don't use this option")
parser.add_argument('--doi', nargs="?", type=str, help="don't use this option")
parser.add_argument('--method', nargs="?", type=str, default="update", help="don't use this option")
parsed_args = parser.parse_args()
job_type = "normal" # should be an object attribute
my_queue = DbQueuePubRefreshAux()
my_queue.parsed_vars = vars(parsed_args)
my_queue.run_right_thing(parsed_args, job_type)
|
Python
| 0
|
@@ -2250,16 +2250,62 @@
y desc,%0A
+ finished asc nulls first,%0A
|
ffbc39b4eeb4a3e4850f83faa13c1ddf616d2328
|
Add mail to administrators
|
tools/wcloud/wcloud/utils.py
|
tools/wcloud/wcloud/utils.py
|
import sys
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
DEFAULT_EMAIL_HOST = 'mail.deusto.es'
EMAILS_SENT = []
def send_email(app, body_text, subject, from_email, to_email, body_html=None):
email_host = app.config.get('EMAIL_HOST', DEFAULT_EMAIL_HOST)
if app.config.get('TESTING', False) or app.config.get('DEBUG', False):
print "Faking request (%s, %s)" %( app.config.get('TESTING', False), app.config.get('DEBUG', False))
sys.stdout.flush()
EMAILS_SENT.append(body_html)
else:
print "Sending mail using %s" % email_host
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg['From'] = from_email
msg['To'] = to_email
        part1 = MIMEText(body_text, 'plain')
        msg.attach(part1)
        if body_html is not None:
            part2 = MIMEText(body_html, 'html')
msg.attach(part2)
s = smtplib.SMTP(email_host)
s.sendmail(from_email, (to_email, from_email), msg.as_string())
print "Mail sent using %s" % email_host
sys.stdout.flush()
if __name__ == '__main__':
class Fake(): pass
fake_app = Fake()
fake_app.config = {}
send_email(fake_app, "Hi there. This is a test", "Test", "pablo.orduna@deusto.es", "pablo.orduna@deusto.es", """<b>Esto es negrita</b>""")
|
Python
| 0.000001
|
@@ -308,16 +308,90 @@
_HOST)%0A%0A
+ if isinstance(to_email, basestring):%0A to_email = %5B to_email %5D%0A%0A
if a
@@ -822,24 +822,34 @@
'To'%5D =
+', '.join(
to_email
%0A%0A
@@ -840,16 +840,17 @@
to_email
+)
%0A%0A
@@ -1035,16 +1035,187 @@
part2)%0A%0A
+ total_to_email = %5B%5D%0A total_to_email.extend(to_email)%0A total_to_email.extend(app.config%5B'ADMINISTRATORS'%5D)%0A total_to_email.append(from_email)%0A%0A
@@ -1287,23 +1287,22 @@
il,
-(to_email, from
+tuple(total_to
_ema
|
c42882f0a8d8cec154d0ad247332f961c1585170
|
Fix `ValueError: The 'file' attribute has no file associated with it.`
|
wagtail/wagtaildocs/views/documents.py
|
wagtail/wagtaildocs/views/documents.py
|
from django.shortcuts import render, redirect, get_object_or_404
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.contrib.auth.decorators import permission_required
from django.core.exceptions import PermissionDenied
from django.utils.translation import ugettext as _
from django.views.decorators.vary import vary_on_headers
from django.core.urlresolvers import reverse
from wagtail.wagtailadmin.forms import SearchForm
from wagtail.wagtailsearch.backends import get_search_backends
from wagtail.wagtailadmin import messages
from wagtail.wagtaildocs.models import Document
from wagtail.wagtaildocs.forms import DocumentForm
@permission_required('wagtaildocs.add_document')
@vary_on_headers('X-Requested-With')
def index(request):
# Get documents
documents = Document.objects.all()
# Ordering
if 'ordering' in request.GET and request.GET['ordering'] in ['title', '-created_at']:
ordering = request.GET['ordering']
else:
ordering = '-created_at'
documents = documents.order_by(ordering)
# Permissions
if not request.user.has_perm('wagtaildocs.change_document'):
# restrict to the user's own documents
documents = documents.filter(uploaded_by_user=request.user)
# Search
query_string = None
if 'q' in request.GET:
form = SearchForm(request.GET, placeholder=_("Search documents"))
if form.is_valid():
query_string = form.cleaned_data['q']
if not request.user.has_perm('wagtaildocs.change_document'):
# restrict to the user's own documents
documents = Document.search(query_string, filters={'uploaded_by_user_id': request.user.id})
else:
documents = Document.search(query_string)
else:
form = SearchForm(placeholder=_("Search documents"))
# Pagination
p = request.GET.get('p', 1)
paginator = Paginator(documents, 20)
try:
documents = paginator.page(p)
except PageNotAnInteger:
documents = paginator.page(1)
except EmptyPage:
documents = paginator.page(paginator.num_pages)
# Create response
if request.is_ajax():
return render(request, 'wagtaildocs/documents/results.html', {
'ordering': ordering,
'documents': documents,
'query_string': query_string,
'is_searching': bool(query_string),
})
else:
return render(request, 'wagtaildocs/documents/index.html', {
'ordering': ordering,
'documents': documents,
'query_string': query_string,
'is_searching': bool(query_string),
'search_form': form,
'popular_tags': Document.popular_tags(),
})
@permission_required('wagtaildocs.add_document')
def add(request):
if request.POST:
doc = Document(uploaded_by_user=request.user)
form = DocumentForm(request.POST, request.FILES, instance=doc)
if form.is_valid():
form.save()
# Reindex the document to make sure all tags are indexed
for backend in get_search_backends():
backend.add(doc)
messages.success(request, _("Document '{0}' added.").format(doc.title), buttons=[
messages.button(reverse('wagtaildocs_edit_document', args=(doc.id,)), _('Edit'))
])
return redirect('wagtaildocs_index')
else:
messages.error(request, _("The document could not be saved due to errors."))
else:
form = DocumentForm()
return render(request, "wagtaildocs/documents/add.html", {
'form': form,
})
def edit(request, document_id):
doc = get_object_or_404(Document, id=document_id)
if not doc.is_editable_by_user(request.user):
raise PermissionDenied
if request.POST:
original_file = doc.file
form = DocumentForm(request.POST, request.FILES, instance=doc)
if form.is_valid():
if 'file' in form.changed_data:
# if providing a new document file, delete the old one.
# NB Doing this via original_file.delete() clears the file field,
# which definitely isn't what we want...
original_file.storage.delete(original_file.name)
doc = form.save()
# Reindex the document to make sure all tags are indexed
for backend in get_search_backends():
backend.add(doc)
messages.success(request, _("Document '{0}' updated").format(doc.title), buttons=[
messages.button(reverse('wagtaildocs_edit_document', args=(doc.id,)), _('Edit'))
])
return redirect('wagtaildocs_index')
else:
messages.error(request, _("The document could not be saved due to errors."))
else:
form = DocumentForm(instance=doc)
# Get file size
try:
filesize = doc.file.size
except OSError:
# File doesn't exist
filesize = None
messages.error(request, _("The file could not be found. Please change the source or delete the document"), buttons=[
messages.button(reverse('wagtaildocs_delete_document', args=(doc.id,)), _('Delete'))
])
return render(request, "wagtaildocs/documents/edit.html", {
'document': doc,
'filesize': filesize,
'form': form
})
def delete(request, document_id):
doc = get_object_or_404(Document, id=document_id)
if not doc.is_editable_by_user(request.user):
raise PermissionDenied
if request.POST:
doc.delete()
messages.success(request, _("Document '{0}' deleted.").format(doc.title))
return redirect('wagtaildocs_index')
return render(request, "wagtaildocs/documents/confirm_delete.html", {
'document': doc,
})
def usage(request, document_id):
doc = get_object_or_404(Document, id=document_id)
# Pagination
p = request.GET.get('p', 1)
paginator = Paginator(doc.get_usage(), 20)
try:
used_by = paginator.page(p)
except PageNotAnInteger:
used_by = paginator.page(1)
except EmptyPage:
used_by = paginator.page(paginator.num_pages)
return render(request, "wagtaildocs/documents/usage.html", {
'document': doc,
'used_by': used_by
})
|
Python
| 0.999991
|
@@ -4930,24 +4930,123 @@
-# Get file size%0A
+filesize = None%0A%0A # Get file size when there is a file associated with the Document object%0A if doc.file:%0A
@@ -5042,32 +5042,36 @@
e:%0A try:%0A
+
filesize
@@ -5087,24 +5087,28 @@
le.size%0A
+
+
except OSErr
@@ -5111,16 +5111,20 @@
SError:%0A
+
@@ -5148,32 +5148,53 @@
ist%0A
+ pass%0A%0A if not
filesize
= None%0A
@@ -5181,23 +5181,17 @@
filesize
- = None
+:
%0A
|
abf3758d86c1ee37e458d79e62be69e4c23e515c
|
switch from single quote to double quote
|
wqflask/tests/wqflask/show_trait/test_export_trait_data.py
|
wqflask/tests/wqflask/show_trait/test_export_trait_data.py
|
import unittest
from wqflask.show_trait.export_trait_data import dict_to_sorted_list
from wqflask.show_trait.export_trait_data import cmp_samples
class TestExportTraits(unittest.TestCase):
"""Test methods related to converting dict to sortedlist"""
def test_dict_to_sortedlist(self):
'''test for conversion of dict to sorted list'''
sample1 = {
"other": "exp1",
"name": "exp2"
}
sample2 = {
"se": 1,
"num_cases": 4,
"value": 6,
"name": 3
}
rever = {
"name": 3,
"value": 6,
"num_cases": 4,
"se": 1
}
oneItem = {
"item1": "one"
}
self.assertEqual(["exp2", "exp1"], dict_to_sorted_list(sample1))
self.assertEqual([3, 6, 1, 4], dict_to_sorted_list(sample2))
self.assertEqual([3, 6, 1, 4], dict_to_sorted_list(rever))
self.assertEqual(["one"], dict_to_sorted_list(oneItem))
'''test that the func returns the values not the keys'''
self.assertFalse(["other", "name"] == dict_to_sorted_list(sample1))
def test_cmp_samples(self):
'''test for comparing samples function'''
sampleA = [
[
("value", "other"),
("name", "test_name")
]
]
sampleB = [
[
("value", "other"),
("unknown", "test_name")
]
]
sampleC = [
[("other", "value"),
("name", "value")
],
[
("name", "value"),
("value", "name")
],
[
("other", "value"),
("name", "value"
)],
[
("name", "name1"),
("se", "valuex")
],
[(
"value", "name1"),
("se", "valuex")
],
[(
"other", "name1"),
("se", "valuex"
)
],
[(
"name", "name_val"),
("num_cases", "num_val")
],
[(
"other_a", "val_a"),
("other_b", "val"
)
]
]
results = [cmp_samples(val[0], val[1]) for val in sampleA]
resultB = [cmp_samples(val[0], val[1]) for val in sampleB]
resultC = [cmp_samples(val[0], val[1]) for val in sampleC]
self.assertEqual(1, *results)
self.assertEqual(-1, *resultB)
self.assertEqual([1, -1, 1, -1, -1, 1, -1, -1], resultC)
|
Python
| 0
|
@@ -288,35 +288,35 @@
(self):%0A
-'''
+%22%22%22
test for convers
@@ -341,19 +341,19 @@
ted list
-'''
+%22%22%22
%0A
@@ -1015,27 +1015,27 @@
m))%0A
-'''
+%22%22%22
test that th
@@ -1072,19 +1072,19 @@
the keys
-'''
+%22%22%22
%0A
@@ -1193,19 +1193,19 @@
-'''
+%22%22%22
test for
@@ -1235,11 +1235,11 @@
tion
-'''
+%22%22%22
%0A
|
3781478aa4c43165b277f77dfe918bbedfd94116
|
Add Traditional Chinese Translation (#9669)
|
warehouse/i18n/__init__.py
|
warehouse/i18n/__init__.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
from babel.core import Locale
from pyramid import viewderivers
from pyramid.i18n import TranslationStringFactory, default_locale_negotiator
from pyramid.threadlocal import get_current_request
from warehouse.cache.http import add_vary
KNOWN_LOCALES = {
identifier: Locale.parse(identifier, sep="_")
for identifier in [
"en", # English
"es", # Spanish
"fr", # French
"ja", # Japanese
"pt_BR", # Brazilian Portugeuse
"uk", # Ukranian
"el", # Greek
"de", # German
"zh_Hans", # Simplified Chinese
"ru", # Russian
"he", # Hebrew
"eo", # Esperanto
]
}
LOCALE_ATTR = "_LOCALE_"
_translation_factory = TranslationStringFactory("messages")
class LazyString:
def __init__(self, fn, *args, **kwargs):
self.fn = fn
self.args = args
self.mapping = kwargs.get("mapping", {})
self.kwargs = kwargs
def __json__(self, request):
return str(self)
def __mod__(self, new_mapping):
mapping = self.mapping.copy()
mapping.update(new_mapping)
        return LazyString(self.fn, *self.args, mapping=mapping, **self.kwargs)
def __str__(self):
return self.fn(*self.args, **self.kwargs)
def _locale(request):
"""
Gets a babel.core:Locale() object for this request.
"""
return KNOWN_LOCALES.get(request.locale_name, KNOWN_LOCALES["en"])
def _negotiate_locale(request):
locale_name = getattr(request, LOCALE_ATTR, None)
if locale_name is not None:
return locale_name
locale_name = request.params.get(LOCALE_ATTR)
if locale_name is not None:
return locale_name
locale_name = request.cookies.get(LOCALE_ATTR)
if locale_name is not None:
return locale_name
if not request.accept_language:
return default_locale_negotiator(request)
return request.accept_language.best_match(
tuple(KNOWN_LOCALES.keys()), default_match=default_locale_negotiator(request)
)
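# Negotiation order, per the checks above: request attribute, query
# parameter, cookie, then Accept-Language, falling back to the default
# negotiator when nothing matches.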
def _localize(request, message, **kwargs):
"""
To be used on the request directly, e.g. `request._(message)`
"""
return request.localizer.translate(_translation_factory(message, **kwargs))
def localize(message, **kwargs):
"""
To be used when we don't have the request context, e.g.
`from warehouse.i18n import localize as _`
"""
def _lazy_localize(message, **kwargs):
request = get_current_request()
return _localize(request, message, **kwargs)
return LazyString(_lazy_localize, message, **kwargs)
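# Example (hypothetical message id): greeting = localize("Hello") binds
# no request; the actual lookup happens lazily in LazyString.__str__
# once a request context is available.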
class InvalidLocalizer:
def _fail(self):
raise RuntimeError("Cannot use localizer without has_translations=True")
@property
def locale_name(self):
self._fail()
def pluralize(self, *args, **kwargs):
self._fail()
def translate(self, *args, **kwargs):
self._fail()
def translated_view(view, info):
if info.options.get("has_translations"):
# If this page can be translated, then we'll add a Vary: PyPI-Locale
# Vary header.
# Note: This will give weird results if hitting PyPI directly instead of through
# the Fastly VCL which sets PyPI-Locale.
return add_vary("PyPI-Locale")(view)
elif info.exception_only:
return view
else:
# If we're not using translations on this view, then we'll wrap the view
# with a wrapper that just ensures that the localizer cannot be used.
@functools.wraps(view)
def wrapped(context, request):
# This whole method is a little bit of an odd duck, we want to make
# sure that we don't actually *access* request.localizer, because
# doing so triggers the machinery to create a new localizer. So
# instead we will dig into the request object __dict__ to
# effectively do the same thing, just without triggering an access
# on request.localizer.
# Save the original session so that we can restore it once the
# inner views have been called.
nothing = object()
original_localizer = request.__dict__.get("localizer", nothing)
# This particular view hasn't been set to allow access to the
# translations, so we'll just assign an InvalidLocalizer to
# request.localizer
request.__dict__["localizer"] = InvalidLocalizer()
try:
# Invoke the real view
return view(context, request)
finally:
# Restore the original session so that things like
# pyramid_debugtoolbar can access it.
if original_localizer is nothing:
del request.__dict__["localizer"]
else:
request.__dict__["localizer"] = original_localizer
return wrapped
translated_view.options = {"has_translations"}
def includeme(config):
# Add the request attributes
config.add_request_method(_locale, name="locale", reify=True)
config.add_request_method(_localize, name="_")
# Register our translation directory.
config.add_translation_dirs("warehouse:locale/")
config.set_locale_negotiator(_negotiate_locale)
# Register our i18n/l10n filters for Jinja2
filters = config.get_settings().setdefault("jinja2.filters", {})
filters.setdefault("format_date", "warehouse.i18n.filters:format_date")
filters.setdefault("format_datetime", "warehouse.i18n.filters:format_datetime")
filters.setdefault(
"format_rfc822_datetime", "warehouse.i18n.filters:format_rfc822_datetime"
)
filters.setdefault("format_number", "warehouse.i18n.filters:format_number")
jglobals = config.get_settings().setdefault("jinja2.globals", {})
jglobals.setdefault("KNOWN_LOCALES", "warehouse.i18n:KNOWN_LOCALES")
config.add_view_deriver(
translated_view, over="rendered_view", under=viewderivers.INGRESS
)
|
Python
| 0
|
@@ -1131,24 +1131,66 @@
ied Chinese%0A
+ %22zh_Hant%22, # Traditional Chinese%0A
%22ru%22
|
b512c4340568ac1ac5a9fdba3dfd3c9cda84a9e8
|
Add path_exists() and assertions
|
utils.py
|
utils.py
|
import os
from tqdm import tqdm
import requests
import numpy as np
import lasagne
from cv2 import resize
import cv2
from scipy.misc import imsave
def create_dir_if_not_exists(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def download_if_not_exists(file_path, download_link, message=None, total_size=None):
if os.path.exists(file_path):
return
if message != None:
print(message)
create_dir_if_not_exists('/'.join(file_path.split('/')[:-1]))
download(file_path, download_link, total_size)
def download(file_path, download_link, total_size):
"""
Based on code in this answer: http://stackoverflow.com/a/10744565/2427542
"""
response = requests.get(download_link, stream=True)
with open(file_path, "wb") as handle:
for data in tqdm(response.iter_content(), total=total_size):
handle.write(data)
def load_params(network, model_file):
assert os.path.exists(model_file)
with np.load(model_file) as f:
param_values = [f['arr_%d' % i] for i in range(len(f.files))]
lasagne.layers.set_all_param_values(network, param_values)
def get_image(path, dim=None, grey=False, maintain_aspect=True, center=True):
"""
Given an image path, return a 3D numpy array with the image. Maintains aspect ratio and center crops the image to match dim.
:type path: str
:param path: The location of the image
:type grey: boolean
:param grey: Whether the image should be returned in greyscale
:type dim: tuple
:param dim: The (height, width)
"""
assert os.path.exists(path)
if grey:
im = cv2.imread(path, cv2.IMREAD_GRAYSCALE)
im = np.expand_dims(im, axis=-1)
else:
# dimensions are (height, width, channel)
im = cv2.imread(path, cv2.IMREAD_COLOR)
im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
im = im.astype('float32')
im = im / 255.
if dim != None:
if not maintain_aspect:
im = resize(im, dim)
else:
im = resize_maintain_aspect(im, dim)
if center:
im = center_crop(im, dim)
im = im.transpose(2, 0, 1)
return im
def resize_maintain_aspect(im, dim):
"""
Resize an image while maintaining its aspect ratio. Resizes the smaller side of the image
to match the corresponding dimension length specified.
"""
# The reversal of get_aspect_maintained_dim()'s output is needed because OpenCV's
# resize() method takes the new size in the form (x, y)
return resize(im, get_aspect_maintained_dim(im.shape, dim)[::-1])
def get_aspect_maintained_dim(old_dim, new_dim):
"""
Given an image's dimension and the dimension to which it is to be resized, returns the dimension to which
the image can be resized while maintaining its aspect ratio.
"""
if old_dim[1] < old_dim[0]:
return (int(old_dim[0]*(new_dim[1]/(old_dim[1]*1.0))), new_dim[1])
else:
return (new_dim[0], int(old_dim[1]*(new_dim[0]/(old_dim[0]*1.0))))
def center_crop(im, dim):
"""
Center-crops a portion of dimensions `dim` from the image.
"""
    r = max(0, (im.shape[0]-dim[0])//2)
    c = max(0, (im.shape[1]-dim[1])//2)
return im[r:r+dim[0], c:c+dim[1], :]
def save_im(file_name, im):
"""
Saves an image in (channel, height, width) format.
"""
imsave(file_name, im.transpose(1, 2, 0))
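# Worked example (sketch, hypothetical values): for a 480x640 (height, width)
# image and dim=(224, 224):
#   get_aspect_maintained_dim((480, 640), (224, 224))
#   -> old_dim[1] >= old_dim[0], so (224, int(640 * 224 / 480.0)) == (224, 298)
# resize_maintain_aspect() resizes to 224x298, then center_crop() keeps the
# middle 224-wide window, so both sides match dim exactly.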
|
Python
| 0.000004
|
@@ -140,16 +140,69 @@
imsave%0A%0A
+def path_exists(path):%0A%09return os.path.exists(path)%0A%0A
def crea
@@ -242,24 +242,21 @@
%09if not
-os.
path
-.
+_
exists(d
@@ -381,24 +381,21 @@
e):%0A%09if
-os.
path
-.
+_
exists(f
@@ -923,32 +923,29 @@
e):%0A%09assert
-os.
path
-.
+_
exists(model
@@ -1523,24 +1523,21 @@
%09assert
-os.
path
-.
+_
exists(p
|
955f751965ff0e6f6cf0d5fe9d51b8bcc30281a0
|
Remove check for Wagtail-0.3-style urlconfs in wagtailimages
|
wagtail/wagtailimages/wagtail_hooks.py
|
wagtail/wagtailimages/wagtail_hooks.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.utils.html import format_html, format_html_join
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from wagtail.wagtailcore import hooks
from wagtail.wagtailadmin.menu import MenuItem
from wagtail.wagtailadmin.site_summary import SummaryItem
from wagtail.wagtailimages import admin_urls, image_operations
from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.rich_text import ImageEmbedHandler
@hooks.register('register_admin_urls')
def register_admin_urls():
return [
url(r'^images/', include(admin_urls, namespace='wagtailimages', app_name='wagtailimages')),
]
# Check for the presence of a pre-Wagtail-0.3-style urlconf, and fail loudly if one is found.
# Prior to Wagtail 0.3, the standard Wagtail urls.py contained an entry for
# wagtail.wagtailimages.urls rooted at '/admin/images/' or equivalent. As of Wagtail 0.5,
# the wagtailimages admin views are defined by wagtail.wagtailimages.admin_urls, and
# wagtail.wagtailimages.urls is used for front-end views instead - which means that those URLs
# will clash with the admin.
# This check can only be performed after the ROOT_URLCONF module has been fully imported. Since
# importing a urlconf module generally involves recursively importing a whole load of other things
# including models.py and wagtail_hooks.py, there is no obvious place to put this code at the
# module level without causing a circular import. We therefore put it in construct_main_menu, which
# is run frequently enough to ensure that the error message will not be missed. Yes, it's hacky :-(
OLD_STYLE_URLCONF_CHECK_PASSED = False
def check_old_style_urlconf():
global OLD_STYLE_URLCONF_CHECK_PASSED
# A faulty urls.py will place wagtail.wagtailimages.urls at the same path that
# wagtail.wagtailimages.admin_urls is loaded to, resulting in the wagtailimages_serve path
# being equal to wagtailimages:index followed by three arbitrary args
try:
wagtailimages_serve_path = urlresolvers.reverse('wagtailimages_serve', args=['123', '456', '789'])
except urlresolvers.NoReverseMatch:
# wagtailimages_serve is not defined at all, so there's no collision
OLD_STYLE_URLCONF_CHECK_PASSED = True
return
wagtailimages_index_path = urlresolvers.reverse('wagtailimages:index')
if wagtailimages_serve_path == wagtailimages_index_path + '123/456/789/':
raise ImproperlyConfigured("""Your urls.py contains an entry for %s that needs to be removed.
See http://wagtail.readthedocs.org/en/latest/releases/0.5.html#urlconf-entries-for-admin-images-admin-embeds-etc-need-to-be-removed"""
% wagtailimages_index_path
)
else:
OLD_STYLE_URLCONF_CHECK_PASSED = True
@hooks.register('construct_main_menu')
def construct_main_menu(request, menu_items):
if not OLD_STYLE_URLCONF_CHECK_PASSED:
check_old_style_urlconf()
class ImagesMenuItem(MenuItem):
def is_shown(self, request):
return request.user.has_perm('wagtailimages.add_image')
@hooks.register('register_admin_menu_item')
def register_images_menu_item():
return ImagesMenuItem(_('Images'), urlresolvers.reverse('wagtailimages:index'), name='images', classnames='icon icon-image', order=300)
@hooks.register('insert_editor_js')
def editor_js():
js_files = [
'wagtailimages/js/hallo-plugins/hallo-wagtailimage.js',
'wagtailimages/js/image-chooser.js',
]
js_includes = format_html_join('\n', '<script src="{0}{1}"></script>',
((settings.STATIC_URL, filename) for filename in js_files)
)
return js_includes + format_html(
"""
<script>
window.chooserUrls.imageChooser = '{0}';
registerHalloPlugin('hallowagtailimage');
</script>
""",
urlresolvers.reverse('wagtailimages:chooser')
)
@hooks.register('register_permissions')
def register_permissions():
image_content_type = ContentType.objects.get(app_label='wagtailimages', model='image')
image_permissions = Permission.objects.filter(content_type=image_content_type)
return image_permissions
@hooks.register('register_image_operations')
def register_image_operations():
return [
('original', image_operations.DoNothingOperation),
('fill', image_operations.FillOperation),
('min', image_operations.MinMaxOperation),
('max', image_operations.MinMaxOperation),
('width', image_operations.WidthHeightOperation),
('height', image_operations.WidthHeightOperation),
]
@hooks.register('register_rich_text_embed_handler')
def register_image_embed_handler():
return ('image', ImageEmbedHandler)
class ImagesSummaryItem(SummaryItem):
order = 200
template = 'wagtailimages/homepage/site_summary_images.html'
def get_context(self):
return {
'total_images': get_image_model().objects.count(),
}
@hooks.register('construct_homepage_summary_items')
def add_images_summary_item(request, items):
items.append(ImagesSummaryItem(request))
|
Python
| 0
|
@@ -109,64 +109,8 @@
ers%0A
-from django.core.exceptions import ImproperlyConfigured%0A
from
@@ -850,2300 +850,8 @@
%5D%0A%0A%0A
-# Check for the presence of a pre-Wagtail-0.3-style urlconf, and fail loudly if one is found.%0A# Prior to Wagtail 0.3, the standard Wagtail urls.py contained an entry for%0A# wagtail.wagtailimages.urls rooted at '/admin/images/' or equivalent. As of Wagtail 0.5,%0A# the wagtailimages admin views are defined by wagtail.wagtailimages.admin_urls, and%0A# wagtail.wagtailimages.urls is used for front-end views instead - which means that those URLs%0A# will clash with the admin.%0A# This check can only be performed after the ROOT_URLCONF module has been fully imported. Since%0A# importing a urlconf module generally involves recursively importing a whole load of other things%0A# including models.py and wagtail_hooks.py, there is no obvious place to put this code at the%0A# module level without causing a circular import. We therefore put it in construct_main_menu, which%0A# is run frequently enough to ensure that the error message will not be missed. Yes, it's hacky :-(%0A%0AOLD_STYLE_URLCONF_CHECK_PASSED = False%0Adef check_old_style_urlconf():%0A global OLD_STYLE_URLCONF_CHECK_PASSED%0A%0A # A faulty urls.py will place wagtail.wagtailimages.urls at the same path that%0A # wagtail.wagtailimages.admin_urls is loaded to, resulting in the wagtailimages_serve path%0A # being equal to wagtailimages:index followed by three arbitrary args%0A try:%0A wagtailimages_serve_path = urlresolvers.reverse('wagtailimages_serve', args=%5B'123', '456', '789'%5D)%0A except urlresolvers.NoReverseMatch:%0A # wagtailimages_serve is not defined at all, so there's no collision%0A OLD_STYLE_URLCONF_CHECK_PASSED = True%0A return%0A%0A wagtailimages_index_path = urlresolvers.reverse('wagtailimages:index')%0A if wagtailimages_serve_path == wagtailimages_index_path + '123/456/789/':%0A raise ImproperlyConfigured(%22%22%22Your urls.py contains an entry for %25s that needs to be removed.%0A See http://wagtail.readthedocs.org/en/latest/releases/0.5.html#urlconf-entries-for-admin-images-admin-embeds-etc-need-to-be-removed%22%22%22%0A %25 wagtailimages_index_path%0A )%0A else:%0A OLD_STYLE_URLCONF_CHECK_PASSED = True%0A%0A%0A@hooks.register('construct_main_menu')%0Adef construct_main_menu(request, menu_items):%0A if not OLD_STYLE_URLCONF_CHECK_PASSED:%0A check_old_style_urlconf()%0A%0A%0A
clas
|
ae3f9fbcf2bedba6798460569b10260c9acaa1bf
|
Fix URL to match filter
|
watcher/tweakerswatcher.py
|
watcher/tweakerswatcher.py
|
import requests
import json
import os.path
from watcher.watcher import Watcher
class TweakersWatcher(Watcher):
watcher_name = 'Tweakers Pricewatch'
filename = 'site_tweakers.txt'
def parse_site(self):
url = 'https://tweakers.net/xmlhttp/xmlHttp.php?application=tweakbase&type=filter&action=deals&dayOffset=1&minRelativePriceDrop=0.4&maxRelativePriceDrop=1&minAbsolutePriceDrop=30&maxAbsolutePriceDrop=&minCurrentPrice=0&maxCurrentPrice=&minPrices=3&minViews=0&of=absolutePriceDrop&od=desc&output=json'
request = requests.get(url)
json_object = json.loads(request.text)
return json_object['data']['html']
def check_price_error(self):
url = 'https://tweakers.net/pricewatch/deals/#filter:q1ZKSaz0T0srTi1RsjLUUcpNrAhKzUksySxLDSjKTE51KcovgEhk5jkmFefnlJYgSxgZgGWcS4uKUvNKwBJKVhAxMKcYpheLoQZ6ZmCpsMzUcqA6g1oA'
message_text = 'Mogelijke prijsfout, check: {0}'.format(url)
html = self.parse_site()
if not os.path.isfile(self.filename):
self.write_to_file(self.filename, html)
exit(0)
else:
with open(self.filename, 'r') as f:
file_content = f.read()
if file_content != html:
self.send_telegram(self.watcher_name, message_text)
self.write_to_file(self.filename, html)
|
Python
| 0
|
@@ -807,17 +807,17 @@
nlJYgSxg
-Z
+b
gGWcS4uK
@@ -836,30 +836,54 @@
MKcY
-pheLoQZ6ZmCpsMzUcqA6g1
+qATMw2KogZ4JWCosM7W8GKwrvygltcgtMzUnRclKKRHDtl
oA'%0A
|
d3effa1b80c8d56c98451f335b8099b72fa1f61b
|
Remove unused OrderedDict import
|
yelp_kafka_tool/kafka_cluster_manager/cluster_info/util.py
|
yelp_kafka_tool/kafka_cluster_manager/cluster_info/util.py
|
from collections import Counter, OrderedDict
def get_partitions_per_broker(brokers):
"""Return partition count for each broker."""
return dict(
(broker, len(broker.partitions))
for broker in brokers
)
def get_leaders_per_broker(brokers, partitions):
"""Return count for each broker the number of times
it is assigned as preferred leader.
"""
leaders_per_broker = dict(
(broker, 0)
for broker in brokers
)
for partition in partitions:
leaders_per_broker[partition.leader] += 1
return leaders_per_broker
def get_per_topic_partitions_count(broker):
"""Return partition-count of each topic on given broker."""
return Counter((partition.topic for partition in broker.partitions))
def get_optimal_metrics(total_elements, total_groups):
"""Return optimal count and extra-elements allowed based on base
total count of elements and groups.
"""
opt_element_cnt = total_elements // total_groups
extra_elements_allowed_cnt = total_elements % total_groups
evenly_distribute = bool(not extra_elements_allowed_cnt)
return opt_element_cnt, extra_elements_allowed_cnt, evenly_distribute
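# Worked example (sketch): distributing 10 elements over 3 groups:
#   get_optimal_metrics(10, 3) -> (3, 1, False)
# i.e. 3 per group optimally, 1 group may hold an extra element, and the
# distribution cannot be perfectly even.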
|
Python
| 0.000065
|
@@ -28,21 +28,8 @@
nter
-, OrderedDict
%0A%0A%0Ad
|
b0212d5489b10956976365c862470e338c45509a
|
Test Twisted and c-ares resolvers in netutil_test.
|
tornado/test/netutil_test.py
|
tornado/test/netutil_test.py
|
from __future__ import absolute_import, division, print_function, with_statement
import socket
from tornado.netutil import BlockingResolver, ThreadedResolver, is_valid_ip
from tornado.testing import AsyncTestCase, gen_test
from tornado.test.util import unittest
try:
from concurrent import futures
except ImportError:
futures = None
class _ResolverTestMixin(object):
def test_localhost(self):
self.resolver.resolve('localhost', 80, callback=self.stop)
future = self.wait()
self.assertIn((socket.AF_INET, ('127.0.0.1', 80)),
future.result())
@gen_test
def test_future_interface(self):
addrinfo = yield self.resolver.resolve('localhost', 80,
socket.AF_UNSPEC)
self.assertIn((socket.AF_INET, ('127.0.0.1', 80)),
addrinfo)
class BlockingResolverTest(AsyncTestCase, _ResolverTestMixin):
def setUp(self):
super(BlockingResolverTest, self).setUp()
self.resolver = BlockingResolver(io_loop=self.io_loop)
@unittest.skipIf(futures is None, "futures module not present")
class ThreadedResolverTest(AsyncTestCase, _ResolverTestMixin):
def setUp(self):
super(ThreadedResolverTest, self).setUp()
self.resolver = ThreadedResolver(io_loop=self.io_loop)
def tearDown(self):
self.resolver.executor.shutdown()
super(ThreadedResolverTest, self).tearDown()
class IsValidIPTest(unittest.TestCase):
def test_is_valid_ip(self):
self.assertTrue(is_valid_ip('127.0.0.1'))
self.assertTrue(is_valid_ip('4.4.4.4'))
self.assertTrue(is_valid_ip('::1'))
self.assertTrue(is_valid_ip('2620:0:1cfe:face:b00c::3'))
self.assertTrue(not is_valid_ip('www.google.com'))
self.assertTrue(not is_valid_ip('localhost'))
self.assertTrue(not is_valid_ip('4.4.4.4<'))
self.assertTrue(not is_valid_ip(' 127.0.0.1'))
|
Python
| 0
|
@@ -338,16 +338,273 @@
= None%0A%0A
+try:%0A import pycares%0Aexcept ImportError:%0A pycares = None%0Aelse:%0A from tornado.platform.caresresolver import CaresResolver%0A%0Atry:%0A import twisted%0Aexcept ImportError:%0A twisted = None%0Aelse:%0A from tornado.platform.twisted import TwistedResolver%0A
%0Aclass _
@@ -1706,24 +1706,538 @@
earDown()%0A%0A%0A
+@unittest.skipIf(pycares is None, %22pycares module not present%22)%0Aclass CaresResolverTest(AsyncTestCase, _ResolverTestMixin):%0A def setUp(self):%0A super(CaresResolverTest, self).setUp()%0A self.resolver = CaresResolver(io_loop=self.io_loop)%0A%0A%0A@unittest.skipIf(twisted is None, %22twisted module not present%22)%0Aclass TwistedResolverTest(AsyncTestCase, _ResolverTestMixin):%0A def setUp(self):%0A super(TwistedResolverTest, self).setUp()%0A self.resolver = TwistedResolver(io_loop=self.io_loop)%0A%0A%0A
class IsVali
|
b0b40db76e3c602eb0c49cf99b2ab8c6ef533751
|
Remove the param if the value is None
|
sara_flexbe_states/src/sara_flexbe_states/SetRosParam.py
|
sara_flexbe_states/src/sara_flexbe_states/SetRosParam.py
|
#!/usr/bin/env python
import rospy
from flexbe_core import EventState, Logger
'''
Created on 21.09.2017
@author: Philippe La Madeleine
'''
class SetRosParam(EventState):
'''
Store a value in the ros parameter server for later use.
    -- ParamName string The name of the rosparam to set.
    ># Value object The value to store.
<= done The rosparam is set
'''
def __init__(self, ParamName):
'''
Constructor
'''
super(SetRosParam, self).__init__(outcomes=['done'], input_keys=['Value'])
self.ParamName = ParamName
def execute(self, userdata):
'''
Execute this state
'''
rospy.set_param(self.ParamName, userdata.Value)
return "done"
|
Python
| 0.999914
|
@@ -684,25 +684,55 @@
'''%0A
-%0A
+ if userdata.Value:%0A
rosp
@@ -775,16 +775,129 @@
.Value)%0A
+ else:%0A if rospy.has_param(self.ParamName):%0A rospy.delete_param(self.ParamName)%0A
|
738fc28922e0807bd292c8257ac251f5f743c237
|
Fix PEP 8 errors.
|
kotti_dkbase/__init__.py
|
kotti_dkbase/__init__.py
|
from pyramid.httpexceptions import HTTPError
from pyramid.httpexceptions import HTTPNotFound
from kotti_dkbase.views import error_view
from kotti_dkbase.views import exception_decorator
def includeme(config):
config.include('pyramid_zcml')
config.load_zcml('configure.zcml')
config.add_view(
error_view,
context=HTTPNotFound,
renderer='kotti_dkbase:templates/view/error-404.pt',
)
config.add_view(
error_view,
context=HTTPError,
renderer='kotti_dkbase:templates/view/error.pt',
)
config.add_view(
error_view,
decorator=exception_decorator,
context=Exception,
renderer='kotti_dkbase:templates/view/error.pt',
)
config.add_static_view('static-kotti_dkbase', 'kotti_dkbase:static')
config.override_asset('kotti', 'kotti_dkbase:kotti-overrides/')
|
Python
| 0.000004
|
@@ -180,16 +180,17 @@
orator%0A%0A
+%0A
def incl
@@ -406,34 +406,30 @@
or-404.pt',%0A
-
)%0A
+
config.a
@@ -537,34 +537,30 @@
/error.pt',%0A
-
)%0A
+
config.a
@@ -715,18 +715,14 @@
t',%0A
-
)%0A
+
@@ -790,16 +790,16 @@
tatic')%0A
-
conf
@@ -857,8 +857,9 @@
rides/')
+%0A
|
7d082f2012c08f4689c9d503d87d0557255c894d
|
Add option to use Yappi for manage.py profile command.
|
yet_another_django_profiler/management/commands/profile.py
|
yet_another_django_profiler/management/commands/profile.py
|
# encoding: utf-8
# Created by Jeremy Bowman on Fri Feb 21 11:16:36 EST 2014
# Copyright (c) 2014, 2015 Safari Books Online. All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE file for details.
"""
Yet Another Django Profiler "profile" management command
"""
from __future__ import unicode_literals
import atexit
import cProfile
from cStringIO import StringIO
import marshal
import mock
from optparse import make_option
import pstats
import subprocess
import sys
import tempfile
from django.conf import settings
from django.core.management import call_command, ManagementUtility
from django.core.management.base import BaseCommand
from yet_another_django_profiler.middleware import func_strip_path, which
class Command(BaseCommand):
"""
Django management command for profiling other management commands.
"""
args = 'other_command <argument argument ...>'
help = 'Profile another Django management command'
custom_options = (
make_option(
'-o',
'--output',
dest='path',
help='Path to a file in which to store the profiling output (required if generating a call graph PDF, other results are output to the console by default)'
),
make_option(
'-s',
'--sort',
dest='sort',
help='Statistic by which to sort the profiling data (default is to generate a call graph PDF instead)'
),
make_option(
'-f',
'--fraction',
dest='fraction',
help='The fraction of total function calls to display (the default of .2 is omitted if max-calls or pattern are specified)'
),
make_option(
'-m',
'--max-calls',
dest='max_calls',
help='The maximum number of function calls to display'
),
make_option(
'-p',
'--pattern',
dest='pattern',
help='Regular expression filter for function display names'
)
)
option_list = BaseCommand.option_list + custom_options
def create_parser(self, prog_name, subcommand):
"""
Override the base create_parser() method to ignore options of the
command being profiled.
"""
parser = super(Command, self).create_parser(prog_name, subcommand)
parser.disable_interspersed_args()
return parser
def handle(self, *args, **options):
"""
Run and profile the specified management command with the provided
arguments.
"""
if not len(args):
self.print_help(sys.argv[0], 'profile')
sys.exit(1)
if not options['sort'] and not options['path']:
self.stdout.write('Output file path is required for call graph generation')
sys.exit(1)
command_name = args[0]
utility = ManagementUtility(sys.argv)
command = utility.fetch_command(command_name)
parser = command.create_parser(sys.argv[0], command_name)
command_options, command_args = parser.parse_args(list(args[1:]))
if command_name == 'test' and settings.TEST_RUNNER == 'django_nose.NoseTestSuiteRunner':
# Ugly hack: make it so django-nose won't have nosetests choke on
# our parameters
BaseCommand.option_list += self.custom_options
profiler = cProfile.Profile()
atexit.register(output_results, profiler, options, self.stdout)
profiler.runcall(call_command, command_name, *command_args, **command_options.__dict__)
sys.exit(0)
def output_results(profiler, options, stdout):
"""Generate the profiler output in the desired format. Implemented as a
separate function so it can be run as an exit handler (because management
commands often call exit() directly, bypassing the rest of the profile
command's handle() method)."""
profiler.create_stats()
if not options['sort']:
if not which('dot'):
stdout.write('Could not find "dot" from Graphviz; please install Graphviz to enable call graph generation')
return
if not which('gprof2dot.py'):
stdout.write('Could not find gprof2dot.py, which should have been installed by yet-another-django-profiler')
return
with tempfile.NamedTemporaryFile() as stats:
stats.write(marshal.dumps(profiler.stats))
stats.flush()
cmd = ('gprof2dot.py -f pstats {} | dot -Tpdf'.format(stats.name))
process = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
output = process.communicate()[0]
return_code = process.poll()
if return_code:
stdout.write('gprof2dot/dot exited with {}'.format(return_code))
return
path = options['path']
with open(path, 'wb') as pdf_file:
pdf_file.write(output)
stdout.write('Wrote call graph to {}'.format(path))
else:
sort = options['sort']
if sort == 'file':
# Work around bug on Python versions >= 2.7.4
sort = 'fil'
out = StringIO()
stats = pstats.Stats(profiler, stream=out)
with mock.patch('pstats.func_strip_path') as mock_func_strip_path:
mock_func_strip_path.side_effect = func_strip_path
stats.strip_dirs()
restrictions = []
if options['pattern']:
restrictions.append(options['pattern'])
if options['fraction']:
restrictions.append(float(options['fraction']))
elif options['max_calls']:
restrictions.append(int(options['max_calls']))
elif not options['pattern']:
restrictions.append(.2)
stats.sort_stats(sort).print_stats(*restrictions)
if options['path']:
path = options['path']
with open(path, 'w') as text_file:
text_file.write(out.getvalue())
stdout.write('Wrote profiling statistics to {}'.format(path))
else:
stdout.write(out.getvalue())
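# Minimal sketch of the exit-handler pattern used in handle() above,
# isolated (hypothetical target function; cProfile only):
#   import atexit, cProfile
#   profiler = cProfile.Profile()
#   atexit.register(lambda: profiler.print_stats('cumulative'))
#   profiler.runcall(main)  # stats are printed even if main() calls exit()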
|
Python
| 0
|
@@ -2089,16 +2089,182 @@
names'%0A
+ ),%0A make_option(%0A '-b',%0A '--backend',%0A dest='backend',%0A help='Profiler backend to use (cProfile or yappi)'%0A
@@ -3615,24 +3615,205 @@
om_options%0A%0A
+ if options%5B'backend'%5D == 'yappi':%0A import yet_another_django_profiler.yadp_yappi as yadp_yappi%0A profiler = yadp_yappi.YappiProfile()%0A else:%0A
prof
@@ -3830,32 +3830,33 @@
ofile.Profile()%0A
+%0A
atexit.r
|
99a86a26170d293d8a11074f62f09024493422c1
|
Remove global commands in real_command()
|
utils.py
|
utils.py
|
import requests
import traceback
import six
import config
import log
print_ = six.print_
PY3 = six.PY3
PY34 = six.PY34
PY2 = six.PY2
commands = {}
cmd_list = []
alias_list = []
get = requests.get
post = requests.post
sysver = "".join(__import__("sys").version.split("\n"))
gitver = __import__("subprocess").check_output(['git',
'rev-parse',
'--short',
'HEAD']).decode().split()[0]
version = "A zIRC bot v{0}@{1}, running on Python {2}".format("0.1",
gitver,
sysver)
def add_cmd(name, minArgs=1, alias=None, owner=False,
admin=False, trusted=False, hide=False):
def real_command(func):
global commands
global cmd_list
global alias_list
commands[name] = {
'perms': [trusted, admin, owner],
'func': func,
'minArgs': minArgs,
'hide': hide
}
if alias:
for i in alias:
commands[i] = {
'perms': [trusted, admin, owner],
'func': func,
'minArgs': minArgs,
'hide': True
}
alias_list.append(i)
cmds = [i for i in commands.keys() if not commands[i]['hide']]
cmd_list = sorted(cmds)
alias_list = sorted(alias_list)
return real_command
def call_command(bot, event, irc, arguments):
command = ' '.join(arguments).split(' ')
name = command[0][1:]
    if name != '' and name.find("?") == -1:
privmsg = event.target == bot.config['nickname']
args = command[1:] if len(command) > 1 else ''
host = event.source.host
chan = event.target if not privmsg else False
try:
perms = commands[name]['perms']
minArgs = commands[name]['minArgs']
if checkPerms(host, owner=perms[2], admin=perms[1],
trusted=perms[0], channel=chan):
if len(args) < minArgs:
irc.reply(event, config.argsMissing)
else:
target = "a private message" if privmsg else event.target
if not config.ci:
log.info("%s called %s in %s", event.source, name, target)
commands[name]['func'](bot, event, irc, args)
else:
irc.reply(event, config.noPerms)
except KeyError:
irc.notice(event.source.nick, config.invalidCmd.format(name))
except Exception:
irc.reply(event, 'Oops, an error occured!')
PrintError(irc, event)
def checkPerms(host, owner=False, admin=False, trusted=False, channel=False):
isOwner = host in config.owners
isAdmin = host in config.admins
isTrusted = host in config.trusted
isBot = host.find("/bot/") != -1 and host not in config.bots['hosts']
ignores = config.ignores["global"]
ignoreChans = list(config.ignores["channels"].keys())
if channel:
if channel in ignoreChans:
ignores.extend(config.ignores["channels"][channel])
if channel in config.bots['channels']:
isBot = False
isIgnored = host in ignores
if owner and isOwner:
return True
elif admin and (isAdmin or isOwner):
return True
elif trusted and (isTrusted or isAdmin or isOwner) and not isIgnored:
return True
elif not (owner or admin or trusted) and not isIgnored and not isBot:
return True
else:
return False
def PrintError(irc, event):
log.exception("An unknown error occured")
if not config.ci:
try:
syntax = "py3tb" if PY3 else "pytb"
tb = traceback.format_exc().strip()
r = post("http://dpaste.com/api/v2/",
data={
"title": "zIRCBot Error: {0}".format(tb.split("\n")[-1]),
"content": tb,
"syntax": syntax,
"expiry-days": "10",
"poster": "wolfy1339"
},
allow_redirects=True,
timeout=60)
irc.msg('##wolfy1339', "Error: {0}".format(r.text.split("\n")[0]))
except Exception:
irc.msg('##wolfy1339', config.tracebackPostError)
log.exception(config.tracebackPostError)
else:
__import__('sys').exit(1)
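# Hypothetical usage of the add_cmd decorator factory defined above:
#   @add_cmd('ping', minArgs=0, alias=['p'])
#   def ping(bot, event, irc, args):
#       irc.reply(event, 'pong')
# Note that real_command() registers func in `commands` but returns nothing,
# so the module-level name `ping` becomes None; dispatch happens only through
# call_command() and the registry.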
|
Python
| 0.000014
|
@@ -886,35 +886,13 @@
bal
-commands%0A global cmd
+alias
_lis
@@ -904,29 +904,27 @@
global
-alias
+cmd
_list%0A%0A
|
58ba7b4f777b88d81f2a9f717001d9cdac603947
|
Use copy2 to preserve file attributes
|
utils.py
|
utils.py
|
from __future__ import print_function
import os
import zipfile
import io
import platform
import tempfile
import codecs
import shutil
import subprocess
from appdirs import AppDirs
import validators
from PySide import QtCore
#try:
# import zlib
# ZIP_MODE = zipfile.ZIP_DEFLATED
#except:
def url_exists(path):
if validators.url(path):
return True
return False
def load_last_project_path():
proj_path = ''
proj_file = get_data_file_path('files/last_project_path.txt')
if os.path.exists(proj_file):
with codecs.open(proj_file, encoding='utf-8') as f:
proj_path = f.read().strip()
if not proj_path:
proj_path = QtCore.QDir.currentPath()
return proj_path
def load_recent_projects():
files = []
history_file = get_data_file_path('files/recent_files.txt')
if not os.path.exists(history_file):
return files
with codecs.open(history_file, encoding='utf-8') as f:
for line in f:
line = line.strip()
if line and os.path.exists(line):
files.append(line)
files.reverse()
return files
def save_project_path(path):
proj_file = get_data_file_path('files/last_project_path.txt')
with codecs.open(proj_file, 'w+', encoding='utf-8') as f:
f.write(path)
def save_recent_project(proj):
recent_file_path = get_data_file_path('files/recent_files.txt')
max_length = config.MAX_RECENT
recent_files = []
if os.path.exists(recent_file_path):
recent_files = codecs.open(recent_file_path, encoding='utf-8').read().split(u'\n')
try:
recent_files.remove(proj)
except ValueError:
pass
recent_files.append(proj)
with codecs.open(recent_file_path, 'w+', encoding='utf-8') as f:
for recent_file in recent_files[-max_length:]:
if recent_file and os.path.exists(recent_file):
f.write(u'{}\n'.format(recent_file))
def replace_right(source, target, replacement, replacements=None):
return replacement.join(source.rsplit(target, replacements))
def is_windows():
return platform.system() == 'Windows'
def get_temp_dir():
return tempfile.gettempdir()
def path_join(base, *rest):
new_rest = []
for i in range(len(rest)):
new_rest.append(str(rest[i]))
rpath = u'/'.join(new_rest)
if not os.path.isabs(rpath):
rpath = base + u'/' + rpath
if is_windows():
rpath = rpath.replace('/', '\\')
return rpath
def get_data_path(dir_path):
parts = dir_path.split('/')
dirs = AppDirs('Web2Executable', 'Web2Executable')
data_path = path_join(dirs.user_data_dir, *parts)
if is_windows():
data_path = data_path.replace(u'\\', u'/')
if not os.path.exists(data_path):
os.makedirs(data_path)
return data_path
def get_data_file_path(file_path):
parts = file_path.split('/')
data_path = get_data_path('/'.join(parts[:-1]))
return path_join(data_path, parts[-1])
def rmtree(path, **kwargs):
if is_windows():
if os.path.isabs(path):
path = '\\\\?\\'+path.replace('/', '\\')
shutil.rmtree(path, **kwargs)
def copy(src, dest, **kwargs):
if is_windows():
if os.path.isabs(src):
src = '\\\\?\\'+src.replace('/', '\\')
if os.path.isabs(dest):
dest = '\\\\?\\'+dest.replace('/', '\\')
shutil.copy(src, dest, **kwargs)
def move(src, dest, **kwargs):
if is_windows():
if os.path.isabs(src):
src = '\\\\?\\'+src.replace('/', '\\')
if os.path.isabs(dest):
dest = '\\\\?\\'+dest.replace('/', '\\')
shutil.move(src, dest, **kwargs)
def copytree(src, dest, **kwargs):
if is_windows():
if os.path.isabs(src):
src = '\\\\?\\'+src.replace('/', '\\')
if os.path.isabs(dest):
dest = '\\\\?\\'+dest.replace('/', '\\')
shutil.copytree(src, dest, **kwargs)
def log(*args):
if config.DEBUG:
print(*args)
with open(get_data_file_path('files/error.log'), 'a+') as f:
f.write(', '.join(args))
f.write('\n')
def open_folder_in_explorer(path):
if platform.system() == "Windows":
os.startfile(path)
elif platform.system() == "Darwin":
subprocess.Popen(["open", path])
else:
subprocess.Popen(["xdg-open", path])
def zip_files(zip_file_name, *args, **kwargs):
zip_file = zipfile.ZipFile(zip_file_name, 'w', config.ZIP_MODE)
verbose = kwargs.pop('verbose', False)
exclude_paths = kwargs.pop('exclude_paths', [])
old_path = os.getcwd()
for arg in args:
if is_windows():
arg = '\\\\?\\'+os.path.abspath(arg).replace('/', '\\')
if os.path.exists(arg):
if os.path.isdir(arg):
directory = os.path.abspath(arg)
os.chdir(directory)
for root, dirs, files in os.walk(directory):
excluded = False
for exclude_path in exclude_paths:
if exclude_path in path_join(directory,root):
excluded = True
if not excluded:
for file in files:
file_loc = os.path.relpath(path_join(root, file), directory)
if verbose:
log(file_loc)
try:
zip_file.write(file_loc)
except ValueError:
os.utime(file_loc, None)
zip_file.write(file_loc)
except FileNotFoundError:
pass
for direc in dirs:
dir_loc = os.path.relpath(path_join(root, direc), directory)
if verbose:
log(dir_loc)
try:
zip_file.write(dir_loc)
except ValueError:
                                os.utime(dir_loc, None)
                                zip_file.write(dir_loc)
except FileNotFoundError:
pass
else:
file = os.path.abspath(arg)
directory = os.path.abspath(path_join(file, '..'))
os.chdir(directory)
file_loc = os.path.relpath(arg, directory)
if verbose:
log(file_loc)
try:
zip_file.write(file_loc)
except ValueError:
os.utime(file_loc, None)
zip_file.write(file_loc)
os.chdir(old_path)
zip_file.close()
def join_files(destination, *args, **kwargs):
with io.open(destination, 'wb') as dest_file:
for arg in args:
if os.path.exists(arg):
with io.open(arg, 'rb') as file:
while True:
bytes = file.read(4096)
if len(bytes) == 0:
break
dest_file.write(bytes)
import config
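# Sketch of the Windows long-path idiom repeated in rmtree/copy/move/copytree
# and zip_files above, pulled out as a helper (hypothetical name):
#   def _win_long_path(p):
#       # the \\?\ prefix lifts the ~260-char MAX_PATH limit, and applies
#       # only to absolute, backslash-separated paths
#       if is_windows() and os.path.isabs(p):
#           return '\\\\?\\' + p.replace('/', '\\')
#       return p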
|
Python
| 0
|
@@ -3384,24 +3384,25 @@
shutil.copy
+2
(src, dest,
|
d332ab4c4781483199bde7e985fc5eb079bcd2a1
|
Fix content type
|
transports/movilgate_http.py
|
transports/movilgate_http.py
|
import re, sys, traceback
from xml.etree import ElementTree
from twisted.internet import defer
from twisted.internet.defer import inlineCallbacks
from twisted.internet.error import ConnectionRefusedError
from twisted.web import http
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from vumi.transports.base import Transport
from vumi.utils import http_request_full, normalize_msisdn
from vumi import log
class MovilgateHttpTransport(Transport):
mandatory_metadata_fields = ['servicio_id', 'telefono_id_tran']
def mkres(self, cls, publish_func, path_key = None):
resource = cls(self.config, publish_func)
self._resources.append(resource)
if path_key is None:
path = self.config['receive_path']
else:
path = "%s/%s" % (self.config['receive_path'], path_key)
return (resource, path)
@inlineCallbacks
def setup_transport(self):
self._resources = []
log.msg("Setup mobivate transport %s" % self.config)
resources = [
self.mkres(MovilgateReceiveSMSResource,
self.publish_message)
]
self.receipt_resource = yield self.start_web_resources(
resources, self.config['receive_port'])
def stopWorker(self):
if hasattr(self, 'receipt_resource'):
return self.receipt_resource.stopListening()
def validate_transport_metadata(self, message):
for field in self.mandatory_metadata_fields:
if not field in message['transport_metadata']:
self.publish_delivery_report(
user_message_id=message['message_id'],
delivery_status='failed',
failure_level='service',
failure_code='0',
failure_reason='Missing %s metadata for sending to Movilgate' % field)
return False
return True
@inlineCallbacks
def handle_outbound_message(self, message):
log.msg("Outbound message to be processed %s" % repr(message))
try:
if not self.validate_transport_metadata(message):
return
movilgate_parser = MovilgateXMLParser()
mobilgate_msg = movilgate_parser.build({
'proveedor': {
'id': self.config['proveedor_id'],
'password': self.config['proveedor_password']},
'servicio': {'id': message['transport_metadata']['servicio_id']},
'telephono': {
'msisdn': message['to_addr'],
'id_tran': message['transport_metadata']['telefono_id_tran']},
'contenido': message['content']})
response = yield http_request_full(
self.config['url'],
mobilgate_msg,
{'User-Agent': ['Vusion Movilgate Transport'],
'Content-Type': ['application/xml; charset=UTF-8']})
if response.code != 200:
log.msg("Http Error %s: %s"
% (response.code, response.delivered_body))
yield self.publish_delivery_report(
user_message_id=message['message_id'],
delivery_status='failed',
failure_level='http',
failure_code=response.code,
failure_reason=response.delivered_body)
return
resp = ElementTree.fromstring(response.delivered_body)
status = resp.find('Transaccion').attrib['estado']
if status != "0":
yield self.publish_delivery_report(
user_message_id=message['message_id'],
delivery_status='failed',
failure_level='service',
failure_code=status,
failure_reason=resp.find('Texto').text)
return
yield self.publish_ack(
user_message_id=message['message_id'],
sent_message_id=message['message_id'])
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
log.error(
"Error during consume user message: %r" %
traceback.format_exception(exc_type, exc_value, exc_traceback))
yield self.publish_delivery_report(
user_message_id=message['message_id'],
delivery_status='failed',
failure_level='internal',
failure_code=0,
failure_reason=traceback.format_exc())
class MovilgateReceiveSMSResource(Resource):
isLeaf = True
def __init__(self, config, publish_func):
log.msg("Init ReceiveSMSResource %s" % (config))
self.config = config
self.publish_func = publish_func
self.transport_name = self.config['transport_name']
@inlineCallbacks
def do_render(self, request):
try:
raw_body = request.content.read()
log.msg('got hit with %s' % raw_body)
mo_request = ElementTree.fromstring(raw_body)
contenido = mo_request.find('Contenido').text
servicio_id = mo_request.find('Servicio').attrib['Id']
from_add = mo_request.find('Telefono').attrib['msisdn']
id_tran = mo_request.find('Telefono').attrib['IdTran']
to_addr = servicio_id.split('.')[0]
yield self.publish_func(
transport_type='sms',
to_addr=to_addr,
from_addr=from_add,
content=contenido,
transport_metadata={'telefono_id_tran': id_tran, 'servicio_id': servicio_id})
request.setResponseCode(http.OK)
request.setHeader('Content-Type', 'text/plain')
except:
request.setResponseCode(http.INTERNAL_SERVER_ERROR)
log.msg("Error processing the request: %s" % (request,))
exc_type, exc_value, exc_traceback = sys.exc_info()
log.error(
"Error during consume user message: %r" %
traceback.format_exception(exc_type, exc_value, exc_traceback))
request.finish()
def render(self, request):
self.do_render(request)
return NOT_DONE_YET
class MovilgateXMLParser():
def build(self, messagedict):
messages = ElementTree.Element('MTRequest')
proveedor = ElementTree.SubElement(messages, 'Proveedor')
proveedor.set('Id', messagedict['proveedor']['id'])
proveedor.set('Password', messagedict['proveedor']['password'])
servicio = ElementTree.SubElement(messages, 'Servicio')
servicio.set('Id', messagedict['servicio']['id'])
servicio.set('ContentType', "0")
servicio.set('CreateSession', "0")
telephono = ElementTree.SubElement(messages, 'Telefono')
telephono.set('msisdn', messagedict['telephono']['msisdn'])
telephono.set('IdTran', messagedict['telephono']['id_tran'])
contenido = ElementTree.SubElement(messages, 'Contenido')
contenido.text = messagedict['contenido']
return ElementTree.tostring(messages)
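# Example (hypothetical values) of the XML MovilgateXMLParser.build() emits
# for a message dict like the one assembled in handle_outbound_message:
#   <MTRequest>
#     <Proveedor Id="acme" Password="s3cret" />
#     <Servicio Id="1234" ContentType="0" CreateSession="0" />
#     <Telefono msisdn="59891234567" IdTran="42" />
#     <Contenido>hola</Contenido>
#   </MTRequest>
# (ElementTree.tostring() actually returns this unindented, on one line,
# and attribute order may differ.)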
|
Python
| 0.003446
|
@@ -2969,19 +2969,12 @@
: %5B'
-application
+text
/xml
|
4b44947911660ceee3a09da08c7c22509f953872
|
Add TODOs
|
utils.py
|
utils.py
|
from __future__ import absolute_import
import csv
import logging
import json
import re
from collections import defaultdict
from utils.handlers import ColorizingStreamHandler, JSONFileHandler
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(ColorizingStreamHandler())
logger.addHandler(JSONFileHandler('input.jslog'))
class IpedsCsvReader(object):
field_mapping = None
primary_mapping = None
year_type = None
def __init__(self, fh, **kwargs):
for key, value in kwargs.items():
if hasattr(self, key):
setattr(self, key, value)
self._reader = self.get_reader(fh)
self.parse_header()
def get_reader(self, fh):
return csv.reader(fh)
def parse_header(self):
header = self._reader.next()
self.header = header
if self.field_mapping is None:
return
years = defaultdict(list)
fields = dict(self.field_mapping)
primary_idx = None
for idx, cell in enumerate(header):
if cell == self.primary_mapping[0]:
primary_idx = idx
continue
try:
name, year = re.match(r'(\w+)\([a-zA-Z]+(\d+)', cell).groups()
except AttributeError:
continue
if name in fields:
years[year].append((idx, fields[name]))
self.primary_idx = primary_idx
self.years_data = years
def parse_rows(self, institution_model, report_model):
report_name = report_model.__name__
for row in self._reader:
if len("".join(row[2:])) == 0:
# skip empty rows
continue
inst = institution_model.objects.get(ipeds_id=row[self.primary_idx])
for year in self.years_data:
new_data = dict()
for idx, name in self.years_data[year]:
if row[idx]:
new_data[name] = row[idx]
if new_data:
instance, created = report_model.objects.get_or_create(
institution=inst, year=year,
defaults=dict(year_type=self.year_type))
instance.__dict__.update(new_data)
instance.save()
else:
continue
# camelCase for better JSON compatibility
log_data = dict(firstImport=created, # `created` is reserved
instPk=inst.pk, instName=inst.name, year=year,
report=report_name, newData=new_data,
source="ipeds")
logger.info("%s" % (instance), extra=dict(json=log_data))
def explain_header(self):
from .models import Variable
name_set = set()
for cell in self.header:
try:
name, code = re.match(r'(\w+)\((\w+)\)', cell).groups()
var = Variable.objects.filter(raw__startswith="%s|%s|" % (code, name))[0]
except AttributeError:
continue
except IndexError:
name = "????"
code = cell
var = None
name_set.add(name)
print name, code, var.long_name if var else ""
print "%d Unique Variables: %s" % (len(name_set), sorted(name_set))
|
Python
| 0
|
@@ -2351,32 +2351,185 @@
-continue
+# skip empty data%0A continue%0A # TODO only log changed data%0A # TODO make ints ints, decimals strings, floats float
%0A
|
9a861757011e2f8ba17bc30b0e874d087f5afd7b
|
Bump version to 6.0.1b1
|
platformio/__init__.py
|
platformio/__init__.py
|
# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (6, 0, "1a1")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO Labs"
__email__ = "contact@piolabs.com"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO Labs"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_mirror_hosts__ = [
"registry.platformio.org",
"registry.nm1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.4.1",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-scons": "~4.40300.0",
"tool-cppcheck": "~1.270.0",
"tool-clangtidy": "~1.120001.0",
"tool-pvs-studio": "~7.18.0",
}
__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org
"github.com",
] + __registry_mirror_hosts__
|
Python
| 0
|
@@ -635,17 +635,17 @@
6, 0, %221
-a
+b
1%22)%0A__ve
|
225d3f4abe2a9145dba3f3b1e0a72b9db4aea0f7
|
Fix DOB plot title
|
plots/gender_by_dob.py
|
plots/gender_by_dob.py
|
import dateutil
import pandas
from bokeh.charts import TimeSeries, Line
from bokeh.plotting import gridplot
from bokeh.resources import CDN
from bokeh.embed import autoload_static
import os
def plot(newest_changes):
    ra_len = 1  # rolling average length
dox = pandas.DataFrame()
interesante = ['female','male','nonbin']
for l in ['b', 'd']:
acro = 'do'+l
filelist = os.listdir('/home/maximilianklein/snapshot_data/{}/'.format(newest_changes))
dox_list = [f for f in filelist if f.startswith(acro)]
dox_file = dox_list[0]
if newest_changes == 'newest-changes':
date_range = dox_file.split('{}-index-from-'.format(acro))[1].split('.csv')[0].replace('-',' ')
csv_to_read = '/home/maximilianklein/snapshot_data/{}/{}'.format(newest_changes,dox_file)
df = pandas.DataFrame.from_csv(csv_to_read)
del df['nan']
df['total'] = df.sum(axis=1)
df['nonbin'] = df['total'] - df['male'] - df['female']
df['fem_per'] = df['female'] / (df['total'])
df['nonbin_per'] = df['nonbin'] / df['total']
for inte in interesante:
dox['{}-{}'.format(acro, inte)] = df[inte]
#ra = pandas.rolling_mean(df['fem_per'], ra_len)
#dox[acro] = ra
time_range = (1400, 2015)
dox = dox[time_range[0]: time_range[1]]
'''dox['Date'] = [dateutil.parser.parse(str(int(x)))
for x in dox['dob'].keys()]'''
tups = zip(['Date of Birth']*3 + ['Date of Death']*3, ['Women', 'Men', 'Non-binary']* 2)
labs = ['-'.join(x) for x in tups]
dox.columns = labs
table_html = dox.to_html(max_rows=20, na_rep="n/a")
title_suffix = 'Changes since {}'.format(date_range) if newest_changes == 'newest-changes' else 'All Time'
p = Line(dox, legend=True, title="Female Ratios {}".format(title_suffix))
#p.below[0].formatter.formats = dict(years=['%Y'])
'''
nonbindox = nonbindox[time_range[0]: time_range[1]]
nonbindox['Date'] = [dateutil.parser.parse(str(int(x)))
for x in nonbindox['dob'].keys()]
p2 = TimeSeries(nonbindox[['dob','dod','Date']], index='Date', legend=True,
title="Non Binary Ratios ".format(title_suffix))
p2.below[0].formatter.formats = dict(years=['%Y'])
p = gridplot([[p1], [p2]], toolbar_location=None)
'''
js_filename = "gender_by_dob_{}.js".format(newest_changes)
script_path = "./assets/js/"
output_path = "./files/assets/js/"
# generate javascript plot and corresponding script tag
js, tag = autoload_static(p, CDN, script_path + js_filename)
with open(output_path + js_filename, 'w') as js_file:
js_file.write(js)
return {'plot_tag':tag, 'table_html':table_html}
if __name__ == "__main__":
print(plot('newest'))
print(plot('newest-changes'))
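# Worked example of the column relabelling above:
#   tups = [('Date of Birth', 'Women'), ('Date of Birth', 'Men'),
#           ('Date of Birth', 'Non-binary'), ('Date of Death', 'Women'), ...]
#   labs = ['Date of Birth-Women', 'Date of Birth-Men', ...]
# which renames the six dox columns built from the dob-*/dod-* series.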
|
Python
| 0.000005
|
@@ -1268,25 +1268,16 @@
ro%5D = ra
-%0A
%0A%0A ti
@@ -1345,20 +1345,16 @@
nge%5B1%5D%5D%0A
-
%0A '''
@@ -1823,21 +1823,43 @@
le=%22
-Female Ratios
+Date of Birth and Death by Gender -
%7B%7D%22
@@ -2789,16 +2789,17 @@
e_html%7D%0A
+%0A
if __nam
|
491d7eca2137613978a7d88ad74fcdda9dcb5e5c
|
Add find_packages to setup.py
|
plugins/geoip/setup.py
|
plugins/geoip/setup.py
|
#!/usr/bin/env python
import setuptools
version = '0.1.0'
setuptools.setup(
name="alerta-geoip",
version=version,
description='Alerta plugin for GeoIP Lookup',
url='https://github.com/alerta/alerta-contrib',
license='Apache License 2.0',
author='Nick Satterly',
author_email='nick.satterly@theguardian.com',
py_modules=['geoip'],
install_requires=[
'requests',
'alerta-server'
],
include_package_data=True,
zip_safe=False,
entry_points={
'alerta.plugins': [
'geoip = geoip:GeoLocation'
]
}
)
|
Python
| 0.000001
|
@@ -20,24 +20,50 @@
on%0A%0A
-import setuptool
+from setuptools import setup, find_package
s%0A%0Av
@@ -84,19 +84,8 @@
0'%0A%0A
-setuptools.
setu
@@ -347,16 +347,46 @@
n.com',%0A
+ packages=find_packages(),%0A
py_m
|
eecb7d6c8d86912dc4994ec7439e1679282d3347
|
Add a trivial doctest
|
pmxbot/saysomething.py
|
pmxbot/saysomething.py
|
# vim:ts=4:sw=4:noexpandtab
import threading
import random
import logging
import time
import datetime
from itertools import chain
from jaraco import timing
import pmxbot.core
import pmxbot.logging
import pmxbot.quotes
log = logging.getLogger(__name__)
nlnl = '\n', '\n'
def new_key(key, word):
if word == '\n':
return nlnl
else:
return (key[1], word)
def markov_data_from_words(words):
data = {}
key = nlnl
for word in words:
data.setdefault(key, []).append(word)
key = new_key(key, word)
return data
def words_from_markov_data(data, initial_word='\n'):
key = '\n', initial_word
if initial_word != '\n':
yield initial_word
while 1:
word = random.choice(data.get(key, nlnl))
key = new_key(key, word)
yield word
def words_from_file(f):
for line in f:
words = line.split()
if len(words):
for word in words:
yield word
else:
yield '\n'
yield '\n'
def words_from_logger(logger, max=1000):
return words_from_lines(logger.get_random_logs(max))
def words_from_quotes(quotes):
return words_from_lines(q['text'] for q in quotes)
def words_from_lines(lines):
for line in lines:
words = line.strip().lower().split()
for word in words:
yield word
yield '\n'
def words_from_logger_and_quotes(logger, quotes):
return chain(
words_from_logger(logger),
words_from_quotes(quotes),
['\n'],
)
def paragraph_from_words(words):
result = []
for word in words:
if word == '\n':
break
result.append(word)
return ' '.join(result)
class FastSayer:
@classmethod
def init_in_thread(cls):
threading.Thread(target=cls.init_class).start()
@classmethod
def init_class(cls):
log.info("Initializing FastSayer...")
timer = timing.Stopwatch()
cls._wait_for_stores(timer)
words = words_from_logger_and_quotes(
pmxbot.logging.Logger.store,
pmxbot.quotes.Quotes.store,
)
cls.markov_data = markov_data_from_words(words)
log.info("Done initializing FastSayer in %s.", timer.split())
def saysomething(self, initial_word='\n'):
return paragraph_from_words(words_from_markov_data(self.markov_data, initial_word))
@classmethod
def _wait_for_stores(cls, timer):
while timer.elapsed < datetime.timedelta(seconds=30):
stores_initialized = (
hasattr(pmxbot.logging.Logger, 'store') and
hasattr(pmxbot.quotes.Quotes, 'store')
)
if stores_initialized:
break
time.sleep(0.1)
else:
raise RuntimeError("Timeout waiting for stores to be initialized")
@pmxbot.core.command("saysomething")
def saysomething(client, event, channel, nick, rest):
"""
Generate a Markov Chain response based on past logs. Seed it with
a starting word by adding that to the end, eg
'!saysomething dowski:'
"""
sayer = FastSayer()
if not hasattr(sayer, 'markov_data'):
return "Sayer not yet initialized. Try again later."
if rest:
return sayer.saysomething(rest)
else:
return sayer.saysomething()
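# Worked example of the Markov helpers above (mirrors the doctest this
# commit adds):
#   words = ['foo', 'said', 'one', 'thing', '\n', '\n']
#   data = markov_data_from_words(words)
#   # keys are (previous, current) word pairs:
#   #   data[('\n', '\n')] == ['foo', '\n']
#   #   data[('foo', 'said')] == ['one']
# words_from_markov_data(data) then walks these chains at random, and
# paragraph_from_words() joins the words until the first '\n'.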
|
Python
| 0.998601
|
@@ -22,16 +22,284 @@
andtab%0A%0A
+r%22%22%22%0A%3E%3E%3E import io%0A%3E%3E%3E import itertools%0A%3E%3E%3E f = io.StringIO(%22foo said one thing%5Cn%5Cnfoo said another thing%5Cn%5Cnbar said nothing%5Cn%22)%0A%3E%3E%3E data = markov_data_from_words(words_from_file(f))%0A%3E%3E%3E words = words_from_markov_data(data)%0A%3E%3E%3E paragraph_from_words(words)%0A'...'%0A%22%22%22%0A%0A
import t
|
e3b71c58a409239845588ed9f20970243db45dba
|
Add delay to slow ball's movement in pygame1_sample
|
pong/pygame1_sample.py
|
pong/pygame1_sample.py
|
import sys, pygame
pygame.init()
size = width, height = 640, 480
speed = [2, 2]
black = 0, 0, 0
screen = pygame.display.set_mode(size)
ball = pygame.image.load("ball.gif")
ballrect = ball.get_rect()
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT: sys.exit()
ballrect = ballrect.move(speed)
if ballrect.left < 0 or ballrect.right > width:
speed[0] = -speed[0]
if ballrect.top < 0 or ballrect.bottom > height:
speed[1] = -speed[1]
screen.fill(black)
screen.blit(ball, ballrect)
pygame.display.flip()
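# Decoded sketch of the patch below: speed drops from [2, 2] to [1, 1] and a
# short sleep follows each move so the ball crosses the window more slowly:
#   import time
#   ...
#   ballrect = ballrect.move(speed)
#   time.sleep(0.001)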
|
Python
| 0
|
@@ -12,16 +12,29 @@
pygame%0A
+import time%0A%0A
pygame.i
@@ -85,12 +85,12 @@
= %5B
-2, 2
+1, 1
%5D%0Abl
@@ -341,16 +341,38 @@
(speed)%0A
+ time.sleep(0.001)%0A
if b
|
a3a19a7aa8d8b4691ddd569197024961f95f4678
|
Rename search method to search_html
|
twitterwebsearch/searcher.py
|
twitterwebsearch/searcher.py
|
"""
Module for using the web interface of Twitter's search.
"""
import sys
import time
import datetime
from selenium.common.exceptions import NoSuchElementException
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
TWITTER_SEARCH_URL = 'https://twitter.com/search-home'
SEARCH_FIELD = 'search-home-input'
WAIT_FOR_CLASS = 'AdaptiveSearchTitle-title'
SCROLLER_SCRIPT = '''
footer = document.getElementsByClassName('stream-footer')[0];
scroller = setInterval(function() { footer.scrollIntoView(); }, 250);
'''
LIVE_TWEETS_SELECTOR = 'a[href*="f=tweets"]'
DRIVER_PRIORITY = [webdriver.PhantomJS, webdriver.Firefox]
QUERY_TIMEOUT = 20 # seconds
POLL_TIME = 1 # seconds
def create_driver():
if not hasattr(create_driver, 'driver'):
for driver in DRIVER_PRIORITY:
try:
res = driver()
except:
continue
create_driver.driver = driver
return res
else:
raise RuntimeError('None of the following Selenium drivers are available: %r' % DRIVER_PRIORITY)
else:
return create_driver.driver()
def debug_screenshot(driver, dontraise=True):
try:
path = '__twitterwebsearch.%s.png' % datetime.datetime.now().strftime('%Y-%m-%d.%H%M')
driver.save_screenshot(path)
return path
except:
if dontraise:
exc_type, exc_value, _ = sys.exc_info()
print >>sys.stderr, 'Failed to create screenshot:', exc_type, '--', exc_value
else:
raise
def wait_until_url(driver, predicate, sleep=0.25):
while not predicate(driver.current_url):
time.sleep(sleep)
def search(query):
driver = create_driver()
driver.get(TWITTER_SEARCH_URL)
elem = driver.find_element_by_id(SEARCH_FIELD)
elem.send_keys(query)
elem.send_keys(Keys.ENTER)
res = ''
try:
elem = WebDriverWait(driver, QUERY_TIMEOUT).until(
EC.presence_of_element_located((By.CLASS_NAME, WAIT_FOR_CLASS))
)
try:
driver.find_element_by_css_selector(LIVE_TWEETS_SELECTOR).click()
except NoSuchElementException:
debug_screenshot(driver)
raise
wait_until_url(driver, predicate=lambda url: '&f=tweets' in url)
driver.execute_script(SCROLLER_SCRIPT)
old_size = size = 0
delta = not 0
while delta != 0:
time.sleep(POLL_TIME)
old_size = size
size = len(driver.page_source)
delta = size - old_size
res = driver.page_source
except:
debug_screenshot(driver)
raise
finally:
driver.quit()
return res
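# The "scroll until the page stops growing" idiom from search(), isolated as
# a sketch (hypothetical helper name):
#   def wait_for_stable_page(driver, poll=POLL_TIME):
#       old_size = -1
#       while len(driver.page_source) != old_size:
#           old_size = len(driver.page_source)
#           time.sleep(poll)
# SCROLLER_SCRIPT keeps scrolling the stream footer into view, so the page
# source keeps growing until Twitter has no more results to append.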
|
Python
| 0.000022
|
@@ -1891,16 +1891,71 @@
query):%0A
+ return search_html(query)%0A%0Adef search_html(query):%0A
driv
|
126e6be2dd7b61809656ada1adfe3c64cbe24c47
|
Add couchbase/spock to branch merge set.
|
engines/ep/scripts/unmerged-commits.py
|
engines/ep/scripts/unmerged-commits.py
|
#!/usr/bin/env python2.7
# Script to show which commit(s) are not yet merged between our release branches.
from __future__ import print_function
import subprocess
import sys
class bcolors:
"""Define ANSI color codes, if we're running under a TTY."""
if sys.stdout.isatty():
HEADER = '\033[36m'
WARNING = '\033[33m'
ENDC = '\033[0m'
else:
HEADER = ''
WARNING = ''
ENDC = ''
# Branches to check for unmerged patches. Each toplevel element is a series
# of branches (ordered by ancestory) which should be merged into each other.
# i.e. the oldest supported branch to the newest, which is the order
# patches should be merged.
branches = (('couchbase/watson_ep',
'couchbase/master'),
('couchbase/watson_mc',
'couchbase/master'))
total_unmerged = 0
for series in branches:
for downstream, upstream in zip(series, series[1:]):
commits = subprocess.check_output(['git', 'cherry', '-v',
upstream, downstream])
count = len(commits.splitlines())
total_unmerged += count
if count > 0:
print((bcolors.HEADER +
"{} commits in '{}' not present in '{}':" +
bcolors.ENDC).format(count, downstream, upstream))
print(commits)
if total_unmerged:
print((bcolors.WARNING + "Total of {} commits outstanding" +
bcolors.ENDC).format(total_unmerged))
sys.exit(total_unmerged)
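# `git cherry -v <upstream> <downstream>` prints one line per commit present
# on <downstream> but missing from <upstream>, e.g. (hypothetical output):
#   + 1a2b3c4d Fix rebalance hang under DGM
# so len(commits.splitlines()) counts the patches still to be merged forward.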
|
Python
| 0.000001
|
@@ -740,22 +740,21 @@
uchbase/
-master
+spock
'),%0A
@@ -785,16 +785,81 @@
on_mc',%0A
+ 'couchbase/spock'),%0A ('couchbase/spock',%0A
|
ba3cb591f0be7b7443504491176e3889ba92be8c
|
Improve error message when keybinding not found
|
keybindings.py
|
keybindings.py
|
import vx
from functools import partial
from enum import Enum
_keys = {
'langle': '<', 'rangle': '>',
'lparen': '(', 'rparen': ')',
'lbrace': '{', 'rbrace': '}',
'lbracket': '[', 'rbracket': ']',
'grave': '`',
'backtick': '`',
'tilde': '~',
'bang': '!',
'exclamation': '!',
'at': '@',
'hash': '#',
'dollar': '$',
'percent': '%',
'carrot': '^',
'carat': '^',
'and': '&',
'ampersand': '&',
'star': '*',
'asterisk': '*',
'hyphen': '-',
'dash': '-',
'minus': '-',
'underscore': '_',
'equals': '=',
'equal': '=',
'plus': '+',
'pipe': '|',
'backslash': '\\',
'forwardslash': '/',
'slash': '/',
'quote': '"',
'apostrophe': '\'',
'question': '?',
'dot': '.',
'period': '.',
'comma': ',',
'backspace': chr(127),
'enter': chr(13),
'return': chr(13), # this one is funky
'escape': chr(27),
}
for x in 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789':
_keys[x] = x
Keys = Enum('Keys', _keys)
class _keybinding:
def __init__(self, key, printable):
if isinstance(key, Keys):
self.key = key.value
else:
self.key = key
self.printable = printable
def __str__(self):
return str(self.key)
def __sub__(self, other):
return self + _keyseparator() + other
class _keymodifier:
def __init__(self, mod, printable):
self.mod = mod
self.printable = printable
def __add__(self, other):
if not isinstance(other, _keybinding):
other = _keybinding(other, str(other))
other.key = self.mod(other.key)
other.printable = self.printable + '-' + other.printable
return other
_ctrl = _keymodifier(lambda c: chr(0x1f & ord(c)), 'C')
_alt = _keymodifier(lambda c: chr(0x80 | ord(c)), 'M')
class _keyseparator:
def __init__(self):
self.left = None
self.right = None
def __add__(self, other):
if isinstance(other, Keys):
other = _keybinding(other, str(other))
if type(other) is _keyseparator:
other.left = self
return other
if self.right:
self.right += other
else:
self.right = other
return self
def __radd__(self, other):
if type(other) is str:
            other = _keybinding(other, str(other))  # supply the printable form, as __add__ does
if self.left:
self.left += other
else:
self.left = other
return self
def __sub__(self, other):
if isinstance(other, Keys):
other = _keybinding(other, other)
return self + _keyseparator() + other
def __str__(self):
ret = ''
if self.left:
ret += str(self.left)
ret += ' '
if self.right:
ret += str(self.right)
return ret
def _tobinding(s):
'''Convert a key string (C-o) to a keycode.'''
class donothing:
def __add__(self, other): return other
binding = donothing()
for c,n in zip(s,s[1:]+' '):
if c == '-':
continue
elif c == 'C' and n == '-':
c = _ctrl
elif c == 'M' and n == '-':
c = _alt
elif c == ' ':
c = _keyseparator()
binding = binding + c
return binding
_keybindings = {}
_keybinding_traverser = _keybindings
def _bind(keys, command=None):
"""Bind a key to a command. Can be used as a decorator"""
if command is None:
def wrapper(func):
_bind(keys, func)
return func
return wrapper
# we split on space below so handle it here
if keys == ' ':
_keybindings[' '] = command
return
keys = str(keys)
if type(keys) is str:
squares = list(map(lambda x: str(_tobinding(x)), keys.split(' ')))
else:
squares = [keys]
prehops = squares[0:-1]
finalhop = squares[-1]
cur = _keybindings
for h in prehops:
if cur.get(h) is None:
cur[h] = {}
elif not isinstance(cur[h], dict):
print('Warning, overwriting old keybinding')
cur[h] = {}
cur = cur[h]
cur[finalhop] = command
def _quick_bind(key):
'''Bind a keycode to insert itself as text.'''
_bind(key, partial(vx.add_string, key))
# Quick-bind letter keys
for i in range(26):
char = chr(ord('a') + i)
_quick_bind(char)
char = chr(ord('A') + i)
_quick_bind(char)
# ...number keys
for i in range(10):
_quick_bind(str(i))
# ...symbols
for char in ['?', '<', '>', '\'', '/', '"', ':',
';', '.', ',', '!', '@', '#', '$',
'%', '^', '&', '*', '(', ')', '-',
'_', '+', '=', '\\', '|', '`', '~',
' ']:
_quick_bind(char)
# ...return/backspace
_bind(chr(13), partial(vx.add_string, '\n'))
_bind(chr(127), vx.backspace)
def _register_key(key):
global _keybinding_traverser
_keybinding_traverser = _keybinding_traverser.get(key)
if callable(_keybinding_traverser):
_keybinding_traverser()
_keybinding_traverser = _keybindings
elif _keybinding_traverser is None:
_keybinding_traverser = _keybindings
raise Exception('not found')
return False
return True
_key_callbacks = []
_key_callbacks.append(_register_key)
def _register_key(key):
for c in _key_callbacks:
if c(key): break
vx.key_callbacks = _key_callbacks
vx.register_key = _register_key
vx.tobinding = _tobinding
vx.bind = _bind
vx.ctrl = _ctrl
vx.alt = _alt
vx.keys = Keys
|
Python
| 0.000004
|
@@ -5490,17 +5490,24 @@
ot found
-'
+ ' + key
)%0A
|
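The tiny diff appends the offending key to the exception text. The general pattern, sketched with hypothetical names (lookup, bindings are not from the project):

def lookup(bindings, key):
    try:
        return bindings[key]
    except KeyError:
        # Include the value that failed so the traceback is actionable.
        raise KeyError('keybinding not found: %r' % (key,))

# lookup({'C-x': 'save'}, 'C-c') raises: KeyError: "keybinding not found: 'C-c'"
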
c94960b8c42ab46331cf1f5b76c2c2f4deb33b9d
|
fix KeyError on small word set
|
typetrainer/tutors/common.py
|
typetrainer/tutors/common.py
|
import random
import collections
import itertools
from typetrainer.generator import make_char_chain, generate_word
class Filler(object):
def __init__(self, words, make_lengths_seq):
self.dist = {}
self.first, self.other, self.word_chars = make_char_chain(words, 3, self.dist)
self.lengths = list(make_lengths_seq(words))
self.old_generated = collections.deque([], 100)
pos = random.randint(0, len(self.lengths) - 1)
left = itertools.islice(self.lengths, pos, None)
right = itertools.islice(self.lengths, 0, pos)
self.liter = itertools.cycle(itertools.chain(left, right))
def __iter__(self):
while True:
t, l = self.liter.next()
if t == 'w':
for _ in range(50):
word = generate_word(self.first, self.other, l, 3)
if word not in self.old_generated:
break
else:
continue
self.old_generated.append(word)
yield word
else:
yield l
def change_distribution(self, seq, prob_factor, replace=False):
if replace:
self.dist.clear()
self.dist[seq] = prob_factor
self.reset_parts()
def reset_distribution(self):
self.dist.clear()
self.reset_parts()
def reset_parts(self):
for p in self.other.values():
p.reset()
for p in self.first.values():
p.reset()
def strip_non_word_chars(self, string):
result = ''
for c in string:
if c in self.word_chars:
result += c
return result
|
Python
| 0.000004
|
@@ -652,32 +652,61 @@
__iter__(self):%0A
+ skip_to_word = False%0A
while Tr
@@ -766,17 +766,190 @@
if
-t == 'w':
+skip_to_word:%0A while t != 'w':%0A t, l = self.liter.next()%0A%0A skip_to_word = False%0A%0A if t == 'w':%0A word = None
%0A
@@ -981,16 +981,45 @@
ge(50):%0A
+ try:%0A
@@ -1081,16 +1081,84 @@
, l, 3)%0A
+ except KeyError:%0A break%0A%0A
@@ -1234,16 +1234,17 @@
break%0A
+%0A
@@ -1243,37 +1243,84 @@
-else:
+if not word:%0A skip_to_word = True
%0A
|
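The patch survives a KeyError from generate_word by discarding the attempt and skipping ahead to the next word slot, which is exactly the failure mode of a character chain built from too few words. A toy reproduction (this generate_word is a stand-in for the project's function, not its real implementation):

import random

def generate_word(chain, length):
    # Toy Markov-style generator; raises KeyError when the chain has no
    # continuation for the current prefix (the small-word-set failure mode).
    word = prefix = random.choice(list(chain))
    for _ in range(length - 1):
        word += random.choice(chain[prefix])  # KeyError on an unseen prefix
        prefix = word[-1]
    return word

def safe_generate(chain, length):
    try:
        return generate_word(chain, length)
    except KeyError:
        return None  # caller skips ahead, as the patch above does

print(safe_generate({'a': ['b'], 'b': ['a']}, 4))  # e.g. 'abab'
print(safe_generate({'a': ['b']}, 4))              # hits the missing 'b' prefix -> None
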
078e409d3c09e9ec0699ea95a2786c2342474bba
|
Return timestamp as a float in JSON.
|
views.py
|
views.py
|
import json
from collections import deque
from flask import request, render_template
from flask import current_app as app, abort
from util import make_status_response, generate_filename, jsonify
RECORDS_QUEUE = deque(maxlen=100)
def _prime_records_queue(q):
with open(generate_filename(app.config), 'r') as trace_file:
for line in trace_file:
if len(RECORDS_QUEUE) == RECORDS_QUEUE.maxlen:
break
timestamp, record = line.split(':', 1)
record = json.loads(record)
record['timestamp'] = timestamp
RECORDS_QUEUE.append(record)
def add_record():
if not request.json:
app.logger.error("Expected JSON, but POSTed data was %s", request.data)
return abort(400)
records = request.json.get('records', None)
if records is None or not hasattr(records, '__iter__'):
app.logger.error("Expected JSON, but POSTed data was %s", request.data)
return abort(400)
with open(generate_filename(app.config), 'a') as trace_file:
for record in records:
timestamp = record.pop('timestamp')
trace_file.write("%s: %s\r\n" % (timestamp, json.dumps(record)))
record['timestamp'] = timestamp
RECORDS_QUEUE.append(record)
return make_status_response(201)
def show_records():
_prime_records_queue(RECORDS_QUEUE)
return jsonify(records=list(RECORDS_QUEUE))
def visualization():
return render_template('visualization.html')
|
Python
| 0
|
@@ -256,34 +256,35 @@
eue(q):%0A
-with open(
+filename =
generate_fil
@@ -300,16 +300,52 @@
.config)
+%0A try:%0A with open(filename
, 'r') a
@@ -350,32 +350,36 @@
as trace_file:%0A
+
for line
@@ -406,16 +406,20 @@
+
if len(R
@@ -477,14 +477,22 @@
+
+
break%0A
+
@@ -546,24 +546,28 @@
+
record = jso
@@ -563,16 +563,32 @@
ecord =
+_massage_record(
json.loa
@@ -597,25 +597,214 @@
(record)
-%0A
+, float(timestamp))%0A RECORDS_QUEUE.append(record)%0A except IOError:%0A app.logger.warn(%22No active trace file found at %25s%22 %25 filename)%0A%0A%0Adef _massage_record(record, timestamp):%0A
reco
@@ -813,32 +813,36 @@
%5B'timestamp'%5D =
+int(
timestamp%0A
@@ -838,49 +838,35 @@
tamp
-%0A RECORDS_QUEUE.append(
+ * 1000)%0A return
record
-)
+%0A
%0A%0A%0Ad
@@ -1468,31 +1468,42 @@
record
-%5B'timestamp'%5D =
+ = _massage_record(record,
timesta
@@ -1504,16 +1504,17 @@
imestamp
+)
%0A
|
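The diff's _massage_record boils down to one conversion: the epoch-seconds string read from the trace file is parsed as a float and, despite the subject line, stored as an integer millisecond count, so JSON emits a plain number rather than a string. Sketch:

def massage_timestamp(raw):
    # '12.5' (seconds, as read from the trace file) -> 12500 (milliseconds)
    return int(float(raw) * 1000)

assert massage_timestamp('12.5') == 12500
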
2693ac98abf8eca4b96991c3d3fbf8e452eeead3
|
Use lzma instead of zlib
|
prerender/prerender.py
|
prerender/prerender.py
|
import os
import time
import zlib
import asyncio
import logging
from urllib.parse import urlparse
from concurrent.futures import ThreadPoolExecutor
from multiprocessing import cpu_count
import aiofiles
import aiofiles.os
from sanic import Sanic
from sanic import response
from sanic.exceptions import NotFound
from async_timeout import timeout
from .chromerdp import ChromeRemoteDebugger
logger = logging.getLogger(__name__)
executor = ThreadPoolExecutor(max_workers=cpu_count())
PRERENDER_TIMEOUT = int(os.environ.get('PRERENDER_TIMEOUT', 30))
ALLOWED_DOMAINS = set(dm.strip() for dm in os.environ.get('PRERENDER_ALLOWED_DOMAINS', '').split(',') if dm.strip())
CACHE_ROOT_DIR = os.environ.get('CACHE_ROOT_DIR', '/tmp/prerender')
CACHE_LIVE_TIME = int(os.environ.get('CACHE_LIVE_TIME', 3600))
class Prerender:
def __init__(self, host='localhost', port=9222, loop=None):
self.host = host
self.port = port
self.loop = loop
self._rdp = ChromeRemoteDebugger(host, port, loop=loop)
self._ctrl_tab = None
async def connect(self):
tabs = await self._rdp.tabs()
self._ctrl_tab = tabs[0]
await self._ctrl_tab.attach()
logger.info('Connected to control tab %s', self._ctrl_tab.id)
async def new_tab(self, url=None):
await self._ctrl_tab.send({
'method': 'Target.createTarget',
'params': {
'url': url or 'about:blank'
}
})
res = await self._ctrl_tab.recv()
tab_id = res['result']['targetId']
logger.info('Created new tab %s', tab_id)
tabs = await self._rdp.tabs()
tab = [tb for tb in tabs if tb.id == tab_id][0]
return tab
async def close_tab(self, tab_id):
await self._ctrl_tab.send({
'method': 'Target.closeTarget',
'params': {'targetId': tab_id}
})
res = await self._ctrl_tab.recv()
logger.info('Closed tab %s', tab_id)
return res
async def close(self):
tabs = await self._rdp.tabs()
for tab in tabs:
await tab.close()
logger.info('All tabs closed')
async def prerender(renderer, url):
tab = await renderer.new_tab()
await tab.attach()
await tab.listen()
await tab.set_user_agent('Mozilla/5.0 (Linux) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3033.0 Safari/537.36 Prerender (bosondata)') # NOQA
try:
await tab.navigate(url)
with timeout(PRERENDER_TIMEOUT):
html = await tab.wait()
finally:
await renderer.close_tab(tab.id)
return html
def _get_cache_file_path(parsed_url):
path = parsed_url.hostname
path = os.path.join(path, os.path.normpath(parsed_url.path[1:]))
if parsed_url.query:
path = os.path.join(path, os.path.normpath(parsed_url.query))
return os.path.join(CACHE_ROOT_DIR, path, 'prerender.cache.html')
async def _fetch_from_cache(path, loop):
async with aiofiles.open(path, mode='rb', executor=executor) as f:
res = await loop.run_in_executor(executor, zlib.decompress, await f.read())
return res.decode('utf-8')
def _save_to_cache(path, html):
save_dir = os.path.dirname(path)
try:
os.makedirs(save_dir, 0o755)
except OSError:
pass
try:
compressed = zlib.compress(html.encode('utf-8'))
with open(path, mode='wb') as f:
f.write(compressed)
except Exception:
logger.exception('Error writing cache')
async def _is_cache_valid(path):
if not os.path.exists(path):
return False
stat = await aiofiles.os.stat(path, executor=executor)
if time.time() - stat.st_mtime <= CACHE_LIVE_TIME:
return True
return False
app = Sanic(__name__)
@app.exception(NotFound)
async def handle_request(request, exception):
url = request.url
if url.startswith('/http'):
url = url[1:]
if request.query_string:
url = url + '?' + request.query_string
parsed_url = urlparse(url)
if ALLOWED_DOMAINS:
if parsed_url.hostname not in ALLOWED_DOMAINS:
            return response.text('Forbidden', status=403)
cache_path = _get_cache_file_path(parsed_url)
try:
if await _is_cache_valid(cache_path):
html = await _fetch_from_cache(cache_path, request.app.loop)
logger.info('Got 200 for %s in cache', url)
return response.html(html, headers={'X-Prerender-Cache': 'hit'})
except Exception:
logger.exception('Error reading cache')
start_time = time.time()
try:
html = await prerender(request.app.prerender, url)
duration_ms = int((time.time() - start_time) * 1000)
logger.info('Got 200 for %s in %dms', url, duration_ms)
executor.submit(_save_to_cache, cache_path, html)
return response.html(html, headers={'X-Prerender-Cache': 'miss'})
except asyncio.TimeoutError:
duration_ms = int((time.time() - start_time) * 1000)
logger.warning('Got 504 for %s in %dms', url, duration_ms)
return response.text('Gateway timeout', status=504)
except Exception:
duration_ms = int((time.time() - start_time) * 1000)
logger.exception('Internal Server Error for %s in %dms', url, duration_ms)
return response.text('Internal Server Error', status=500)
@app.listener('after_server_start')
def after_server_start(app, loop):
app.prerender = Prerender(loop=loop)
loop.run_until_complete(app.prerender.connect())
|
Python
| 0.000002
|
@@ -22,20 +22,20 @@
%0Aimport
-zlib
+lzma
%0Aimport
@@ -3084,20 +3084,20 @@
ecutor,
-zlib
+lzma
.decompr
@@ -3336,12 +3336,12 @@
d =
-zlib
+lzma
.com
|
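The swap works mechanically because lzma mirrors zlib's one-shot compress/decompress API; the usual trade-off is a better compression ratio for more CPU time. Round-trip sketch:

import lzma

blob = lzma.compress('<html>cached page</html>'.encode('utf-8'))
assert lzma.decompress(blob).decode('utf-8') == '<html>cached page</html>'
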
2a7ed7c2d6f37c3b6965ad92b21cecc0a4abd91a
|
Add first version to upload via BioBlend
|
upload_datasets_to_galaxy.py
|
upload_datasets_to_galaxy.py
|
#!/usr/bin/python3
import argparse
# from bioblend.galaxy import GalaxyInstance
import configparser
def upload_datasets_to_galaxy():
# Arguments initialization
    parser = argparse.ArgumentParser(description="Script to upload a folder into "
"Galaxy Data Libraries")
parser.add_argument('--folder', help='Folder to add in Data Libraries of Galaxy')
args = parser.parse_args()
# Fetch arguments
folder_path = args.folder
# Launch config
config = configparser.ConfigParser()
config.read('config.ini')
galaxy_config = config['Galaxy']
# gi = GalaxyInstance(url=galaxy_config['url'], key=galaxy_config['api-key'])
# print(gi.histories.get_histories())
if __name__ == "__main__":
upload_datasets_to_galaxy()
|
Python
| 0
|
@@ -28,18 +28,16 @@
rgparse%0A
-#
from bio
@@ -90,16 +90,26 @@
igparser
+%0Aimport os
%0A%0Adef up
@@ -628,18 +628,16 @@
y'%5D%0A%0A
- #
gi = Ga
@@ -657,67 +657,1333 @@
url=
-galaxy_config%5B'url'%5D, key=galaxy_config%5B'api-key'%5D)%0A%0A #
+'http://127.0.0.1:8080', key='5e8cc5748922c598c1aa6ec9e605780f')%0A%0A name_folder_test = '160802_D00281L_0127_C9NPBANXX'%0A path_folder_test = './test-data/staging/' + name_folder_test%0A path_to_fastq_folder_test = os.path.join(path_folder_test, 'fastq')%0A%0A # TODO: Make a loop which execute the following, for each directory found%0A libs_folder = gi.libraries.get_libraries(name=name_folder_test)%0A # TODO: Check the library does already exist%0A # Create the library with the name equal to the folder name%0A # and description 'Library' + folder_name%0A dict_library_test = gi.libraries.create_library(name_folder_test,%0A description=' '.join(%5B'Library', name_folder_test%5D),%0A synopsis=None)%0A%0A # Upload the data in the library just created%0A list_of_files = '%5Cn'.join(os.listdir(path_to_fastq_folder_test))%0A unknow_return = gi.libraries.upload_from_galaxy_filesystem(%0A library_id=dict_library_test.get('id'),%0A filesystem_paths=list_of_files,%0A file_type='auto',%0A link_data_only='link_to_files',%0A )%0A print(unknow_return)%0A # TODO: Check if no new files, else upload them%0A # print(%22Already there! Skipping %7B0%7D%22.format(name_folder_test))%0A%0A #
prin
|
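A trimmed sketch of the BioBlend calls the diff introduces, with the hard-coded URL and API key replaced by obvious placeholders (the call names and keyword arguments are the ones visible in the patch; the folder name and file path are illustrative):

from bioblend.galaxy import GalaxyInstance

gi = GalaxyInstance(url='http://localhost:8080', key='REPLACE_WITH_API_KEY')

# Reuse the library if it exists, otherwise create one named after the folder.
existing = gi.libraries.get_libraries(name='my_run_folder')
library = existing[0] if existing else gi.libraries.create_library(
    'my_run_folder', description='Library my_run_folder')

# Link files already present on the Galaxy server's filesystem.
gi.libraries.upload_from_galaxy_filesystem(
    library_id=library['id'],
    filesystem_paths='/data/staging/my_run_folder/fastq/sample_R1.fastq',
    file_type='auto',
    link_data_only='link_to_files')
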
efd38d1f59d73443e47071c1fd7c87b492372915
|
Fix peername retrieval in user action log
|
dvhb_hybrid/user_action_log/base_amodels.py
|
dvhb_hybrid/user_action_log/base_amodels.py
|
from dvhb_hybrid import utils
from dvhb_hybrid.amodels import Model, method_connect_once
from django.contrib.contenttypes.models import ContentType
from .enums import UserActionLogEntryType, UserActionLogEntrySubType, UserActionLogStatus
class BaseUserActionLogEntry(Model):
"""
Abstract action log entry async model class
"""
@classmethod
def get_table_from_django(cls, django_model):
return super().get_table_from_django(django_model, 'payload')
@classmethod
def set_defaults(cls, data: dict):
data.setdefault('created_at', utils.now())
@classmethod
@method_connect_once
async def create_record(
cls, request, type, subtype, message=None, payload=None, status=None, user_id=None, object=None, connection=None):
rec_data = await cls._prepare_data(
request, message, type, subtype, payload, user_id, object, status, connection=connection)
return await cls.create(**rec_data, connection=connection)
@classmethod
async def _prepare_data(cls, request, message, type, subtype, payload, user_id, object, status, connection):
rec_data = dict(
ip_address=None,
message=message,
user_id=user_id,
type=type.value,
subtype=subtype.value,
payload=payload,
content_type_id=None,
object_id=None,
object_repr=None,
)
if request is not None:
peername = request.transport.get_extra_info('peername')
if peername is not None:
rec_data['ip_address'], _ = peername
if hasattr(request, 'user'):
if rec_data['user_id'] is None:
rec_data['user_id'] = request.user.id
if type == UserActionLogEntryType.auth and object is None:
object = request.user
if object is not None:
if message is None and type == UserActionLogEntryType.crud:
model_name = object.__class__.__name__
rec_data['message'] = 'User {}d {}'.format(subtype.value, model_name)
rec_data['object_id'] = str(object.pk)
rec_data['object_repr'] = repr(object)[:200]
rec_data['content_type_id'] = await cls.app.m.django_content_type.get_id_by_amodel_name(
object.__class__.__name__, connection=connection)
if isinstance(status, UserActionLogStatus):
rec_data['status'] = status.value
return rec_data
@classmethod
@method_connect_once
async def create_login(cls, request, user_id=None, connection=None):
return await cls.create_record(
request,
message="User logged in",
user_id=user_id,
type=UserActionLogEntryType.auth,
subtype=UserActionLogEntrySubType.login,
connection=connection)
@classmethod
@method_connect_once
async def create_logout(cls, request, connection=None):
return await cls.create_record(
request,
message="User logged out",
type=UserActionLogEntryType.auth,
subtype=UserActionLogEntrySubType.logout,
connection=connection)
@classmethod
@method_connect_once
async def create_change_password(cls, request, user_id=None, connection=None):
return await cls.create_record(
request,
user_id=user_id,
message="User changed password",
type=UserActionLogEntryType.auth,
subtype=UserActionLogEntrySubType.change_password,
connection=connection)
@classmethod
@method_connect_once
async def create_user_registration(cls, request, connection=None):
return await cls.create_record(
request,
message="User registered",
type=UserActionLogEntryType.reg,
subtype=UserActionLogEntrySubType.create,
connection=connection)
@classmethod
@method_connect_once
async def create_user_deletion(cls, request, connection=None):
return await cls.create_record(
request,
message="User deleted",
type=UserActionLogEntryType.reg,
subtype=UserActionLogEntrySubType.delete,
connection=connection)
@classmethod
@method_connect_once
async def create_user_profile_update(cls, request, connection=None):
return await cls.create_record(
request,
message="User updated profile",
type=UserActionLogEntryType.reg,
subtype=UserActionLogEntrySubType.update,
connection=connection)
@classmethod
@method_connect_once
async def create_user_change_email_address(
cls, request, user_id, old_email, new_email, confirmation_code, connection=None):
return await cls.create_record(
request,
message="User changed email address",
type=UserActionLogEntryType.email,
subtype=UserActionLogEntrySubType.update,
payload=dict(old_email=old_email, new_email=new_email, confirmation_code=confirmation_code),
user_id=user_id,
connection=connection)
@classmethod
@method_connect_once
async def create_user_create_model(
cls, request, object, connection=None):
return await cls.create_record(
request,
object=object,
type=UserActionLogEntryType.crud,
subtype=UserActionLogEntrySubType.create,
connection=connection)
@classmethod
@method_connect_once
async def create_user_update_model(
cls, request, object, connection=None):
return await cls.create_record(
request,
object=object,
type=UserActionLogEntryType.crud,
subtype=UserActionLogEntrySubType.update,
connection=connection)
@classmethod
@method_connect_once
async def create_user_delete_model(
cls, request, object, connection=None):
return await cls.create_record(
request,
object=object,
type=UserActionLogEntryType.crud,
subtype=UserActionLogEntrySubType.delete,
connection=connection)
class DjangoContentType(Model):
table = BaseUserActionLogEntry.get_table_from_django(ContentType)
@classmethod
@method_connect_once
async def get_id_by_amodel_name(cls, name, connection=None):
name = name.lower()
name = name.replace('_', '')
where = [cls.table.c.model == name]
result = await cls.get_one(*where, connection=connection, silent=True)
if result is not None:
return result.id
|
Python
| 0.003282
|
@@ -1529,16 +1529,47 @@
ername')
+ if request.transport else None
%0A
|
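The fix guards against request.transport being None (as it is under test clients and after disconnects) before asking for the peer address. The shape of the guard, isolated:

def client_ip(request):
    peername = request.transport.get_extra_info('peername') if request.transport else None
    if peername is not None:
        host, _port = peername  # peername is a (host, port) tuple for TCP
        return host
    return None
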
1f4006ba9831f47a7ccc3fa0f8f9fbbb44b0c217
|
fix plot_matplotlib_hist2d.py covariance matrix
|
examples/plotting/plot_matplotlib_hist2d.py
|
examples/plotting/plot_matplotlib_hist2d.py
|
#!/usr/bin/env python
"""
========================================
Plot a 2D ROOT histogram with matplotlib
========================================
This example demonstrates how a 2D ROOT histogram can be displayed with
matplotlib.
"""
print __doc__
import ROOT
from matplotlib import pyplot as plt
from rootpy.plotting import root2matplotlib as rplt
from rootpy.plotting import Hist2D
import numpy as np
a = Hist2D(100, -3, 3, 100, 0, 6)
a.fill_array(np.random.multivariate_normal(
mean=(0, 3),
cov=np.arange(4).reshape(2, 2),
size=(1E6,)))
fig, (ax1, ax2, ax3) = plt.subplots(nrows=1, ncols=3, figsize=(15, 5))
ax1.set_title('hist2d')
rplt.hist2d(a, axes=ax1)
ax2.set_title('imshow')
im = rplt.imshow(a, axes=ax2)
ax3.set_title('contour')
rplt.contour(a, axes=ax3)
fig.subplots_adjust(right=0.8)
cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.7])
fig.colorbar(im, cax=cbar_ax)
if not ROOT.gROOT.IsBatch():
plt.show()
|
Python
| 0.000153
|
@@ -508,34 +508,26 @@
cov=
-np.arange(4).reshape(2, 2)
+%5B%5B1, .5%5D, %5B.5, 1%5D%5D
,%0A
|
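Why the original cov was wrong: np.arange(4).reshape(2, 2) is [[0, 1], [2, 3]], which is not symmetric, and numpy's multivariate_normal expects a symmetric positive semi-definite covariance (it warns, and the sampled distribution is not the intended one). A quick check of old versus new:

import numpy as np

bad = np.arange(4).reshape(2, 2)              # [[0, 1], [2, 3]]
good = np.array([[1.0, 0.5], [0.5, 1.0]])     # the matrix the fix installs

assert not np.allclose(bad, bad.T)            # not symmetric: invalid covariance
assert np.allclose(good, good.T)
assert np.all(np.linalg.eigvals(good) >= 0)   # positive semi-definite
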
0e7a6f58bc740479a616c973c5973bd255501004
|
Update feedback_tags.py
|
feedback_form/templatetags/feedback_tags.py
|
feedback_form/templatetags/feedback_tags.py
|
"""Template tags and filters for the ``feedback_form`` app."""
from django import template
from ..app_settings import * # NOQA
from ..forms import FeedbackForm
register = template.Library()
@register.inclusion_tag('feedback_form/partials/form.html', takes_context=True)
def feedback_form(context):
"""Template tag to render a feedback form."""
user = None
if context['request'].user.is_authenticated():
user = context['request'].user
return {
'form': FeedbackForm(url=context['request'].path, user=user),
'background_color': FEEDBACK_FORM_COLOR,
'text_color': FEEDBACK_FORM_TEXTCOLOR,
'text': FEEDBACK_FORM_TEXT,
}
|
Python
| 0
|
@@ -362,16 +362,104 @@
= None%0A
+ url = None%0A if context.get('request'):%0A url = context%5B'request'%5D.path%0A
if c
@@ -501,16 +501,20 @@
ated():%0A
+
@@ -590,39 +590,19 @@
orm(url=
-context%5B'request'%5D.path
+url
, user=u
|
426dd82e9b2a7c2de2b6ba9091ad67057ffe9f5f
|
Create db, if there isn't one.
|
statiki.wsgi
|
statiki.wsgi
|
import os
from os.path import abspath, dirname
import sys
# activate the python virtualenv for this application
HOME = os.environ['HOME']
activate_this = '%s/.virtualenvs/statiki/bin/activate_this.py' % HOME
execfile(activate_this, dict(__file__=activate_this))
# Add the source directory to the path
HERE = dirname(abspath(__file__))
sys.path.insert(0, HERE)
from statiki import app as application
|
Python
| 0
|
@@ -372,16 +372,20 @@
i import
+ db,
app as
@@ -384,20 +384,36 @@
app as application%0A
+db.create_all()%0A
|
78bfcf1561597113a91f7449642085a392c20429
|
use doctype instead of service name to send email
|
frappe/integrations/offsite_backup_utils.py
|
frappe/integrations/offsite_backup_utils.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
import glob
import os
from frappe.utils import split_emails, get_backups_path
def send_email(success, service_name, doctype, email_field, error_status=None):
recipients = get_recipients(service_name, email_field)
if not recipients:
frappe.log_error("No Email Recipient found for {0}".format(service_name),
"{0}: Failed to send backup status email".format(service_name))
return
if success:
if not frappe.db.get_value(doctype, None, "send_email_for_successful_backup"):
return
subject = "Backup Upload Successful"
message = """
<h3>Backup Uploaded Successfully!</h3>
<p>Hi there, this is just to inform you that your backup was successfully uploaded to your {0} bucket. So relax!</p>""".format(service_name)
else:
subject = "[Warning] Backup Upload Failed"
message = """
<h3>Backup Upload Failed!</h3>
<p>Oops, your automated backup to {0} failed.</p>
<p>Error message: {1}</p>
<p>Please contact your system manager for more information.</p>""".format(service_name, error_status)
frappe.sendmail(recipients=recipients, subject=subject, message=message)
def get_recipients(service_name, email_field):
if not frappe.db:
frappe.connect()
return split_emails(frappe.db.get_value(service_name, None, email_field))
def get_latest_backup_file(with_files=False):
def get_latest(file_ext):
file_list = glob.glob(os.path.join(get_backups_path(), file_ext))
return max(file_list, key=os.path.getctime)
latest_file = get_latest('*.sql.gz')
if with_files:
latest_public_file_bak = get_latest('*-files.tar')
latest_private_file_bak = get_latest('*-private-files.tar')
return latest_file, latest_public_file_bak, latest_private_file_bak
return latest_file
def get_file_size(file_path, unit):
if not unit:
unit = 'MB'
file_size = os.path.getsize(file_path)
memory_size_unit_mapper = {'KB': 1, 'MB': 2, 'GB': 3, 'TB': 4}
i = 0
while i < memory_size_unit_mapper[unit]:
file_size = file_size / 1000.0
i += 1
return file_size
def validate_file_size():
frappe.flags.create_new_backup = True
latest_file = get_latest_backup_file()
file_size = get_file_size(latest_file, unit='GB')
if file_size > 1:
frappe.flags.create_new_backup = False
|
Python
| 0
|
@@ -366,35 +366,30 @@
_recipients(
-service_nam
+doctyp
e, email_fie
@@ -1280,27 +1280,22 @@
ipients(
-service_nam
+doctyp
e, email
@@ -1383,27 +1383,22 @@
t_value(
-service_nam
+doctyp
e, None,
|
5659ae2668edb934f422e15edb81b1977da9b2c2
|
clean up
|
sail.py
|
sail.py
|
#!/usr/bin/python
# David Kohreidze
import csv
import os
import re
with open('keywords.csv', 'rU') as csvf:
reader = csv.reader(csvf)
links = {rows[0]:rows[1] for rows in reader} # builds dictionary from file
for f in os.listdir('.'): # for every file in the current directory
if os.path.isfile(f): # must be a file
if f.endswith(".txt"): # must be a text file
s = open(f).read() # read file
print "Processing %s.." %f
for i in links:
s = re.sub(r'\b'+i+r'\b', '<a href="%s">%s</a>'%(links[i],i), s, 1)
f = open(f, 'w')
f.write(s)
f.close()
print "Complete."
|
Python
| 0.000001
|
@@ -180,107 +180,35 @@
der%7D
- # builds dictionary from file%0A%0Afor f in os.listdir('.'): # for every file in the current directory
+%0A%0Afor f in os.listdir('.'):
%0A i
@@ -232,24 +232,8 @@
f):
-# must be a file
%0A
@@ -260,29 +260,8 @@
%22):
-# must be a text file
%0A%09
@@ -282,20 +282,8 @@
ad()
- # read file
%0A%09
@@ -313,12 +313,8 @@
%25f%0A
-%09 %0A
%09 f
@@ -377,16 +377,17 @@
%3E%25s%3C/a%3E'
+
%25(links%5B
@@ -403,11 +403,8 @@
1)%0A
-%09 %0A
%09 f
@@ -446,16 +446,17 @@
close()%0A
+%0A
print %22C
|
838361c976d481dcd8932e2a6caa0008935dc3c1
|
Move LoopingCall initialization to __init__().
|
vumi/transports/xmpp/xmpp.py
|
vumi/transports/xmpp/xmpp.py
|
# -*- test-case-name: vumi.transports.xmpp.tests.test_xmpp -*-
# -*- encoding: utf-8 -*-
from twisted.python import log
from twisted.words.protocols.jabber.jid import JID
from twisted.words.xish import domish
from twisted.words.xish.domish import Element as DomishElement
from twisted.internet.task import LoopingCall
from twisted.internet.defer import inlineCallbacks
from wokkel.client import XMPPClient
from wokkel.ping import PingClientProtocol
from wokkel.xmppim import (RosterClientProtocol, MessageProtocol,
PresenceClientProtocol)
from vumi.transports.base import Transport
class TransportRosterClientProtocol(RosterClientProtocol):
def connectionInitialized(self):
# get the roster as soon as the connection's been initialized, this
# allows us to see who's online but more importantly, allows us to see
# who's added us to their roster. This allows us to auto subscribe to
# anyone, automatically adding them to our roster, skips the "user ...
# wants to add you to their roster, allow? yes/no" hoopla.
self.getRoster()
class TransportPresenceClientProtocol(PresenceClientProtocol):
"""
A custom presence protocol to automatically accept any subscription
attempt.
"""
def subscribeReceived(self, entity):
self.subscribe(entity)
self.subscribed(entity)
def unsubscribeReceived(self, entity):
self.unsubscribe(entity)
self.unsubscribed(entity)
class XMPPTransportProtocol(MessageProtocol, object):
def __init__(self, jid, message_callback, connection_callback,
connection_lost_callback=None,):
super(MessageProtocol, self).__init__()
self.jid = jid
self.message_callback = message_callback
self.connection_callback = connection_callback
self.connection_lost_callback = connection_lost_callback
def reply(self, jid, content):
message = domish.Element((None, "message"))
# intentionally leaving from blank, leaving for XMPP server
# to figure out
message['to'] = jid
message['type'] = 'chat'
message.addUniqueId()
message.addElement((None, 'body'), content=content)
self.xmlstream.send(message)
def onMessage(self, message):
"""Messages sent to the bot will arrive here. Command handling routing
is done in this function."""
if not isinstance(message.body, DomishElement):
return None
text = unicode(message.body).encode('utf-8').strip()
self.message_callback(
to_addr=self.jid.userhost(),
from_addr=message['from'],
content=text,
transport_type='xmpp',
transport_metadata={
'xmpp_id': message.getAttribute('id'),
})
def connectionMade(self):
self.connection_callback()
return super(XMPPTransportProtocol, self).connectionMade()
def connectionLost(self, reason):
if self.connection_lost_callback is not None:
self.connection_lost_callback(reason)
log.msg("XMPP Connection lost.")
super(XMPPTransportProtocol, self).connectionLost(reason)
class XMPPTransport(Transport):
"""XMPP transport.
Configuration parameters:
:type host: str
:param host:
The host of the XMPP server to connect to.
:type port: int
:param port:
The port on the XMPP host to connect to.
:type debug: bool
:param debug:
Whether or not to show all the XMPP traffic. Defaults to False.
:type username: str
:param username:
The XMPP account username
:type password: str
:param password:
The XMPP account password
:type status: str
:param status:
The XMPP status to display
:type ping_interval: int
:param ping_interval:
How often (in seconds) to send a keep-alive ping to the XMPP server
to keep the connection alive. Defaults to 60 seconds.
"""
start_message_consumer = False
_xmpp_protocol = XMPPTransportProtocol
_xmpp_client = XMPPClient
def validate_config(self):
self.host = self.config['host']
self.port = int(self.config['port'])
self.debug = self.config.get('debug', False)
self.username = self.config['username']
self.password = self.config['password']
self.status = self.config['status']
self.ping_interval = self.config.get('ping_interval', 60)
def setup_transport(self):
log.msg("Starting XMPPTransport: %s" % self.transport_name)
statuses = {None: self.status}
self.jid = JID(self.username)
self.xmpp_client = self._xmpp_client(self.jid, self.password,
self.host, self.port)
self.xmpp_client.logTraffic = self.debug
self.xmpp_client.setServiceParent(self)
presence = TransportPresenceClientProtocol()
presence.setHandlerParent(self.xmpp_client)
presence.available(statuses=statuses)
self.pinger = PingClientProtocol()
self.pinger.setHandlerParent(self.xmpp_client)
self.ping_call = LoopingCall(self.send_ping)
self.ping_call.start(self.ping_interval)
roster = TransportRosterClientProtocol()
roster.setHandlerParent(self.xmpp_client)
self.xmpp_protocol = self._xmpp_protocol(
self.jid, self.publish_message, self._setup_message_consumer)
self.xmpp_protocol.setHandlerParent(self.xmpp_client)
log.msg("XMPPTransport %s started." % self.transport_name)
@inlineCallbacks
def send_ping(self):
if self.xmpp_client.xmlstream:
yield self.pinger.ping(self.jid)
def teardown_transport(self):
log.msg("XMPPTransport %s stopped." % self.transport_name)
ping_call = getattr(self, 'ping_call', None)
if ping_call and ping_call.running:
ping_call.stop()
def handle_outbound_message(self, message):
recipient = message['to_addr']
text = message['content']
jid = JID(recipient).userhost()
if not self.xmpp_protocol.xmlstream:
log.err("Outbound undeliverable, XMPP not initialized yet.")
return False
else:
self.xmpp_protocol.reply(jid, text)
|
Python
| 0
|
@@ -4142,16 +4142,184 @@
Client%0A%0A
+ def __init__(self, options, config=None):%0A super(XMPPTransport, self).__init__(options, config=config)%0A self.ping_call = LoopingCall(self.send_ping)%0A%0A
def
@@ -5356,61 +5356,8 @@
nt)%0A
- self.ping_call = LoopingCall(self.send_ping)%0A
|
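The point of moving the LoopingCall into __init__ is lifecycle safety: teardown_transport currently needs a getattr guard because setup may never have run. Constructing the call eagerly means the attribute always exists and only .running needs checking. A minimal sketch of the pattern with a plain class (no vumi specifics):

from twisted.internet.task import LoopingCall

class Pinger(object):
    def __init__(self):
        # Created eagerly: teardown() can run safely even if setup() never did.
        self.ping_call = LoopingCall(self.send_ping)

    def setup(self, interval):
        self.ping_call.start(interval)

    def teardown(self):
        if self.ping_call.running:
            self.ping_call.stop()

    def send_ping(self):
        pass  # placeholder for the real keep-alive
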
03c221d7ac1ca955b41577d525bd40b6188045ea
|
Clarify comment.
|
size.py
|
size.py
|
#!/usr/bin/python
# calculate the number of pixels for a stimulus
# fixed: viewer distance, vertical resolution, visual angle
# argv[1] = vertical screen height
from math import atan2, degrees
import sys
if sys.argv[1]:
h = float(sys.argv[1])
else:
h = 21.5 # Dell laptop
h = 20.6 # Macbook Pro
h = 28.7 # Dell monitor
d = 60 # distance between monitor and participant in cm
r = 768 # vertical resolution of monitor
size_in_px = 0 # stimulus size in pixels
size_in_deg = 0
target_degrees = 2
# calculate the number of degrees that correspond to a single pixel. This will
# generally be a very small value, something like 0.03.
deg_per_px = degrees(atan2(.5*h, d)) / (.5*r)
print '%s degrees correspond to a single pixel' % deg_per_px
# calculate the size of the stimulus in degrees
while size_in_deg < target_degrees:
size_in_px += 1
size_in_deg = size_in_px * deg_per_px
print 'The size of the stimulus is %s pixels and %s visual degrees' % (size_in_px, size_in_deg)
|
Python
| 0.000001
|
@@ -154,16 +154,21 @@
n height
+ (cm)
%0A%0Afrom m
|
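The script's increment loop is just inverting deg_per_px; the same answer falls out of a direct division. Worked with the Dell monitor values from the record:

from math import atan2, degrees

h, d, r = 28.7, 60.0, 768   # screen height (cm), viewing distance (cm), vertical pixels
deg_per_px = degrees(atan2(0.5 * h, d)) / (0.5 * r)
print(deg_per_px)            # ~0.0350 degrees per pixel
print(2 / deg_per_px)        # ~57 pixels for a 2-degree stimulus
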
00058bfd27336454229b061aa038478920787df7
|
Add ability to apply rules
|
src/fwgen.py
|
src/fwgen.py
|
#!/usr/bin/env python3
import argparse
import sys
import re
import yaml
DEFAULT_CHAINS = {
'filter': ['INPUT', 'FORWARD', 'OUTPUT'],
'nat': ['PREROUTING', 'INPUT', 'OUTPUT', 'POSTROUTING'],
'mangle': ['PREROUTING', 'INPUT', 'FORWARD', 'OUTPUT', 'POSTROUTING'],
'raw': ['PREROUTING', 'OUTPUT'],
'security': ['INPUT', 'FORWARD', 'OUTPUT']
}
class FwGen(object):
def __init__(self, config):
self.config = config
def get_policy_rules(self, inet_family):
for table, chains in DEFAULT_CHAINS.items():
for chain in chains:
try:
policy = self.config['policies'][inet_family][table][chain]
except KeyError:
policy = 'ACCEPT'
yield (table, ':%s %s' % (chain, policy))
def get_zone_rules(self, inet_family):
for zone, params in self.config['zones'].items():
try:
for table, chains in params['rules'][inet_family].items():
for chain, chain_rules in chains.items():
zone_chain = '%s_%s' % (zone, chain)
for rule in chain_rules:
yield (table, '-A %s %s' % (zone_chain, rule))
except KeyError:
continue
def get_default_rules(self, inet_family):
try:
rules = self.config['defaults']['rules'][inet_family]
except KeyError:
rules = {}
return self.get_rules(rules)
def get_helper_chains(self, inet_family):
try:
rules = self.config['helper_chains'][inet_family]
except KeyError:
rules = {}
for table, chains in rules.items():
for chain in chains:
yield self.get_new_chain_rule(table, chain)
yield from self.get_rules(rules)
@staticmethod
def get_rules(rules):
for table, chains in rules.items():
for chain, chain_rules in chains.items():
for rule in chain_rules:
yield (table, '-A %s %s' % (chain, rule))
@staticmethod
def get_new_chain_rule(table, chain):
return (table, ':%s -' % chain)
def get_zone_dispatchers(self, inet_family):
for zone, params in self.config['zones'].items():
try:
for table, chains in params['rules'][inet_family].items():
for chain in chains:
dispatcher_chain = '%s_%s' % (zone, chain)
yield self.get_new_chain_rule(table, dispatcher_chain)
if chain in ['PREROUTING', 'INPUT', 'FORWARD']:
yield (table, '-A %s -i %%{%s} -j %s' % (chain, zone, dispatcher_chain))
elif chain in ['OUTPUT', 'POSTROUTING']:
yield (table, '-A %s -o %%{%s} -j %s' % (chain, zone, dispatcher_chain))
else:
raise Exception('%s is not a valid default chain' % chain)
except KeyError:
continue
def expand_zones(self, rule):
zone_pattern = re.compile(r'^(.+?\s)%\{(.+?)\}(\s.+)$')
match = re.search(zone_pattern, rule)
if match:
zone = match.group(2)
for interface in self.config['zones'][zone]['interfaces']:
rule_expanded = '%s%s%s' % (match.group(1), interface, match.group(3))
yield from self.expand_zones(rule_expanded)
else:
yield rule
def output_rules(self, rules):
for table in DEFAULT_CHAINS:
yield '*%s' % table
for rule_table, rule in rules:
if rule_table == table:
yield from self.expand_zones(rule)
yield 'COMMIT'
def commit(self):
iptables = []
ip6tables = []
iptables.extend(self.get_policy_rules('v4'))
ip6tables.extend(self.get_policy_rules('v6'))
iptables.extend(self.get_default_rules('v4'))
ip6tables.extend(self.get_default_rules('v6'))
iptables.extend(self.get_helper_chains('v4'))
ip6tables.extend(self.get_helper_chains('v6'))
iptables.extend(self.get_zone_dispatchers('v4'))
ip6tables.extend(self.get_zone_dispatchers('v6'))
iptables.extend(self.get_zone_rules('v4'))
ip6tables.extend(self.get_zone_rules('v6'))
for i in self.output_rules(iptables):
print(i)
for i in self.output_rules(ip6tables):
print(i)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--config', metavar='PATH', help='Path to config file')
args = parser.parse_args()
config_yaml = '/etc/fwgen/config.yml'
if args.config:
config_yaml = args.config
with open(config_yaml, 'r') as f:
config = yaml.load(f)
fw = FwGen(config)
fw.commit()
if __name__ == '__main__':
sys.exit(main())
|
Python
| 0
|
@@ -53,16 +53,34 @@
mport re
+%0Aimport subprocess
%0A%0Aimport
@@ -3842,24 +3842,304 @@
d 'COMMIT'%0A%0A
+ @staticmethod%0A def apply_rules(rules, inet_family):%0A cmd = %7B%0A 'v4': %5B'iptables-restore'%5D,%0A 'v6': %5B'ip6tables-restore'%5D%0A %7D%0A stdin = ('%25s%5Cn' %25 '%5Cn'.join(rules)).encode('utf-8')%0A subprocess.run(cmd%5Binet_family%5D, input=stdin)%0A%0A
def comm
@@ -4746,33 +4746,41 @@
))%0A%0A
-for i in
+self.apply_rules(
self.output_
@@ -4790,25 +4790,31 @@
es(iptables)
-:
+, 'v4')
%0A
@@ -4810,28 +4810,69 @@
- print(i)
+self.apply_rules(self.output_rules(ip6tables), 'v6')%0A
%0A
@@ -4868,24 +4868,25 @@
')%0A%0A
+#
for i in sel
@@ -4916,24 +4916,25 @@
s):%0A
+#
print(i)
|
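The new apply_rules feeds the generated ruleset to iptables-restore over stdin via subprocess.run(..., input=...), avoiding a temp file. The pattern in isolation (the actual restore call needs root, so it is left commented out):

import subprocess

rules = ['*filter', ':INPUT DROP [0:0]', '-A INPUT -i lo -j ACCEPT', 'COMMIT']
stdin = ('%s\n' % '\n'.join(rules)).encode('utf-8')
# subprocess.run(['iptables-restore'], input=stdin)  # requires root privileges
print(stdin.decode('utf-8'), end='')
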
3972f861fae155b84bc344810b0e5a1c8cbb418c
|
Fix SMBC next page XPath
|
webcomix/supported_comics.py
|
webcomix/supported_comics.py
|
supported_comics = {
"xkcd": ("http://xkcd.com/1/", "//a[@rel='next']/@href", "//div[@id='comic']//img/@src"),
"Nedroid": ("http://nedroid.com/2005/09/2210-whee/", "//div[@class='nav-next']/a/@href", "//div[@id='comic']/img/@src"),
"JL8": ("http://limbero.org/jl8/1", "//a[text()='>']/@href", "//img/@src"),
"SMBC": ("http://www.smbc-comics.com/comic/2002-09-05", "//a[@class='next']/@href", "//img[@id='cc-comic']/@src"),
"Blindsprings": ("http://www.blindsprings.com/comic/blindsprings-cover-book-one", "//a[@class='cc-next']/@href", "//img[@id='cc-comic']/@src"),
"TheAbominableCharlesChristopher": ("http://abominable.cc/post/44164796353/episode-one", "//span[@class='next_post']//@href", "//div[@class='photo']//img/@src"),
"GuildedAge": ("http://guildedage.net/comic/chapter-1-cover/", "//a[@class='navi comic-nav-next navi-next']/@href", "//div[@id='comic']//img/@src"),
"TalesOfElysium": ("http://ssp-comics.com/comics/toe/?page=1", "//a[button/@id='nextButton']/@href", "//div[@id='ImageComicContainer']//img/@src"),
"AmazingSuperPowers": ("http://www.amazingsuperpowers.com/2007/09/heredity/", "//a[@class='navi navi-next']/@href", "//div[@class='comicpane']/img/@src"),
"Gunshow": ("http://gunshowcomic.com/1", "(//span[@class='snavb'])[4]/a/@href", "//img[@class='strip']/@src"),
"Lackadaisy": ("http://www.lackadaisycats.com/comic.php?comicid=1", "//div[@class='next']/a/@href", "//div[@id='content']/img/@src"),
"WildeLife": ("http://www.wildelifecomic.com/comic/1", "//a[@class='cc-next']/@href", "//img[@id='cc-comic']/@src")
}
|
Python
| 0.000221
|
@@ -382,24 +382,27 @@
//a%5B@class='
+cc-
next'%5D/@href
|
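A quick way to sanity-check the corrected selector against a saved page fragment, using lxml here purely for illustration (the project itself may resolve XPaths through its own scraping stack):

from lxml import html

page = html.fromstring(
    '<nav><a class="cc-next" href="/comic/2002-09-06">Next</a></nav>')
assert page.xpath("//a[@class='cc-next']/@href") == ['/comic/2002-09-06']
assert page.xpath("//a[@class='next']/@href") == []   # the old selector finds nothing
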
b256c42f393d32d4f060fe04a1349d30c3018146
|
add option to disable smart output for tasks.
|
task.py
|
task.py
|
from hashlib import md5
import subprocess
from cPickle \
import \
dumps
from toydist.core.utils \
import \
pprint
from errors \
import \
TaskRunFailure
# TODO:
# - factory for tasks, so that tasks can be created from strings
# instead of import (import not extensible)
class Task(object):
def __init__(self, name, outputs, inputs, func=None, deps=None):
if isinstance(inputs, basestring):
self.inputs = [inputs]
else:
self.inputs = inputs
if isinstance(outputs, basestring):
self.outputs = [outputs]
else:
self.outputs = outputs
self.name = name or ""
self.uid = None
self.func = func
if deps is None:
self.deps = []
else:
self.deps = deps
self.cache = None
self.env = None
self.env_vars = None
self.scan = None
# UID and signature functionalities
#----------------------------------
def get_uid(self):
if self.uid is None:
m = md5()
up = m.update
up(self.__class__.__name__)
for x in self.inputs + self.outputs:
up(x)
self.uid = m.digest()
return self.uid
def signature(self):
if self.cache is None:
sig = self._signature()
self.cache = sig
return sig
else:
return self.cache
def _signature(self):
m = md5()
self._sig_explicit_deps(m)
for k in self.env_vars:
m.update(dumps(self.env[k]))
if self.func:
m.update(self.func.func_code.co_code)
return m.digest()
def _sig_explicit_deps(self, m):
for s in self.inputs + self.deps + self.outputs:
#if os.path.exists(s):
# m.update(open(s).read())
m.update(open(s).read())
return m.digest()
# execution
#----------
def run(self):
self.func(self)
def exec_command(self, cmd, cwd):
if self.env["VERBOSE"]:
pprint('GREEN', " ".join(cmd))
else:
pprint('GREEN', "%-16s%s" % (self.name.upper(), " ".join(self.inputs)))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode:
raise TaskRunFailure("cmd %s failed: %s" % (" ".join(cmd), stderr))
|
Python
| 0
|
@@ -927,16 +927,52 @@
n = None
+%0A self.disable_output = False
%0A%0A #
@@ -2113,16 +2113,56 @@
, cwd):%0A
+ if not self.disable_output:%0A
@@ -2185,16 +2185,20 @@
BOSE%22%5D:%0A
+
@@ -2232,38 +2232,46 @@
n(cmd))%0A
+
else:%0A
+
ppri
|
4f04de16a75b4ba87936498aeec7eebeb5ba4b56
|
Add window resizing
|
temp.py
|
temp.py
|
import random
import string
import sys
from stylesheet import set_stylesheet
from PyQt5 import QtCore, QtGui, QtWidgets
# Create main canvas
class Passwordy(QtWidgets.QMainWindow):
def __init__(self, parent = None):
QtWidgets.QMainWindow.__init__(self,parent)
# Call function to create UI
self.create_ui()
def create_ui(self):
# Set window size
self.resize(500, 61)
# Set window stylesheet
get_stylesheet = set_stylesheet()
self.setStyleSheet(get_stylesheet)
# Main window setting
self.centralWidget = QtWidgets.QWidget(self)
self.centralWidget.setObjectName('centralWidget')
# Set main widget layout (centralWidget)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.centralWidget)
self.horizontalLayout_2.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setSpacing(0)
self.horizontalLayout_2.setObjectName('horizontalLayout_2')
# Create frame for menu button and title
self.menu_frame = QtWidgets.QFrame(self.centralWidget)
# Set frame size
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.menu_frame.sizePolicy().hasHeightForWidth())
self.menu_frame.setSizePolicy(sizePolicy)
self.menu_frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.menu_frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.menu_frame.setObjectName('menu_frame')
# Set menu frames layout
self.horizontalLayout = QtWidgets.QHBoxLayout(self.menu_frame)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setObjectName('horizontalLayout')
# Create menubutton
self.menu_button = QtWidgets.QPushButton(self.menu_frame)
self.menu_button.setObjectName('menu_button')
self.menu_button.setText('☰')
# Add menubutton widget to layout
self.horizontalLayout.addWidget(self.menu_button)
# Create title label
self.title_label = QtWidgets.QLabel(self.menu_frame)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.title_label.sizePolicy().hasHeightForWidth())
self.title_label.setSizePolicy(sizePolicy)
self.title_label.setObjectName('title_label')
self.title_label.setText('passwordy')
# Add title label to layout
self.horizontalLayout.addWidget(self.title_label)
self.horizontalLayout_2.addWidget(self.menu_frame)
# Create frame for middle spacer
self.spacer_frame = QtWidgets.QFrame(self.centralWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spacer_frame.sizePolicy().hasHeightForWidth())
self.spacer_frame.setSizePolicy(sizePolicy)
self.spacer_frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.spacer_frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.spacer_frame.setObjectName('spacer_frame')
# Create layout for spacer frame
self.verticalLayout = QtWidgets.QVBoxLayout(self.spacer_frame)
self.verticalLayout.setContentsMargins(11, 11, 11, 11)
self.verticalLayout.setSpacing(6)
self.verticalLayout.setObjectName('verticalLayout')
# Create horizontal spacer
spacer = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.verticalLayout.addItem(spacer)
# Add spacer to layout
self.horizontalLayout_2.addWidget(self.spacer_frame)
# Create frame for generate button
self.generate_frame = QtWidgets.QFrame(self.centralWidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.generate_frame.sizePolicy().hasHeightForWidth())
self.generate_frame.setSizePolicy(sizePolicy)
self.generate_frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.generate_frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.generate_frame.setObjectName('generate_frame')
# Create layout for generate button frame
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.generate_frame)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_2.setSpacing(0)
self.verticalLayout_2.setObjectName('verticalLayout_2')
# Create generate button
self.generate_button = QtWidgets.QPushButton(self.generate_frame)
self.generate_button.setText('generate')
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.generate_button.sizePolicy().hasHeightForWidth())
self.generate_button.setSizePolicy(sizePolicy)
self.generate_button.setObjectName('generate_button')
# Add generate button to layout
self.verticalLayout_2.addWidget(self.generate_button)
self.horizontalLayout_2.addWidget(self.generate_frame)
# Connect generate button to generate passwords function
self.generate_button.clicked.connect(self.generate_passwords)
# Set central widget
self.setCentralWidget(self.centralWidget)
# Hide OS' default window title bar
self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
# Connect slots
#QtCore.QMetaObject.connectSlotsByName(self)
def generate_passwords(self):
# Clear the output box
self.password_output.setText('')
# Set strings to get characters from
# Numbers
numbers = string.digits
# Letters
lowercase = string.ascii_lowercase
uppercase = string.ascii_uppercase
# Special Characters
special_characters = '!@#$%^&*()\{\}[]?,.'
# Init output character string
output_characters = ''
# Init empty password list
final_password_list = []
# Check user has used a checkbox, add characters from strings relative to checkboxes, generate password
if True in [self.numbers_checkbox.isChecked(),
self.lowercase_checkbox.isChecked(),
self.uppercase_checkbox.isChecked(),
self.special_characters_checkbox.isChecked()]:
output_characters = (numbers * self.numbers_checkbox.isChecked()
+ lowercase * self.lowercase_checkbox.isChecked()
+ uppercase * self.uppercase_checkbox.isChecked()
+ special_characters * self.special_characters_checkbox.isChecked())
# Check how many passwords the user requires, generate for that amount
for i in range(0, self.number_of_passwords.value()):
password = ''.join(random.choice(output_characters) for i in range(self.number_of_characters.value()))
final_password_list.append(password)
# If user hasn't selected a checkbox, inform them in a popup
else:
            informer = QtWidgets.QMessageBox()
            #informer.setWindowTitle('Passwordy - Error')
            informer.setStandardButtons(QtWidgets.QMessageBox.Ok)
            informer.setDefaultButton(QtWidgets.QMessageBox.Ok)
# Warning text
informer.setText('Error: ' + '\n' + 'You must make a selection using one of the checkboxes, please try again...')
informer.exec_()
# Add each password in the password list to the output box
for i in final_password_list:
self.password_output.append(i)
# Run App
def main():
app = QtWidgets.QApplication(sys.argv)
main = Passwordy()
main.show()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -5954,16 +5954,18 @@
words)%0A%0A
+%0A%0A
@@ -6260,16 +6260,91 @@
s(self):
+%0A%0A # Increase window size%0A self.resize(500, 500)%0A%0A '''
%0A
@@ -8477,16 +8477,27 @@
pend(i)%0A
+ '''
%0A %0A# Run
|
c0ebc5d757e71c06a8ca3597bf92d496aa0dd5ee
|
update test child age
|
test.py
|
test.py
|
import os
import unittest
import tempfile
import json
from app import app
from app.models import db, Child, User
from datetime import datetime
class ChildViewTestCase(unittest.TestCase):
def test_child_view(self):
first_name = "Martha"
last_name = "Sosa"
birth_date= datetime.strptime("2009-02-02", "%Y-%m-%d")
test_child_view = Child(first_name=first_name, last_name=last_name, birth_date=birth_date)
self.assertEqual(test_child_view.age, 6)
class AuthTestCase(unittest.TestCase):
def setUp(self):
self.client = app.test_client()
def test_can_login(self):
password = "testpass"
user = User(
first_name="Testuser",
last_name="Tester",
email="test@tester.com",
password=password,
)
db.session.add(user)
db.session.commit()
response = self.client.post('/', data=dict(
email=user.email,
password=password,
), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn("Log Out", response.get_data())
db.session.delete(user)
db.session.commit()
def test_can_sign_out(self):
password = "testpass"
user = User(
first_name="Testuser",
last_name="Tester",
email="test@tester.com",
password=password,
)
db.session.add(user)
db.session.commit()
response = self.client.post('/', data=dict(
email=user.email,
password=password,
), follow_redirects=True)
response = self.client.get('/logout', follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn("Login", response.get_data())
db.session.delete(user)
db.session.commit()
if __name__ == "__main__":
app.config['TESTING'] = True
app.testing = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://localhost/travis_ci_test'
app.config['WTF_CSRF_ENABLED'] = False
db.init_app(app)
db.create_all()
unittest.main()
|
Python
| 0.000002
|
@@ -490,9 +490,9 @@
ge,
-6
+7
)%0A%0Ac
|
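The bump from 6 to 7 is the tell: the assertion encodes the wall-clock date, so it rots every birthday. A sketch of computing the expected age instead of hard-coding it (age_on is a hypothetical helper, not part of the app under test):

from datetime import date

def age_on(birth, today):
    # Whole years elapsed, subtracting one if this year's birthday is still ahead.
    return today.year - birth.year - ((today.month, today.day) < (birth.month, birth.day))

assert age_on(date(2009, 2, 2), date(2015, 6, 1)) == 6
assert age_on(date(2009, 2, 2), date(2016, 6, 1)) == 7
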
e890ac9ef00193beac77b757c62911553cebf656
|
Change save path to local path
|
test.py
|
test.py
|
import urllib
urllib.urlretrieve('http://192.168.0.13:8080/photoaf.jpg', '/home/pi/img/img.jpg')
|
Python
| 0.000001
|
@@ -71,21 +71,8 @@
', '
-/home/pi/img/
img.
|
2595c31143226004c484da1e131e092224f9d238
|
Rewrite module to use constants
|
test.py
|
test.py
|
import subprocess
from random import choice, randint
from time import sleep
class Test(object):
def __init__(self, testfile, args=[]):
self.testfile = testfile
self.args = args
self.output = None
self.status = None
self.errorMessage = None
self.parser = None
def run(self):
self.output = runTest(self)
self.parseResults()
def parseResults(self):
self.parser = TestParser(self)
self.parser.parseOutput(self.output)
def printResults(self):
self.parser.printResults()
def getResults(self):
return {
"Status": self.status,
"ErrorMessage": self.errorMessage,
}
class SiteProbe(Test):
def __init__(self, testfile="siteprobe.py", target="https://www.torproject.org"):
super(SiteProbe, self).__init__(testfile=testfile, args = ["-u", target])
self.target = target
class TCPTest(Test):
def __init__(self, testfile="tcpconnect.py", target="www.torproject.org"):
super(TCPTest, self).__init__(testfile=testfile, args=["-t", target])
self.target = target
class PingTest(Test):
def __init__(self, testfile="ping.py",target=None, targetfile="directory_authorities.txt"):
args = ["-t", target] if target is not None else ["-f", targetfile]
super(PingTest, self).__init__(testfile=testfile, args=args)
self.target = target
self.packets = None
def parseResults(self):
self.parser = TestParser(self)
self.packets = self.parser.findValue("ReceivedPackets: ")
if "echo-reply" in self.packets:
self.status = "OK"
else:
self.status = "FAILED"
self.errorMessage = "Host unreachable"
class DNSTest(Test):
def __init__(self, testfile="dnscompare.py", target="www.torproject.org"):
super(DNSTest, self).__init__(testfile=testfile, args=["-t", target])
self.target = target
class Traceroute(Test):
def __init__(self, testfile="traceroute.py", target=None, targetfile="directory_authorities.txt"):
args = ["-b", target] if target is not None else ["-f", targetfile]
super(Traceroute, self).__init__(testfile=testfile,args=args)
self.target = target
class TestParser(object):
def __init__(self, test):
self.test = test
def findValue(self, key):
"""
The ooniprobe tests include simple key/value pairs
in their output, indicating the test results.
To avoid false positives, the format "key: [ VALUE ]"
is used. This method takes "key" as a parameter,
and returns VALUE.
"""
output = self.test.output
if not key in output:
return "NOT FOUND"
start = output.find(key) + len(key)
value = output[start:].split(" ]")[0][1:].strip()
return value
def parseOutput(self, output):
self.test.status = self.findValue("TestStatus: ")
if not self.test.status == "OK":
self.test.errorMessage = self.findValue("TestException: ")
def printResults(self):
print "Test: %s" % self.test.testfile
if hasattr(self.test, "target") and self.test.target is not None:
print "Target: %s" % self.test.target
results = self.test.getResults()
for key, value in results.iteritems():
if key and value:
print "%s: %s" % (key, value)
class TestCase(list):
def __init__(self, tests=[], sleep_interval=(1,20)):
super(TestCase, self).__init__(tests)
self.sleepInterval = sleep_interval
def run(self):
tests = testCaseGenerator(list(self))
for test in tests:
test.run()
sleep(randint(self.sleepInterval[0], self.sleepInterval[1]))
def printResults(self):
for test in self:
test.printResults()
print
def getFailed(self):
failed = [test for test in self if test.status != "OK"]
return failed
def testCaseGenerator(seq):
for x in range(len(seq)):
test = choice(seq)
seq.remove(test)
yield test
def runTest(test):
binary = "ooniprobe"
args = [binary, "-n", test.testfile]
if test.args:
args += test.args
print "Running test %s" % test.testfile
popen = subprocess.Popen(args, stdout=subprocess.PIPE)
popen.wait()
output = popen.stdout.read()
return output
|
Python
| 0
|
@@ -70,16 +70,37 @@
sleep%0A%0A
+from const import *%0A%0A
class Te
@@ -801,59 +801,39 @@
ile=
-%22siteprobe.py%22, target=%22https://www.torproject.org%22
+PROBE_TEST, target=TOR_SITE_URL
):%0A
@@ -1000,52 +1000,35 @@
ile=
-%22tcpconnect.py%22, target=%22www.torproject.org%22
+TCP_TEST, target=TOR_DOMAIN
):%0A
@@ -1192,17 +1192,17 @@
ile=
-%22ping.py%22
+PING_TEST
,tar
@@ -1213,48 +1213,8 @@
None
-, targetfile=%22directory_authorities.txt%22
):%0A
@@ -1270,33 +1270,18 @@
e else %5B
-%22-f%22, targetfile
%5D
+
%0A
@@ -1763,52 +1763,35 @@
ile=
-%22dnscompare.py%22, target=%22www.torproject.org%22
+DNS_TEST, target=TOR_DOMAIN
):%0A
@@ -1957,76 +1957,36 @@
ile=
-%22traceroute.py%22, target=None, targetfile=%22directory_authorities.txt%22
+TRACEROUTE_TEST, target=None
):%0A
@@ -2046,24 +2046,8 @@
se %5B
-%22-f%22, targetfile
%5D%0A
@@ -3425,14 +3425,22 @@
val=
-(1,20)
+SLEEP_INTERVAL
):%0A
@@ -4101,19 +4101,19 @@
y =
-%22ooniprobe%22
+OONI_BINARY
%0A
|
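The diff fields in these rows are unified diffs with URL-encoded control characters (%0A is a newline, %25 a percent sign, %7C a pipe). Decoded, this one swaps the hard-coded strings for names imported from a const module. A plausible const.py, reconstructed from the literals being replaced — the module itself is not part of the row, and the grouping and comments below are assumptions:

# const.py -- hypothetical reconstruction; only the names and values
# are taken from the diff and the code above.
PROBE_TEST = "siteprobe.py"
TCP_TEST = "tcpconnect.py"
PING_TEST = "ping.py"
DNS_TEST = "dnscompare.py"
TRACEROUTE_TEST = "traceroute.py"

TOR_SITE_URL = "https://www.torproject.org"
TOR_DOMAIN = "www.torproject.org"

OONI_BINARY = "ooniprobe"
SLEEP_INTERVAL = (1, 20)  # (min, max) seconds to sleep between tests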
89b1bfaad82f1e19df51b189b65ce940983d0da1
|
comment out cfl in tests since it seems to be broken.
|
test.py
|
test.py
|
###
# Copyright (c) 2012-2014, spline
# All rights reserved.
###
from supybot.test import *
class ScoresTestCase(PluginTestCase):
plugins = ('Scores',)
def testScores(self):
# cfb, cfl, d1bb, golf, mlb, nascar, nba, ncb, ncw, nfl, nhl, racing, tennis, and wnba
conf.supybot.plugins.Scores.disableANSI.setValue('True')
self.assertNotError('cfb')
self.assertNotError('cfl')
self.assertNotError('d1bb')
self.assertNotError('golf')
self.assertNotError('mlb')
self.assertNotError('nascar')
self.assertNotError('nba')
self.assertNotError('ncb')
self.assertNotError('ncw')
self.assertNotError('nfl')
self.assertNotError('nhl')
self.assertNotError('racing f1')
self.assertNotError('tennis')
self.assertNotError('wnba')
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
Python
| 0
|
@@ -377,32 +377,34 @@
r('cfb')%0A
+ #
self.assertNotE
|
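Decoded, the patch only inserts a comment marker, so the affected lines of testScores become (placement reconstructed from the diff context):

        self.assertNotError('cfb')
        # self.assertNotError('cfl')
        self.assertNotError('d1bb')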
ac8d6210b1e48e7ce1131412b45d23846b7c73d2
|
Fix to minor style issue
|
test.py
|
test.py
|
import time
import panoply
KEY = "panoply/2g866xw4oaqt1emi"
SECRET = "MmM0NWNvc2wwYmJ4ZDJ0OS84MmY3MzQ4NC02MDIzLTQyN2QtODdkMS0yY2I0NTAzNDk0NDQvMDM3MzM1OTk5NTYyL3VzLWVhc3QtMQ==" # noqa
sdk = panoply.SDK(KEY, SECRET)
sdk.write('roi-test', {'hello': 1})
print sdk.qurl
time.sleep(5)
|
Python
| 0.000001
|
@@ -247,18 +247,16 @@
': 1%7D)%0A%0A
-%0A%0A
print sd
|
5e089a1b155071bb9f009657320c9c12418f517d
|
debug travis
|
test.py
|
test.py
|
#!/usr/bin/env python
from numpy import array,nan,uint16,int64
from numpy.testing import assert_allclose
from datetime import datetime
#
try:
from .airMass import airmass
from .rawDMCreader import goRead
from .plotSolarElev import compsolar
except:
from airMass import airmass
from rawDMCreader import goRead
from plotSolarElev import compsolar
def test_airmass():
theta=[-1.,38.]
Irr,M,I0 = airmass(theta,datetime(2015,7,1,0,0,0))
assert_allclose(Irr,[nan, 805.13538427])
assert_allclose(M,[nan, 1.62045712])
def test_rawread():
bigfn='test/testframes.DMCdata'
framestoplay=(1,2,1) #this is (start,stop,step) so (1,2,1) means read only the second frame in the file
testframe, testind,finf = goRead(bigfn,(512,512),(1,1),framestoplay,verbose=1)
#these are both tested by goRead
#finf = getDMCparam(bigfn,(512,512),(1,1),None,verbose=2)
#with open(bigfn,'rb') as f:
# testframe,testind = getDMCframe(f,iFrm=1,finf=finf,verbose=2)
#test a handful of pixels
assert testind.dtype == int64
assert testframe.dtype == uint16
assert testind == 710731
assert (testframe[0,:5,0] == array([642, 1321, 935, 980, 1114])).all()
assert (testframe[0,-5:,-1] == array([2086, 1795, 2272, 1929, 1914])).all()
def test_plotsolar():
Irr,sunel = compsolar('pfisr',(None,None,None),
datetime(2015,7,1,0,0,0), 1, False)
assert_allclose(Irr[[6,14,6],[2,125,174]], [nan, 216.436431, 405.966392])
assert_allclose(sunel[[6,14,6],[2,125,174]], [-33.736906, 4.438728, 9.068415])
if __name__ == '__main__':
test_airmass()
test_rawread()
test_plotsolar()
|
Python
| 0.000001
|
@@ -252,17 +252,45 @@
r%0Aexcept
-:
+ Exception as e:%0A print(e)
%0A fro
@@ -1686,24 +1686,25 @@
d()%0A test_plotsolar()
+%0A
|
e859119ba7c898c9c5a1e3c9a719050461abc249
|
test installed package
|
test.py
|
test.py
|
#!/usr/bin/env python3
import sys
from os import path
from unittest import TestLoader, TextTestRunner
print("Python {}".format(sys.version))
libdir = path.join(path.abspath(path.curdir), 'lib')
sys.path.insert(0, libdir)
from tsdesktop import version
version.println()
ldr = TestLoader()
suite = ldr.discover('tsdesktop', '*_test.py')
verbose = 1
if '-v' in sys.argv: verbose = 2
rnr = TextTestRunner(verbosity=verbose)
rst = rnr.run(suite)
sys.exit(len(rst.errors))
|
Python
| 0
|
@@ -136,16 +136,59 @@
sion))%0A%0A
+if not '--test-installed' in sys.argv:%0A
libdir =
@@ -232,16 +232,20 @@
'lib')%0A
+
sys.path
|
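Decoded, the patch guards the sys.path manipulation so that a --test-installed flag skips the local lib/ directory and exercises the installed package instead. The reconstructed result — the indentation of the libdir line is inferred, since the encoded hunk only shows the added prefix for the sys.path line:

if not '--test-installed' in sys.argv:
    libdir = path.join(path.abspath(path.curdir), 'lib')
    sys.path.insert(0, libdir)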
465046b44d71bfea9879a0e7e6039b8c932c90e8
|
Fix for fast CPU in testing
|
test.py
|
test.py
|
#!/usr/bin/env python3
from pycah.db.user import User
from pycah.db.game import Game
from pycah.db.expansion import Expansion
from pycah.db import connection
import re, json
connection.set_session(autocommit=True)
cursor = connection.cursor()
cursor.execute(open('./pycah/db/create_database.sql').read())
connection.set_session(autocommit=False)
print('Importing cards...')
expansions = {}
for e in ['Base', 'CAHe1', 'CAHe2', 'CAHe3']:
cursor.execute('''INSERT INTO expansions VALUES(DEFAULT,%s) RETURNING eid''', (e,))
eid = cursor.fetchone()[0]
expansions[e] = eid
try:
print('Importing Australian Edition cards...')
regex = re.compile(r'_+')
cursor.execute('''INSERT INTO expansions VALUES(DEFAULT,%s,%s) RETURNING eid''', ('Australian Edition', 'The Australian Edition of Cards against Humanity.'))
eid = cursor.fetchone()[0]
with open('./pycah/db/cards/black_aus.txt', encoding='utf-8') as f:
for l in f:
line = l.strip()
line = regex.sub('_', line)
spaces = max(1, line.count('_'))
cursor.execute('''INSERT INTO black_cards VALUES(%s,DEFAULT,%s,%s)''', (eid, spaces, line))
with open('./pycah/db/cards/white_aus.txt', encoding='utf-8') as f:
for l in f:
line = l.strip()
cursor.execute('''INSERT INTO white_cards VALUES(%s,DEFAULT,%s,%s)''', (eid, False, line))
connection.commit()
print('Finished importing Australian Edition')
except Exception as e:
print('Failed to import Australian Edition')
raise e
try:
print('Importing the rest...')
cards = json.loads(open('./pycah/db/cards/rest.json').read())
for card in cards:
if card['expansion'] not in expansions:
cursor.execute('''INSERT INTO expansions VALUES(DEFAULT,%s) RETURNING eid''', (card['expansion'],))
expansions[card['expansion']] = cursor.fetchone()[0]
if card['numAnswers'] == 0:
cursor.execute('''INSERT INTO white_cards VALUES(%s,DEFAULT,%s,%s)''', (expansions[card['expansion']], False, card['text']))
else:
cursor.execute('''INSERT INTO black_cards VALUES(%s,DEFAULT,%s,%s)''', (expansions[card['expansion']], card['numAnswers'], card['text']))
connection.commit()
print('Finished importing the rest')
except Exception as e:
print('Failed to import the rest')
raise e
print('Testing random cards: ')
cursor.execute('''SELECT value, type FROM black_cards ORDER BY RANDOM() LIMIT 1''')
black_card = cursor.fetchone()
connection.commit()
print(black_card[0])
cursor.execute('''SELECT value FROM white_cards ORDER BY RANDOM() LIMIT %s''', (black_card[1],))
white_cards = cursor.fetchall()
for c in white_cards:
print(c[0])
print('Test complete.')
print('Game test...')
u1 = User.create('user1', 'password')
u2 = User.create('user2', 'password')
u3 = User.create('user3', 'password')
g = Game.create(u1, 10, Expansion.list_all())
g.add_player(u2)
g.add_player(u3)
czar, b_card = g.new_round()
print(u1.username, [c.value for c in g.get_hand(u1)])
print(u2.username, [c.value for c in g.get_hand(u2)])
print(u3.username, [c.value for c in g.get_hand(u3)])
print(czar.username, b_card.value)
for user in [u1, u2, u3]:
if user == czar:
continue
hand = g.get_hand(user)
for i in range(b_card.answers):
g.play_card(user, hand[i])
print(user.username, 'played', hand[i].value)
print('Round over, voting begins...' if all([g.turn_over(u) for u in [u1, u2, u3] if u != czar]) else 'A problem occurred. Round is not over for some reason.')
played_hands = g.get_played_hands()
print('Czar sees:', [[c.value for c in hand] for hand in played_hands])
print('Czar picks:', [c.value for c in played_hands[0]])
winner = g.czar_pick(played_hands[0])
print('Pick failed.' if not winner else 'Winner was {}.'.format(winner.username))
print('Done.')
User.create('nhardy', 'password')
|
Python
| 0
|
@@ -168,16 +168,22 @@
re, json
+, time
%0A%0Aconnec
@@ -3285,16 +3285,36 @@
.value)%0A
+ time.sleep(0.1)%0A
print('R
|
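The fix imports time and sleeps for 100 ms after each player's turn, so a fast CPU cannot submit plays quicker than the game records them. Per the diff's indentation, the sleep sits at the outer-loop level, once per player:

import time  # added alongside the existing imports

for user in [u1, u2, u3]:
    if user == czar:
        continue
    hand = g.get_hand(user)
    for i in range(b_card.answers):
        g.play_card(user, hand[i])
        print(user.username, 'played', hand[i].value)
    time.sleep(0.1)  # let the game register this player's turn before the next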
10a78f1d5cfb38c14c7e5434fdd5258fdf41a351
|
Fix failing tests (oops)
|
test.py
|
test.py
|
#!/usr/bin/env python
import os
import subprocess
import time
import glob
import unittest
class TestPasses(unittest.TestCase):
@classmethod
def setUpClass(self):
clean()
self.output = run_zx_spec("bin/test-passes.tap")
def test_zx_spec_header_displayed(self):
self.assertRegexpMatches(self.output, 'ZX Spec - The TDD Framework')
def test_indicators_show_tests_passed(self):
self.assertRegexpMatches(self.output, '\.' * 4)
def test_all_tests_pass(self):
self.assertRegexpMatches(self.output, 'Pass: 5, Fail: 0, Total: 5')
@classmethod
def tearDownClass(self):
clean()
class TestFailures(unittest.TestCase):
@classmethod
def setUpClass(self):
clean()
self.output = run_zx_spec("bin/test-failures.tap")
def test_zx_spec_header_displayed(self):
self.assertRegexpMatches(self.output, 'ZX Spec - The TDD Framework')
def test_shows_failed_tests(self):
self.assertRegexpMatches(self.output, 'assert_fail fails')
self.assertRegexpMatches(self.output, 'assert_a_equals fails')
self.assertRegexpMatches(self.output, 'assert_a_not_equals fails')
self.assertRegexpMatches(self.output, 'assert_a_is_zero fails')
self.assertRegexpMatches(self.output, 'assert_a_is_not_zero fails')
def test_all_tests_failed(self):
self.assertRegexpMatches(self.output, 'Pass: 0, Fail: 5, Total: 5')
@classmethod
def tearDownClass(self):
clean()
def clean():
for f in glob.glob("printout.*"):
os.remove(f)
def run_zx_spec(tape):
ZX_SPEC_OUTPUT_FILE = "printout.txt"
proc = subprocess.Popen([
"fuse",
"--tape", tape,
"--auto-load",
"--no-autosave-settings"])
wait_count = 0
while not os.path.exists(ZX_SPEC_OUTPUT_FILE):
time.sleep(0.1)
wait_count += 1
if wait_count == 20:
raise 'Output file not produced in time'
time.sleep(10)
proc.kill()
with open(ZX_SPEC_OUTPUT_FILE, 'r') as f:
return f.read()
if __name__ == '__main__':
unittest.main(verbosity=2)
|
Python
| 0.000002
|
@@ -330,34 +330,33 @@
output, 'ZX Spec
- -
+:
The TDD Framewo
@@ -904,10 +904,9 @@
Spec
- -
+:
The
@@ -1020,22 +1020,16 @@
ert_fail
- fails
')%0A
@@ -1081,30 +1081,24 @@
ert_a_equals
- fails
')%0A s
@@ -1154,22 +1154,16 @@
t_equals
- fails
')%0A
@@ -1220,22 +1220,16 @@
_is_zero
- fails
')%0A
@@ -1294,14 +1294,8 @@
zero
- fails
')%0A%0A
|
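One latent bug worth noting in the polling loop above: raise 'Output file not produced in time' raises a string, which is a TypeError on Python 2.6 and later. A minimal, standard-library-only sketch of a more robust wait (the names here are illustrative, not from the patch):

import os
import time

def wait_for_file(path, timeout=2.0, poll=0.1):
    """Block until `path` exists or `timeout` seconds elapse."""
    deadline = time.time() + timeout
    while not os.path.exists(path):
        if time.time() > deadline:
            # raise a real exception instead of a string
            raise RuntimeError('Output file not produced in time')
        time.sleep(poll)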
8638e02de720954ed33098ec88a044dee38302f6
|
test ...
|
test.py
|
test.py
|
#!/usr/bin/env python
import os
import socket
import sys
def test():
print "hello"
pass
if __name__=="__main__":
test()
sys.exit(0)
|
Python
| 0
|
@@ -84,16 +84,154 @@
%22hello%22%0A
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)%0A server_address = '/var/run/docker.sock'%0A sock.connect(server_address)%0A
pass
|
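Decoded, the patch makes test() open a stream connection to the local Docker daemon's UNIX socket (the default /var/run/docker.sock path); no request is sent, and the socket is left open, exactly as in the diff:

    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    server_address = '/var/run/docker.sock'
    sock.connect(server_address)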
25e2c02ebc9a19ad7fe193ed5912fcd21bec4065
|
Test if machine deciphers correctly
|
test.py
|
test.py
|
import unittest
from enigma import Enigma, Steckerbrett, Umkehrwalze, Walzen
class RotorTestCase(unittest.TestCase):
def test_rotor_encoding(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
self.assertEqual('E', rotor.encode('A'))
def test_rotor_reverse_encoding(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
self.assertEqual('U', rotor.encode_reverse('A'))
def test_rotor_different_setting(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
setting='B')
self.assertEqual('K', rotor.encode('A'))
self.assertEqual('K', rotor.encode_reverse('A'))
def test_rotor_different_offset(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
offset='B')
self.assertEqual('D', rotor.encode('A'))
self.assertEqual('W', rotor.encode_reverse('A'))
def test_rotor_different_setting_and_offset(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
setting='B', offset='B')
self.assertEqual('J', rotor.encode('A'))
self.assertEqual('V', rotor.encode_reverse('A'))
class ReflectorTestCase(unittest.TestCase):
def test_reflector(self):
reflector = Umkehrwalze(wiring='YRUHQSLDPXNGOKMIEBFZCWVJAT')
self.assertEqual('Y', reflector.encode('A'))
def test_reflector_fails_on_invalid_wiring(self):
self.assertRaises(KeyError, Umkehrwalze,
wiring='YRUHQSLDPXNGOKMIEBFZCWVJA')
self.assertRaises(KeyError, Umkehrwalze,
wiring='YRYHQSLDPXNGOKMIEBFZCWVJAT')
class PlugboardTestCase(unittest.TestCase):
def test_plugboard_swapping(self):
plugboard = Steckerbrett('PO', 'ML', 'IU', 'KJ', 'NH', 'YT', 'GB',
'VF', 'RE', 'DC')
self.assertEqual('O', plugboard.swap('P'))
self.assertEqual('M', plugboard.swap('L'))
def test_plugboard_fails_on_repeated_letter(self):
self.assertRaises(KeyError, Steckerbrett, 'PO', 'PL')
class EnigmaTestCase(unittest.TestCase):
def setUp(self):
self.rotors = (
Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q'),
Walzen(wiring='AJDKSIRUXBLHWTMCQGZNPYFVOE', notch='E'),
Walzen(wiring='BDFHJLCPRTXVZNYEIWGAKMUSQO', notch='V'),
)
self.reflector = Umkehrwalze(wiring='YRUHQSLDPXNGOKMIEBFZCWVJAT')
self.plugboard = Steckerbrett('PO', 'ML', 'IU', 'KJ', 'NH', 'YT', 'GB',
'VF', 'RE', 'DC')
def test_enigma_cipher(self):
machine = Enigma(rotors=self.rotors[::-1], reflector=self.reflector)
self.assertEqual('BDZGO', machine.cipher('AAAAA'))
def run_tests():
unittest.main()
if __name__ == '__main__': # pragma: no cover
run_tests()
|
Python
| 0.00308
|
@@ -2828,16 +2828,189 @@
AAA'))%0A%0A
+ def test_enigma_decipher(self):%0A machine = Enigma(rotors=self.rotors%5B::-1%5D, reflector=self.reflector)%0A self.assertEqual('AAAAA', machine.cipher('BDZGO'))%0A%0A
%0Adef run
|
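The added test leans on the Enigma's reciprocity: with the rotors reset to the same starting position, enciphering the ciphertext reproduces the plaintext, so cipher() doubles as the decipher check. Decoded from the diff, the new method is:

    def test_enigma_decipher(self):
        machine = Enigma(rotors=self.rotors[::-1], reflector=self.reflector)
        self.assertEqual('AAAAA', machine.cipher('BDZGO'))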
a93c281e126f41d9ac388ec2dafd829eed2ea6b1
|
add coverage flags
|
test.py
|
test.py
|
import os
import sys
import subprocess
import shlex
import shutil
import sys
import time
import datetime
HERE = os.path.abspath(os.path.dirname(__file__))
# ------------------------------------------------------------------------------
def exe(command):
"""
Executes command and returns string representations of stdout and stderr captured from the console.
When universal_newlines=True stdout and stderr are opened in text mode.
Otherwise, they are opened in binary mode. In that case captured stdout and stderr
are not strings and Python 3 throws type error when compared against strings later in tests.
Note:
This feature is only available if Python is built with universal newline support (the default).
Also, the newlines attribute of the file objects stdout, stdin and stderr are not updated by the
communicate() method.
See https://docs.python.org/2/library/subprocess.html
"""
stdout, stderr = subprocess.Popen(shlex.split(command),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True).communicate()
if stderr:
sys.stderr.write(stderr)
return stdout, stderr
# ------------------------------------------------------------------------------
def configure_build_and_exe(name, setup_command):
stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H-%M-%S')
os.chdir(os.path.join(HERE, name))
if sys.platform == 'win32':
setup_command += ' --generator="MinGW Makefiles"'
setup_command += ' build-%s' % stamp
stdout, stderr = exe(setup_command)
os.chdir(os.path.join(HERE, name, 'build-%s' % stamp))
if sys.platform == 'win32':
stdout, stderr = exe('mingw32-make')
shutil.copy('..\src\example\EXAMPLE.INP', 'bin\EXAMPLE.INP')
os.chdir('bin')
stdout, stderr = exe('example.exe')
else:
stdout, stderr = exe('make')
shutil.copy('../src/example/EXAMPLE.INP', 'bin/EXAMPLE.INP')
os.chdir('bin')
stdout, stderr = exe('./example')
return stdout, stderr
# ------------------------------------------------------------------------------
def test_examples():
stdout, stderr = configure_build_and_exe('', 'python setup.py')
assert 'Fortran Input Reader OK!' in stdout
|
Python
| 0
|
@@ -2354,16 +2354,41 @@
setup.py
+ --fc=gfortran --coverage
')%0A a
|
c04d010366009eb49f94960ddbdaedbb5850dd98
|
Fix typo in test.py
|
test.py
|
test.py
|
import steam, sys
valid_modes = ["bp", "schema", "assets-catalog"]
try:
testmode = sys.argv[2]
testkey = sys.argv[1]
if testmode not in valid_modes: raise Exception
except:
sys.stderr.write("Run " + sys.argv[0] + " <apikey> " + "<" + ", ".join(valid_modes) + ">\n")
raise SystemExit
steam.set_api_key(testkey)
test_schema = steam.tf2.item_schema(lang = "en")
def print_item_list(items):
for item in items:
print("\n\x1b[1m" + str(item) + "\x1b[0m\n")
for attr in item:
print attr
if testmode == "bp":
test_pack = steam.tf2.backpack("stragglerastic", schema = test_schema)
print_item_list(test_pack)
elif testmode == "schema":
print_item_list(test_chema)
elif testmode == "assets-catalog":
assets = steam.tf2.assets(currency = "usd")
for item in test_schema:
try:
print("\x1b[1m" + str(item) + "\x1b[0m:\t $" + str(assets[item]))
except KeyError:
pass
|
Python
| 0.999785
|
@@ -710,16 +710,17 @@
st(test_
+s
chema)%0Ae
|
4893105835a8acf4ee19a96c6fefce45f08ec08f
|
fix some
|
test.py
|
test.py
|
from __future__ import print_function
import logging
from logging import StreamHandler
from memory_profiler import profile
logger = logging.getLogger()
logger.addHandler(StreamHandler())
logger.setLevel(logging.DEBUG)
def glow_pyconf_ppt():
import requests
for i in range(1, 24):
s = requests.get('http://boolan.com/Courses/a4e31043-f830-49ff-8488-0801a84dcc0c/slide%s.jpg'%i)
f = open("/home/zhyq/slide%s.jpg"%i,"w")
f.write(s.content)
@profile
def test_reference_cycle():
import gc
import time
MAX = 10000
LIFE = 15
class A(object):
def __init__(self, b=None):
self.b = b
self.l = []
self.init()
def init(self):
for i in range(0, MAX):
self.l.append('abcdefgaaaaaaa')
class B(object):
def __init__(self, a=None):
self.a = a
self.l = []
self.init()
def init(self):
for i in range(0, MAX):
self.l.append('gfedcbaaaaaaaa')
logger.debug('start generate object')
a = A()
b = B()
logger.debug('object exists last %ss'%LIFE)
time.sleep(LIFE)
#a.b = b
#b.a = a
logger.debug('delete a, b')
a = None
b = None
logger.debug('a and b is deleted')
time.sleep(LIFE)
if __name__ == '__main__':
test_reference_cycle()
|
Python
| 0.947237
|
@@ -467,16 +467,17 @@
ntent)%0A%0A
+#
@profile
@@ -477,16 +477,16 @@
profile%0A
-
def test
@@ -547,19 +547,16 @@
MAX = 10
-000
%0A LIF
@@ -564,9 +564,9 @@
= 1
-5
+0
%0A%0A
@@ -1202,25 +1202,29 @@
sleep(LIFE)%0A
+
%0A
-
#a.b = b
@@ -1269,16 +1269,46 @@
a, b')%0A
+ a.l = None%0A b.l = None%0A
a =
@@ -1321,24 +1321,44 @@
b = None%0A
+ del a%0A del b%0A
logger.d
@@ -1389,31 +1389,449 @@
')%0A%0A
-time.sleep(LIFE
+logger.debug('wait %25s to collect'%25LIFE)%0A time.sleep(LIFE)%0A gc.collect()%0A logger.debug('collect is finished')%0A%0A time.sleep(LIFE)%0A%0Adef test_inspect():%0A import inspect%0A%0A def hello(route, objid, val=10, *args, **kwargs):%0A import time%0A time.sleep(100)%0A print('hello world')%0A%0A print(inspect.getargspec(hello))%0A #print(inspect.formatargspec())%0A print(inspect.getcallargs(hello, 'paper', 'id')
)%0A%0Aif __
|
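The test is set up to demonstrate reference cycles (the a.b/b.a links are left commented out here); when such a cycle does exist, dropping the names does not free the objects until the cycle collector runs. A self-contained illustration of that point — the Node class is made up for the example:

import gc

class Node(object):
    def __init__(self):
        self.other = None

a, b = Node(), Node()
a.other, b.other = b, a    # reference cycle: a -> b -> a
del a, b                   # names gone, but the cycle keeps both alive
print('collected %d unreachable objects' % gc.collect())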
e057b586e2dc43ff367cb1ed6fc5bbb7dbfe514c
|
print flask
|
test.py
|
test.py
|
import flask
|
Python
| 0.000006
|
@@ -6,8 +6,19 @@
t flask%0A
+print flask
|
848751ca2906a5e1e8e5ccf3828bf13994b074fe
|
Update test script
|
test.py
|
test.py
|
import xorcise
try:
console = xorcise.turn_on_console()
console.erase()
line = xorcise.Line()
console.print_line(0, line)
line = xorcise.Line(
xorcise.Character("h", xorcise.ColorAttribute.black),
xorcise.Character("e", xorcise.ColorAttribute.blue),
xorcise.Character("l", xorcise.ColorAttribute.red),
xorcise.Character("l", xorcise.ColorAttribute.green),
xorcise.Character("o", xorcise.ColorAttribute.yellow),
xorcise.Character(","),
xorcise.Character("\t", xorcise.ColorAttribute.cyan
| xorcise.RenditionAttribute.reverse),
xorcise.Character("w", xorcise.ColorAttribute.magenta),
xorcise.Character("o", xorcise.ColorAttribute.white),
xorcise.Character("r"),
xorcise.Character("l", xorcise.ColorAttribute.white),
xorcise.Character("d"),
xorcise.Character("!", xorcise.ColorAttribute.white
| xorcise.RenditionAttribute.underline),
)
console.print_line(1, line)
console.print_line(2, xorcise.Line(
xorcise.Character("\t"),
xorcise.Character(" "),
xorcise.Character("A", xorcise.ColorAttribute.cyan)))
console.refresh()
console.get_char()
with open("debug.log", "w") as f:
for char in line:
f.write(char.value)
f.write("\n")
for char in line.normalized:
f.write(char.value)
finally:
xorcise.turn_off_console()
|
Python
| 0.000001
|
@@ -205,13 +205,33 @@
ute.
-black
+get_best_match((0, 0, 0))
),%0A
@@ -285,12 +285,35 @@
ute.
-blue
+get_best_match((0, 0, 255))
),%0A
@@ -367,11 +367,35 @@
ute.
-red
+get_best_match((255, 0, 0))
),%0A
@@ -450,12 +450,34 @@
te.g
-reen
+et_best_match((0, 255, 0))
),%0A
@@ -531,14 +531,37 @@
ute.
-yellow
+get_best_match((255, 255, 0))
),%0A
@@ -646,13 +646,42 @@
ute.
-cyan%0A
+get_best_match((0, 255, 255))%0A
@@ -797,15 +797,37 @@
ute.
-magenta
+get_best_match((255, 0, 255))
),%0A
@@ -869,37 +869,63 @@
.ColorAttribute.
-white
+get_best_match((255, 255, 255))
),%0A xorcise
@@ -972,38 +972,8 @@
(%22l%22
-, xorcise.ColorAttribute.white
),%0A
@@ -1057,13 +1057,39 @@
ute.
-white
+get_best_match((255, 255, 255))
%0A
@@ -1340,12 +1340,37 @@
ute.
-cyan
+get_best_match((0, 255, 255))
)))%0A
|
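The update replaces the named color constants with ColorAttribute.get_best_match((r, g, b)), which by its name and usage maps an RGB triple to the nearest available color; the exact xorcise API is assumed from the diff. Decoded, the first characters become:

    line = xorcise.Line(
        xorcise.Character("h", xorcise.ColorAttribute.get_best_match((0, 0, 0))),
        xorcise.Character("e", xorcise.ColorAttribute.get_best_match((0, 0, 255))),
        xorcise.Character("l", xorcise.ColorAttribute.get_best_match((255, 0, 0))),
        # ... the remaining characters are updated the same way
    )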
35b2028ed09f64442092bdcb617c80acd1741948
|
Fix for ticket #18
|
urls.py
|
urls.py
|
from django.conf.urls.defaults import *
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
from mailng.extensions import loadextensions, loadmenus
loadextensions()
urlpatterns = patterns('',
# Example:
# (r'^mailng/', include('mailng.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/(.*)', admin.site.root),
(r'^mailng/admin/', include('mailng.admin.urls')),
(r'^mailng/main/', include('mailng.main.urls')),
(r'^accounts/login/$', 'django.contrib.auth.views.login'),
(r'^accounts/logout/$', 'django.contrib.auth.views.logout_then_login'),
(r'^jsi18n/$', 'django.views.i18n.javascript_catalog',
{'packages': ('mailng',),})
)
urlpatterns += patterns('', loadmenus())
if settings.DEBUG:
urlpatterns += patterns(
'',
(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': '/home/tonio/projets/mailng/static'})
)
|
Python
| 0
|
@@ -978,16 +978,56 @@
,),%7D)%0A)%0A
+menus = loadmenus()%0Aif menus != ():%0A
urlpatte
@@ -1046,27 +1046,21 @@
rns('',
-load
menus
-()
)%0A%0Aif se
|
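Decoded, the fix calls loadmenus() once and only extends urlpatterns when the result is non-empty, so an extension that registers no menus no longer injects an empty pattern:

menus = loadmenus()
if menus != ():
    urlpatterns += patterns('', menus)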
a5357056bda5daf741a5096f88c50dc93bfff1b7
|
fix typo
|
urls.py
|
urls.py
|
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^forum/', include('nidarholm.forum.urls.debate')),
(r'^news/', include('nidarholm.news.urls.story'))
(r'^admin/', include(admin.site.urls)),
)
|
Python
| 0.999991
|
@@ -226,16 +226,17 @@
story'))
+,
%0A%0A (r
|
b8faad87145b777d5bf1fc807fc06dd940d0816d
|
Put messaging app's urls under a path
|
urls.py
|
urls.py
|
from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
from django.conf import settings
#from ajax_select import urls as ajax_select_urls
from tastypie.api import Api
from storybase.api import CreativeCommonsLicenseGetProxyView
from storybase_asset.urls import urlpatterns as asset_urlpatterns
from storybase_messaging.urls import urlpatterns as messaging_urlpatterns
from storybase_user.urls import urlpatterns as user_urlpatterns
from storybase_story.urls import urlpatterns as story_urlpatterns
from storybase_asset.api import AssetResource, DataSetResource
from storybase_geo.api import (GeocoderResource, GeoLevelResource,
LocationResource, PlaceResource)
from storybase_help.api import (HelpResource)
from storybase_story.api import StoryResource
from storybase_taxonomy.api import TagResource
admin.autodiscover()
urlpatterns = patterns('')
# Set up Tastypie API resources
v0_1_api = Api(api_name='0.1')
v0_1_api.register(AssetResource())
v0_1_api.register(DataSetResource())
v0_1_api.register(StoryResource())
v0_1_api.register(GeocoderResource())
v0_1_api.register(GeoLevelResource())
v0_1_api.register(LocationResource())
v0_1_api.register(PlaceResource())
v0_1_api.register(HelpResource())
v0_1_api.register(TagResource())
urlpatterns += patterns('',
# REST API
(r'^api/', include(v0_1_api.urls)),
# Proxy for Creative Commons endpoint
url(r"^api/%s/license/get/" % v0_1_api.api_name,
CreativeCommonsLicenseGetProxyView.as_view(),
name="api_cc_license_get"),
)
# Include storybase_user URL patterns
# Use this pattern instead of include since we want to put the URLs
# at the top-level
urlpatterns += messaging_urlpatterns + user_urlpatterns + story_urlpatterns + asset_urlpatterns
urlpatterns += patterns('',
# Examples:
# url(r'^$', 'atlas.views.home', name='home'),
# url(r'^atlas/', include('atlas.foo.urls')),
# StoryBase account management
# This needs to come before the admin URLs in order to use
# the custom login form
(r'^accounts/', include('storybase_user.account_urls')),
# (r'^$', include('storybase_messaging.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
#url(r'^admin/lookups/', include(ajax_select_urls)),
url(r'^admin/', include(admin.site.urls)),
# Make translations available in JavaScript
(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', {}),
# Comments
(r'^comments/', include('django.contrib.comments.urls')),
# 3rd-party apps
(r'^tinymce/', include('tinymce.urls')),
(r'^accounts/', include('storybase_user.registration.backends.extrainfo.urls')),
(r'^accounts/', include('social_auth.urls')),
(r'^notices/', include('notification.urls')),
# django CMS URLs
url(r'^', include('cms.urls')),
)
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
url(r'', include('django.contrib.staticfiles.urls')),
) + urlpatterns
|
Python
| 0
|
@@ -332,82 +332,8 @@
rns%0A
-from storybase_messaging.urls import urlpatterns as messaging_urlpatterns%0A
from
@@ -1643,32 +1643,8 @@
s +=
- messaging_urlpatterns +
use
@@ -2037,17 +2037,16 @@
),%0A%0A
-#
(r'%5E
$',
@@ -2041,17 +2041,26 @@
(r'%5E
-$
+messaging/
', inclu
|
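Decoded, the change stops concatenating messaging_urlpatterns at the top level and instead mounts the app under a URL prefix via include(), so its routes now live under /messaging/. The resulting entry inside urlpatterns:

    (r'^messaging/', include('storybase_messaging.urls')),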
fbba73e772e5055dce81dd2a3f8814011733f882
|
Add ajax_select lookup url.
|
urls.py
|
urls.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
## Author: Adriano Monteiro Marques <adriano@umitproject.org>
## Author: Diogo Pinheiro <diogormpinheiro@gmail.com>
##
## Copyright (C) 2011 S2S Network Consultoria e Tecnologia da Informacao LTDA
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License as
## published by the Free Software Foundation, either version 3 of the
## License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
handler500 = 'djangotoolbox.errorviews.server_error'
urlpatterns = patterns('',
('^_ah/warmup$', 'djangoappengine.views.warmup'),
(r'', include('gui.urls')),
(r'', include('geoip.urls')),
(r'^map/$', 'gui.views.map'),
(r'^realtimebox/$', 'gui.views.realtimebox'),
(r'^events/(?P<event_id>\d+)/$', 'gui.views.event'),
(r'^twitter/', include('twitter.urls')),
(r'^accounts/', include('registration.urls')),
(r'^notification/', include('notificationsystem.urls')),
(r'^admin/', include(admin.site.urls)),
(r'^api/', include('api.urls')),
(r'^decision/', include('decision.urls')),
(r'^agents/', include('agents.urls')),
)
|
Python
| 0
|
@@ -987,16 +987,66 @@
admin%0A%0A
+from ajax_select import urls as ajax_select_urls%0A%0A
admin.au
@@ -1566,47 +1566,8 @@
-(r'%5Eadmin/', include(admin.site.urls)),
%0A
@@ -1690,10 +1690,106 @@
rls')),%0A
+ (r'%5Eajax/lookups/', include(ajax_select_urls)),%0A (r'%5Eadmin/', include(admin.site.urls)),%0A
)%0A
|
3ac72f0a9f83988584cee89896eaeb5c6f06d6b0
|
Fix `previous_float` in util.py
|
util.py
|
util.py
|
# util.py
# Imports
import re
# raise_if_not_shape
def raise_if_not_shape(name, A, shape):
"""Raise a `ValueError` if the np.ndarray `A` does not have dimensions
`shape`."""
if A.shape != shape:
raise ValueError('{}.shape != {}'.format(name, shape))
# previous_float
PARSE_FLOAT_RE = re.compile(r'([+-]*)0x1\.([\d]{13})p(.*)')
def previous_float(x):
"""Return the next closest float (towards zero)."""
s, f, e = PARSE_FLOAT_RE.match(float(x).hex()).groups()
f, e = int(f, 16), int(e)
if f > 0:
f -= 1
else:
f = int('f' * 13, 16)
e -= 1
return float.fromhex('{}0x1.{:013x}p{:d}'.format(s, f, e))
|
Python
| 0.000554
|
@@ -342,16 +342,19 @@
x1%5C.(%5B%5Cd
+a-f
%5D%7B13%7D)p(
@@ -490,16 +490,24 @@
x).hex()
+.lower()
).groups
|
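The original pattern [\d]{13} only matched decimal digits, but float.hex() writes the 13-digit mantissa in hexadecimal, so any value whose mantissa contains a-f failed to parse; the fix widens the character class and lowercases the input. A quick standard-library check of the fixed behaviour:

import re

PARSE_FLOAT_RE = re.compile(r'([+-]*)0x1\.([\da-f]{13})p(.*)')

h = (0.1).hex()                      # '0x1.999999999999ap-4' -- note the 'a'
s, f, e = PARSE_FLOAT_RE.match(h.lower()).groups()
print(int(f, 16), int(e))            # mantissa as an int, exponent -4

The old pattern returns None on this input, so the unfixed previous_float raised AttributeError for such values.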
804c51951650e22a688abe306d47fba97e11acd6
|
Add '/' to allowed filename characters
|
util.py
|
util.py
|
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import os
from configparser import ConfigParser
import requests
import praw
from markdown import markdown
def to_html(comment):
result = markdown(comment.body) + '<footer>' + comment.author.name + '</footer>'
children = ['<blockquote>' + to_html(reply) + '</blockquote>' for reply in comment.replies if
reply.author is not None]
if children:
result += ''.join(children)
return result
def get_comments(submission):
out = ''
for comment in submission.comments:
if comment.author is not None:
out += '<blockquote>' + to_html(comment) + '</blockquote>'
return out
def get_auth():
if os.path.isfile(os.path.join(os.path.dirname(__file__), 'settings.cfg')):
config = ConfigParser()
config.read(os.path.join(os.path.dirname(__file__), 'settings.cfg'))
username = config.get('auth', 'username')
password = config.get('auth', 'password')
else:
username = os.environ['SMTP_USERNAME']
password = os.environ['SMTP_PASSWORD']
return username, password
def get_smtp():
if os.path.isfile(os.path.join(os.path.dirname(__file__), 'settings.cfg')):
config = ConfigParser()
config.read(os.path.join(os.path.dirname(__file__), 'settings.cfg'))
server = config.get('smtp', 'server')
port = config.get('smtp', 'port')
else:
server = os.environ['SMTP_SERVER']
port = os.environ['SMTP_PORT']
return server, port
def get_readability_token():
if os.path.isfile(os.path.join(os.path.dirname(__file__), 'settings.cfg')):
config = ConfigParser()
config.read(os.path.join(os.path.dirname(__file__), 'settings.cfg'))
token = config.get('readability', 'token')
else:
token = os.environ['READABILITY_TOKEN']
return token
def send_email(to, kindle_address, attachment, title):
msg = MIMEMultipart()
msg['From'] = 'convert@reddit2kindle.com'
if kindle_address == 'free':
msg['To'] = to + '@free.kindle.com'
else:
msg['To'] = to + '@kindle.com'
msg['Subject'] = title
attach = MIMEText(attachment.encode('iso-8859-1', 'xmlcharrefreplace'), 'html', 'iso-8859-1')
attach.add_header('Content-Disposition', 'attachment',
filename="".join(c for c in title if c.isalnum() or c in ['-', '_', ',', ' ']).rstrip() + '.html')
msg.attach(attach)
s = smtplib.SMTP(get_smtp()[0], get_smtp()[1])
s.login(get_auth()[0], get_auth()[1])
s.send_message(msg)
s.quit()
def validate_request_post(values):
if values['submission'] is '':
return 'You need to put a URL in!'
if values['email'] is '':
return 'How am I supposed to send it to you without an email address?'
if values['kindle_address'] not in ['free', 'normal']:
return 'Which kindle address do you want me to send to?'
return None
def validate_request_subreddit(values):
if values['subreddit'] is '':
return 'I need a subreddit name!'
if values['time'] not in ['all', 'year', 'month', 'week', 'day', 'hour']:
return 'That\'s not a valid time period, is it?'
try:
if values['limit'] is '' or 0 > int(values['limit']) or int(values['limit']) > 25:
return 'How many posts would you like?'
except ValueError:
return 'How many posts would you like?'
if values['email'] is '':
return 'How am I supposed to send it to you without an email address?'
if values['kindle_address'] not in ['free', 'normal']:
return 'Which kindle address do you want me to send to?'
return None
def get_posts(subreddit, time, limit):
if time == 'hour':
return r.get_subreddit(subreddit).get_top_from_hour(limit=limit)
elif time == 'day':
return r.get_subreddit(subreddit).get_top_from_day(limit=limit)
elif time == 'week':
return r.get_subreddit(subreddit).get_top_from_week(limit=limit)
elif time == 'month':
return r.get_subreddit(subreddit).get_top_from_month(limit=limit)
elif time == 'year':
return r.get_subreddit(subreddit).get_top_from_year(limit=limit)
elif time == 'all':
return r.get_subreddit(subreddit).get_top_from_all(limit=limit)
def get_readability(url):
request = requests.get(
'https://readability.com/api/content/v1/parser?url=' + url + '&token=' + get_readability_token())
return request.json()['content']
r = praw.Reddit(user_agent='reddit2kindle')
|
Python
| 0.000394
|
@@ -2464,16 +2464,21 @@
',', '
+', '/
'%5D).rstr
|
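The attachment filename is produced by whitelisting characters of the title; the patch adds '/' to the allowed set, which matters for subreddit-style titles. A small demonstration of the filter (the sample title is made up):

title = "TIL: r/AskHistorians, best of 2014!"
allowed = ['-', '_', ',', ' ', '/']
print("".join(c for c in title if c.isalnum() or c in allowed).rstrip() + '.html')
# -> TIL r/AskHistorians, best of 2014.html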
7e3f28329d887229345fa0e8085ca7e09fe7686e
|
Improve wsgi.py
|
wsgi.py
|
wsgi.py
|
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
application = get_wsgi_application()
|
Python
| 0.000005
|
@@ -1,65 +1,38 @@
-import os%0A%0Afrom django.core.wsgi import get_wsgi_application%0A
+# -*- coding: utf-8 -*-%0A%0Aimport os
%0Aos.
@@ -89,16 +89,66 @@
ings%22)%0A%0A
+from django.core.wsgi import get_wsgi_application%0A
applicat
|
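The rewrite is about ordering as much as style: DJANGO_SETTINGS_MODULE should be in the environment before django.core.wsgi is imported, the conventionally safe order for WSGI entry points. Decoded, the whole file becomes:

# -*- coding: utf-8 -*-

import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")

from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()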
8947f6f7733593ec2b701aaa0b6fb98d973b7850
|
Add pull to wsgi startup process, and /uptime route to the app
|
wsgi.py
|
wsgi.py
|
import os
import sys
import time
import datetime
from apscheduler.scheduler import Scheduler
from bottle import Bottle, mako_view
# sys.path is a global for this python thread, so this enables local imports throughout the app
sys.path.insert(0, '.')
from fetch import fetch
from settings import datadir
from sync import sync
from logger import logger
schedule = Scheduler()
schedule.start()
schedule.add_interval_job(fetch, minutes=1)
schedule.add_interval_job(sync, hours=6)
# schedule.add_interval_job(sync, minutes=5)
now = datetime.datetime.utcnow()
logger.debug('Scheduler initialized. UTC=%s', now.isoformat())
application = Bottle()
def linecount(filepath):
lines = -1
with open(filepath, 'rb') as fp:
for lines, _ in enumerate(fp):
pass
return lines
@application.route('/')
@mako_view('index.mako')
def index():
filenames = sorted(os.listdir(datadir))
filepaths = [os.path.join(datadir, filename) for filename in filenames]
files = [dict(name=filepath, lines=linecount(filepath)) for filepath in sorted(filepaths)]
return dict(files=files)
@application.route('/fetch')
def get_fetch():
started = time.time()
fetch()
ended = time.time()
return 'Fetch done. Took %0.2f seconds.' % (ended - started)
@application.route('/sync')
def get_sync():
started = time.time()
sync()
ended = time.time()
return 'Sync done. Took %0.2f seconds.' % (ended - started)
|
Python
| 0
|
@@ -318,16 +318,25 @@
ort sync
+, get_dir
%0Afrom lo
@@ -355,16 +355,90 @@
logger%0A%0A
+%0Alogger.debug('Initializing with datadir from github.')%0Aget_dir(datadir)%0A%0A
schedule
@@ -558,54 +558,13 @@
=6)%0A
-# schedule.add_interval_job(sync, minutes=5)%0A%0A
+%0Autc_
now
@@ -638,16 +638,20 @@
TC=%25s',
+utc_
now.isof
@@ -683,16 +683,50 @@
ottle()%0A
+application_started = time.time()%0A
%0A%0Adef li
@@ -1311,33 +1311,33 @@
h done. Took %250.
-2
+3
f seconds.' %25 (e
@@ -1494,9 +1494,9 @@
%250.
-2
+3
f se
@@ -1503,28 +1503,152 @@
conds.' %25 (ended - started)%0A
+%0A%0A@application.route('/uptime')%0Adef get_uptime():%0A return 'Uptime: %250.3f seconds.' %25 (time.time() - application_started)%0A
|
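Decoded, the startup now pulls the data directory from GitHub via get_dir(datadir) (imported from sync alongside sync itself), records a start timestamp next to the Bottle app, and serves it back from a new route:

import time
from bottle import Bottle

application = Bottle()
application_started = time.time()

@application.route('/uptime')
def get_uptime():
    return 'Uptime: %0.3f seconds.' % (time.time() - application_started)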
0f570e5a0f33583dbc419be5d6d71ce9c804e131
|
Upgrade comments
|
wsgi.py
|
wsgi.py
|
"""
WSGI config for {{ project_name }} project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings.production")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling, MediaCling
application = Cling(MediaCling(get_wsgi_application()))
|
Python
| 0
|
@@ -201,9 +201,9 @@
n/1.
-7
+8
/how
|
1e362ab8704c76e3606fae9317dd85eeb06259ea
|
remove superfluous block size
|
zero.py
|
zero.py
|
#!/bin/py
import os
count = 1
def zeroToDrive():
''' write zeros to drive '''
wipes = 1
for int in range(count):
os.system(("dd if=/dev/zero bs=4096 | pv --progress --timer --rate --bytes| dd of=/dev/null bs=4096"))
# os.system(os.system(("dd if=/dev/zero bs=4096 | pv -ptrb | dd of=/dev/null bs=4096"))
wipes+=1
zeroToDrive()
|
Python
| 0
|
@@ -157,34 +157,25 @@
f=/dev/zero
-bs=4096 %7C
+%7C
pv --progres
@@ -278,17 +278,8 @@
zero
- bs=4096
%7C pv
|
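Decoded, the loop body keeps pv as the throughput meter and drops the input-side bs=4096; when dd only feeds a pipe, the read block size changes syscall granularity, not the result, so it was superfluous (the output side's bs=4096 is untouched by this hunk):

os.system(("dd if=/dev/zero | pv --progress --timer --rate --bytes| dd of=/dev/null bs=4096"))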