commit (stringlengths 40-40) | subject (stringlengths 1-3.25k) | old_file (stringlengths 4-311) | new_file (stringlengths 4-311) | old_contents (stringlengths 0-26.3k) | lang (stringclasses, 3 values) | proba (float64, 0-1) | diff (stringlengths 0-7.82k) |
|---|---|---|---|---|---|---|---|
e0280150917934e9ba051966e8aa966d4cd43a6d | fix flake warning try 2 | verisure/devices/smartplug.py | verisure/devices/smartplug.py |
"""
Smartplug device
"""
import time
from .overview import Overview
OVERVIEW_URL = '/overview/smartplug'
COMMAND_URL = '/smartplugs/onoffplug.cmd'
DETAILS_URL = '/smarthome/{}/details'
class Smartplug(object):
""" Smartplug device
Args:
session (verisure.session): Current session
"""
def __init__(self, session):
self._session = session
def get(self):
""" Get device overview """
status = self._session.get(OVERVIEW_URL)
return [Overview('smartplug', val) for val in status]
def set(self, device_id, value):
""" Set device status
Args:
device_id (str): Id of the smartplug
value (str): new status, 'on' or 'off'
"""
data = {
'targetDeviceLabel': device_id,
'targetOn': value
}
return not self._session.post(COMMAND_URL, data)
def get_details(self, device_id):
""" Get details from a smartplug
Args:
device_id (str): Id of the smartplug
"""
return self._session.get(DETAILS_URL.format(
device_id.upper().replace(' ', '%20')))
def set_location(self, device_id, location):
""" Set smartplug location
Args:
device_id (str): Id of the smartplug
location (str): New location
"""
details_url = DETAILS_URL.format(
device_id.upper().replace(' ', '%20'))
details = self._session.get(details_url)
details['location'] = location
self._session.put(details_url, details)
def wait_while_updating(self, device_id, value, max_request_count=100):
""" Wait for device status to update
Args:
device_id (str): Id of the smartplug
value (str): status to wait for, 'on' or 'off'
max_request_count (int): maximum number of post requests
Returns: retries if success else -1
"""
for counter in range(max_request_count):
if [overview for overview in self.get()
if (overview.id == device_id
and overview.status == value)]:
return counter
time.sleep(1)
return -1
| Python | 0 |
@@ -2086,17 +2086,16 @@
if
-(
overview
@@ -2110,36 +2110,16 @@
evice_id
-%0A
and ove
@@ -2139,17 +2139,16 @@
== value
-)
%5D:%0A
|
7e48ea5fe20eb63de782ec8d110da4f848155f79 | Remove unnecessary struct from colour tables parse | gifprime/parser.py | gifprime/parser.py |
"""Construct-based parser for the GIF file format.
Based on specifications:
http://www.w3.org/Graphics/GIF/spec-gif89a.txt
http://www.w3.org/Graphics/GIF/spec-gif87.txt
"""
import construct
import gifprime.lzw
def BlockStart(name, label):
"""Return header for a block."""
return construct.Embedded(
construct.Struct(
'block_header',
construct.Value('block_type', lambda ctx: name),
construct.Const(construct.ULInt8('ext_intro'), 0x21),
construct.Const(construct.ULInt8('ext_label'), label),
),
)
def DataSubBlocks(name):
"""Return Adapter to parse GIF data sub-blocks."""
return construct.ExprAdapter(
construct.Struct(
name,
construct.OptionalGreedyRange(
construct.Struct(
'blocks',
construct.NoneOf(construct.ULInt8('block_size'), [0]),
construct.Bytes('data_values', lambda ctx: ctx.block_size),
),
),
construct.Const(construct.ULInt8('terminator'), 0)
),
encoder=None, # TODO implement encoder
decoder=lambda obj, ctx: ''.join(dsb.data_values for dsb in obj.blocks),
)
class LzwAdapter(construct.Adapter):
"""Adapter for LZW-compressed data.
Example:
LzwAdapter(Bytes('foo', 4))
"""
def _encode(self, obj, context):
return ''.join(gifprime.lzw.compress(obj, context.lzw_min))
def _decode(self, obj, context):
return ''.join(gifprime.lzw.decompress(obj, context.lzw_min))
gif = construct.Struct(
'GIF',
construct.Select(
'magic',
construct.Magic('GIF89a'),
construct.Magic('GIF87a'),
),
construct.Struct(
'logical_screen_descriptor',
construct.ULInt16('logical_width'),
construct.ULInt16('logical_height'),
construct.EmbeddedBitStruct(
construct.Flag('gct_flag'),
construct.macros.BitField('colour_res', 3),
construct.Flag('sort_flag'),
construct.macros.BitField('gct_size', 3),
),
construct.ULInt8('bg_col_index'),
construct.ULInt8('pixel_aspect'),
),
construct.If(
lambda ctx: ctx.logical_screen_descriptor.gct_flag,
construct.Struct(
'gct',
construct.Array(
lambda ctx: pow(2,
ctx._.logical_screen_descriptor.gct_size + 1),
construct.Array(3, construct.ULInt8('colour_component')),
),
),
),
construct.GreedyRange(
construct.Select(
'body',
construct.Struct(
'application_extension',
BlockStart('application', 0xFF),
construct.Const(construct.ULInt8('block_size'), 11),
construct.String('app_id', 8),
construct.Bytes('app_auth_code', 3),
DataSubBlocks('app_data'),
),
construct.Struct(
'comment_extension',
BlockStart('comment', 0xFE),
DataSubBlocks('comment'),
),
construct.Struct(
'image',
construct.Optional(
construct.Struct(
'gce',
BlockStart('gce', 0xF9),
construct.Const(construct.ULInt8('block_size'), 4),
construct.EmbeddedBitStruct(
construct.Padding(3), # reserved
construct.macros.BitField('disposal_method', 3),
construct.Flag('user_input_flag'),
construct.Flag('transparent_colour_flag'),
),
construct.ULInt16('delay_time'),
construct.ULInt8('transparent_colour_index'),
construct.Const(construct.ULInt8('terminator'), 0),
),
),
construct.Struct(
'image_descriptor',
construct.Const(construct.ULInt8('img_sep'), 0x2C),
construct.ULInt16('left'),
construct.ULInt16('top'),
construct.ULInt16('width'),
construct.ULInt16('height'),
construct.EmbeddedBitStruct(
construct.Flag('lct_flag'),
construct.Flag('interlace_flag'),
construct.Flag('sort_flag'),
construct.Padding(2), # reserved
construct.macros.BitField('lct_size', 3),
),
),
construct.If(
lambda ctx: ctx.image_descriptor.lct_flag,
construct.Struct(
'lct',
construct.Array(
lambda ctx: pow(
2,
ctx._.image_descriptor.lct_size + 1
),
construct.Array(
3,
construct.ULInt8('colour_component'),
),
),
),
),
construct.ULInt8('lzw_min'),
# TODO: creates an array called data_subblocks instead of index
construct.Tunnel(
LzwAdapter(DataSubBlocks('pixels')),
construct.Array(
lambda ctx: (ctx.image_descriptor.width *
ctx.image_descriptor.height),
construct.ULInt8('index'),
),
),
),
),
),
construct.Const(
construct.ULInt8('trailer'),
0x3B,
),
construct.Terminator,
)
| Python | 0.000004 |
@@ -2299,57 +2299,8 @@
ag,%0A
- construct.Struct(%0A 'gct',%0A
@@ -2312,36 +2312,32 @@
onstruct.Array(%0A
-
lamb
@@ -2354,47 +2354,13 @@
w(2,
-%0A
ctx.
-_.
logi
@@ -2392,28 +2392,24 @@
_size + 1),%0A
-
@@ -2449,41 +2449,13 @@
t8('
-colour_component')),%0A
+gct')
),%0A
@@ -4754,81 +4754,8 @@
ag,%0A
- construct.Struct(%0A 'lct',%0A
@@ -4779,36 +4779,32 @@
onstruct.Array(%0A
-
@@ -4831,82 +4831,15 @@
pow(
-%0A 2,%0A
+2,
ctx.
-_.
imag
@@ -4867,43 +4867,11 @@
+ 1
-%0A ),%0A
+),%0A
@@ -4882,33 +4882,32 @@
-
construct.Array(
@@ -4910,169 +4910,34 @@
ray(
-%0A 3,%0A construct.ULInt8('colour_component'),%0A ),%0A
+3, construct.ULInt8('lct')
),%0A
|
902b7233088be73a9eeaab6721f26b108ea2668c | Add link to GIF87a spec | gifprime/parser.py | gifprime/parser.py |
"""Construct-based parser for the GIF89a file format.
Based on specification:
http://www.w3.org/Graphics/GIF/spec-gif89a.txt
"""
import construct
# common representation of blocks of data (includes terminator)
_data_subblocks = construct.Struct(
'_data_subblocks',
construct.OptionalGreedyRange(
construct.Struct(
'data_subblock',
construct.NoneOf(construct.ULInt8('block_size'), [0]),
construct.Bytes('data_values', lambda ctx: ctx.block_size),
),
),
construct.Const(construct.ULInt8('terminator'), 0),
)
gif = construct.Struct(
'GIF',
construct.Select(
'magic',
construct.Magic('GIF89a'),
construct.Magic('GIF87a'),
),
construct.Struct(
'logical_screen_descriptor',
construct.ULInt16('logical_width'),
construct.ULInt16('logical_height'),
construct.EmbeddedBitStruct(
construct.Flag('gct_flag'),
construct.macros.BitField('colour_res', 3),
construct.Flag('sort_flag'),
construct.macros.BitField('gct_size', 3),
),
construct.ULInt8('bg_col_index'),
construct.ULInt8('pixel_aspect'),
),
construct.If(
lambda ctx: ctx.logical_screen_descriptor.gct_flag,
construct.Struct(
'gct',
construct.Array(
lambda ctx: pow(2, ctx._.logical_screen_descriptor.gct_size + 1),
construct.Array(3, construct.ULInt8('colour_component')),
),
),
),
construct.GreedyRange(
construct.Select(
'body',
construct.Struct(
'application_extension',
construct.Const(construct.ULInt8('ext_intro'), 0x21),
construct.Const(construct.ULInt8('comm_label'), 0xFF),
construct.Const(construct.ULInt8('block_size'), 11),
construct.String('app_id', 8),
construct.Bytes('app_auth_code', 3),
_data_subblocks,
),
construct.Struct(
'comment_extension',
construct.Const(construct.ULInt8('ext_intro'), 0x21),
construct.Const(construct.ULInt8('comm_label'), 0xFE),
construct.ULInt8('comm_size'),
construct.String('comment', lambda ctx: ctx.comm_size),
construct.Const(construct.ULInt8('terminator'), 0)
),
construct.Struct(
'image',
construct.Optional(
construct.Struct(
'gce',
construct.Const(construct.ULInt8('ext_intro'), 0x21),
construct.Const(construct.ULInt8('gce_label'), 0xF9),
construct.Const(construct.ULInt8('block_size'), 4),
construct.EmbeddedBitStruct(
construct.Padding(3), # reserved
construct.macros.BitField('disposal_method', 3),
construct.Flag('user_input_flag'),
construct.Flag('transparent_colour_flag'),
),
construct.ULInt16('delay_time'),
construct.ULInt8('transparent_colour_index'),
construct.Const(construct.ULInt8('terminator'), 0),
),
),
construct.Struct(
'image_descriptor',
construct.Const(construct.ULInt8('img_sep'), 0x2C),
construct.ULInt16('left'),
construct.ULInt16('top'),
construct.ULInt16('width'),
construct.ULInt16('height'),
construct.EmbeddedBitStruct(
construct.Flag('lct_flag'),
construct.Flag('interlace_flag'),
construct.Flag('sort_flag'),
construct.Padding(2), # reserved
construct.macros.BitField('lct_size', 3),
),
),
construct.If(
lambda ctx: ctx.image_descriptor.lct_flag,
construct.Struct(
'lct',
construct.Array(
lambda ctx: pow(2, ctx._.image_descriptor.lct_size + 1),
construct.Array(
3,
construct.ULInt8('colour_component'),
),
),
),
),
construct.ULInt8('lzw_min'),
_data_subblocks,
),
),
),
construct.Const(
construct.ULInt8('trailer'),
0x3B,
),
construct.Terminator,
)
| Python | 0 |
@@ -30,19 +30,16 @@
the GIF
-89a
file fo
@@ -67,16 +67,17 @@
fication
+s
:%0Ahttp:/
@@ -117,16 +117,62 @@
89a.txt%0A
+http://www.w3.org/Graphics/GIF/spec-gif87.txt%0A
%22%22%22%0A%0A%0Aim
|
4c54dc3d7c82bdbba9054e3835dd679e65461015 | Remove unnecessary sys.exit() call on main() | backquotes.py | backquotes.py |
#!/usr/bin/env python
# -*- coding:ascii -*-
import contextlib
import inspect
import locale
import optparse
import os
import string
import subprocess
import sys
import tempfile
import tokenize
import warnings
__all__ = ['shell', 'preprocess']
__version__ = '0.0.4'
def shell(argstring):
r"""Invoke shell commands substituted by variables in the current scope.
"""
frame = inspect.currentframe().f_back
variables = frame.f_globals.copy()
variables.update(frame.f_locals)
command = string.Template(argstring).substitute(variables)
process = subprocess.Popen(command,
stdout=subprocess.PIPE,
shell=True)
out, _err = process.communicate()
if sys.version_info < (3,):
return out
else:
return out.decode(locale.getpreferredencoding())
def preprocess(filename, readline):
r"""Preprocess Python source code using backquotes into plain Python code.
.. warning:: preprocess() blocks while processing entire source codes.
"""
tokens = []
inside_backquotes = False
quote_start = 0
for token in tokenize.generate_tokens(readline):
type, string, (srow, scol), (erow, ecol), line = token
if string == '`':
if inside_backquotes:
# print(`ls`.splitlines())
# ^
quote_end = scol
quoted_string = line[quote_start:quote_end]
if _is_quoted(quoted_string):
quoted_string = quoted_string[1:-1]
tokens.extend([
(tokenize.STRING, _triple_quote(quoted_string)),
(tokenize.OP, ')'),
])
else:
# print(`ls`.splitlines())
# ^
quote_start = ecol
tokens.extend([
(tokenize.NAME, 'backquotes'),
(tokenize.OP, '.'),
(tokenize.NAME, 'shell'),
(tokenize.OP, '('),
])
inside_backquotes ^= True
else:
if inside_backquotes:
# print(`ls`.splitlines())
# ^^
# quoted string will be extracted at the end of the quotation
pass
else:
# print(`ls`.splitlines())
# ^^^^^^ ^^^^^^^^^^^^^^
tokens.append((type, string))
return tokenize.untokenize(tokens)
@contextlib.contextmanager
def _append_to_python_path(path):
current_python_path = os.getenv('PYTHONPATH', '')
if current_python_path:
os.environ['PYTHONPATH'] = ':'.join((current_python_path, path))
else:
os.environ['PYTHONPATH'] = path
yield
os.environ['PYTHONPATH'] = current_python_path
def _detect_environment(frame):
r"""Detect how Python source code is executed.
"""
if frame.f_code.co_filename == '<stdin>':
if frame.f_locals.get('__file__') is None:
return 'repl'
else:
return 'redirect'
outer_frame = frame.f_back
if outer_frame and outer_frame.f_locals.get('__name__') != '__main__':
return 'module'
else:
return 'script'
def _exec(object, globals, locals):
r"""A wrapper function to provide consistent interface among Python 2/3.
"""
if sys.version_info < (3,):
exec('exec object in globals, locals')
else:
exec(object, globals, locals)
def _is_quoted(s):
r"""Returns whether if string is surrouded by quotations.
"""
return s[0] in ('"', "'") and s[0] == s[-1]
def _triple_quote(s):
r"""Returns raw triple single-quoted string.
"""
return "r'''" + s + "'''"
def _main(argv=sys.argv[1:]):
r"""Main entry point of this script.
"""
usage = 'Usage: %prog -m backquotes [options] [FILE] [ARG, ...]'
prog = os.path.basename(sys.executable)
parser = optparse.OptionParser(usage=usage, version=__version__, prog=prog)
parser.add_option(
'-E',
'--no-exec',
dest='execute',
action='store_false',
default=True,
help='stop after preprocessing stage and print preprocessed source')
opts, args = parser.parse_args(argv)
try:
infile = open(args.pop(0), 'r') # not 'rb'
except IndexError:
infile = sys.stdin
with contextlib.closing(infile):
preprocessed_source = preprocess(infile.name, infile.readline)
if opts.execute:
with tempfile.NamedTemporaryFile(mode='w+') as f:
f.write(preprocessed_source)
f.seek(0)
with _append_to_python_path(os.path.dirname(infile.name)):
return_code = subprocess.call([sys.executable, f.name] + args)
sys.exit(return_code)
else:
sys.stdout.write(preprocessed_source)
sys.exit()
if __name__ == '__main__':
sys.exit(_main())
else:
frame = inspect.currentframe().f_back
while frame.f_code.co_filename.startswith('<frozen importlib'):
frame = frame.f_back
environment = _detect_environment(frame)
if environment in ('redirect', 'repl'):
warnings.warn("backquotes doesn't work on REPL.")
elif environment == 'module':
warnings.warn(
"backquotes doesn't work when imported by another script")
elif sys.version_info < (3,):
with open(frame.f_code.co_filename, 'rb') as f:
source = preprocess(f.name, f.readline)
_exec(source, frame.f_globals, frame.f_locals)
sys.exit()
| Python | 0.00001 |
@@ -4747,15 +4747,8 @@
turn
-_code =
sub
@@ -4797,38 +4797,8 @@
gs)%0A
- sys.exit(return_code)%0A
@@ -4857,26 +4857,24 @@
-sys.exit()
+return 0
%0A%0A%0Aif __
|
6bef219564614d7f54cc662368e5a1f7bf37ca24 | Add public port 80 for nginx | bag8/tools.py | bag8/tools.py |
from __future__ import absolute_import, print_function, unicode_literals
import click
import os
import shutil
from compose.cli.docker_client import docker_client
from bag8.common import PREFIX
from bag8.common import TMPFOLDER
from bag8.common import call
from bag8.common import simple_name
from bag8.common import get_available_projects
from bag8.common import get_container_name
from bag8.common import get_bag8_path
from bag8.common import get_site_projects
from bag8.common import iter_containers
from bag8.common import json_check
from bag8.common import render_yml
from bag8.common import update_container_hosts
from bag8.common import update_local_hosts
class Tools(object):
def __init__(self, project=None):
self.project = project
def hosts(self):
"""Updates your containers /etc/hosts and/or you local /etc/hosts.
"""
hosts_list = []
user_dict = {}
# get the current list [(ip, domain)]
client = docker_client()
for name, container in iter_containers(client=client):
infos = client.inspect_container(container['Id'])
ip = infos['NetworkSettings']['IPAddress']
hostname = infos['Config']['Domainname']
user = infos['Config']['User'] or 'root'
if not hostname:
continue
hosts_list.append((ip, hostname))
user_dict[name] = user
# here s the current hosts
click.echo('hosts found:')
click.echo('----')
click.echo('\n'.join(['{0}\t{1}'.format(*h) for h in hosts_list]))
click.echo('')
# update containers ?
click.echo("Update your containers /etc/hosts ?")
char = None
while char not in ['y', 'n']:
click.echo('Yes (y) or skip (n) ?')
char = click.getchar()
if char == 'y':
for name, __ in iter_containers(client=client):
update_container_hosts(hosts_list, name,
user_dict.get(name, 'root'))
# update local ?
click.echo("Update your local /etc/hosts ?")
char = None
while char not in ['y', 'n']:
click.echo('Yes (y) or no (n) ?')
char = click.getchar()
if char == 'y':
update_local_hosts(hosts_list)
def projects(self):
click.echo('\n'.join(get_available_projects()))
def nginx(self, links=None, volumes=None):
conf_path = os.path.join(TMPFOLDER, 'nginx', 'conf.d')
# remove previous configs
shutil.rmtree(conf_path, ignore_errors=True)
# create new conf folder
os.makedirs(conf_path)
log_path = os.path.join(TMPFOLDER, 'nginx', 'log')
if not os.path.exists(log_path):
os.makedirs(log_path)
environment, links, volumes = json_check(None, links, volumes)
links = ['{0}:{1}'.format(get_container_name(l.split(':')[0]),
l.split(':')[1]) for l in links]
volumes += [
'{0}:/etc/nginx/conf.d'.format(conf_path),
'{0}:/var/log/nginx'.format(log_path),
]
containers = {n.split('_')[1]: n
for n, __ in iter_containers()}
volumes_from = []
for project in get_site_projects(running=True):
# shortcut
name = simple_name(project)
container_name = containers[name]
# updates volumes from to share between site and nginx containers
volumes_from.append(container_name)
# add link to nginx
links.append('{0}:{1}.local'.format(container_name, name))
# copy nginx site conf
shutil.copy(os.path.join(get_bag8_path(project), 'site.conf'),
os.path.join(conf_path, '{0}.conf'.format(project)))
docker_args = [
'docker',
'run',
'-d',
'--name {0}_nginx_1'.format(PREFIX), # TODO get prefix from cli
'--hostname www.nginx.local',
' '.join(['--volumes-from {0}'.format(v) for v in volumes_from]),
' '.join(['-v {0}'.format(v) for v in volumes]),
' '.join(['--link {0}'.format(l) for l in links]),
'nginx',
]
return call(' '.join(docker_args))
def render(self, environment, links, ports, user, volumes, no_volumes,
prefix):
environment, links, volumes = json_check(environment, links, volumes)
render_yml(self.project, environment=environment, links=links,
ports=ports, user=user, volumes=volumes,
no_volumes=no_volumes, prefix=prefix)
| Python | 0.000001 |
@@ -4023,16 +4023,51 @@
rom cli%0A
+ '-p', '0.0.0.0:80:80',%0A
|
401c4bf8a421d4de3161662fba51d9e72ba5af1b | Handle nullable object attributes on base.tests reverse_pattern | base/tests.py | base/tests.py |
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
# standard library
# django
from django.conf import settings
from django.contrib import admin
from django.urls import NoReverseMatch
from django.urls import reverse
from django.test import TestCase
# django-cron
from django_cron import get_class
# urls
from project.urls import urlpatterns
# utils
from inflection import underscore
from base.utils import get_our_models
from base.utils import random_string
from base.mockups import Mockup
class BaseTestCase(TestCase, Mockup):
def setUp(self):
super(BaseTestCase, self).setUp()
self.password = random_string()
self.user = self.create_user(self.password)
self.login()
def login(self, user=None, password=None):
if user is None:
user = self.user
password = self.password
return self.client.login(email=user.email, password=password)
def reverse_pattern(pattern, namespace, args=None, kwargs=None):
try:
if namespace:
return reverse('{}:{}'.format(
namespace, pattern.name, args=args, kwargs=kwargs)
)
else:
return reverse(pattern.name, args=args, kwargs=kwargs)
except NoReverseMatch:
return None
class UrlsTest(BaseTestCase):
def setUp(self):
super(UrlsTest, self).setUp()
# we are going to send parameters, so one thing we'll do is to send
# tie id 1
self.user.delete()
self.user.id = 1
# give the user all the permissions, so we test every page
self.user.is_superuser = True
self.user.save()
self.login()
# store default values for urls. E.g. user_id
self.default_params = {}
# store default objects to get foreign key parameters
self.default_objects = {}
for model in get_our_models():
model_name = underscore(model.__name__)
method_name = 'create_{}'.format(model_name)
# store the created object
obj = getattr(self, method_name)(**self.get_obj_kwargs(model))
self.default_objects[model_name] = obj
self.assertIsNotNone(obj, '{} returns None'.format(method_name))
# store the object id with the expected name a url should use
# when using object ids:
param_name = '{}_id'.format(model_name)
self.default_params[param_name] = obj.id
def get_obj_kwargs(self, model):
"""
When testing all urls, there are business logic that require certain
values on the objects we are creating. This method returns a kwrags
diciontary to be passed to the create_X method that creaates an
instance of model.
For example, imagine that APP has the url /message/1/
It is reasonable that the view will return 404 if the logged in user
has no nothing to do with.
This method will be called when creating the test objects to be used
on the UrlsTest, in our example a solution would be to return a
dictionary where the user is the logged in user
return {"user": self.user}
"""
return {}
def get_url_using_param_names(self, url_pattern, namespace):
"""
Using the dictionary of parameters defined on self.default_params and
the list of objects defined on self.default_objects, construct urls
with valid parameters.
This method assumes that nested urls name their parents ids as
{model}_id
Thus something like the comments of a user should be in the format of
'/users/{user_id}/comments/'
"""
param_names = url_pattern.pattern.regex.groupindex.keys()
params = {}
if not param_names:
return
callback = url_pattern.callback
obj = None
for param_name in param_names:
if param_name == 'pk' and hasattr(callback, 'view_class'):
model_name = underscore(
url_pattern.callback.view_class.model.__name__
)
params['pk'] = self.default_params['{}_id'.format(model_name)]
obj = self.default_objects[model_name]
else:
try:
params[param_name] = self.default_params[param_name]
except KeyError:
return None
if obj:
# if the object has an attribute named as the parameter
# assume it should be used on the url, since many views
# filter nested objects
for param in params:
if hasattr(obj, param):
params[param] = getattr(obj, param)
return reverse_pattern(url_pattern, namespace, kwargs=params)
def reverse_pattern(self, url_pattern, namespace):
url = self.get_url_using_param_names(url_pattern, namespace)
if url:
return url
param_names = url_pattern.pattern.regex.groupindex.keys()
url_params = {}
for param in param_names:
try:
url_params[param] = self.default_params[param]
except KeyError:
url_params[param] = 1
return reverse_pattern(url_pattern, namespace, kwargs=url_params)
def test_responses(self):
ignored_namespaces = [
'admin',
]
def test_url_patterns(patterns, namespace=''):
if namespace in ignored_namespaces:
return
for pattern in patterns:
self.login()
if hasattr(pattern, 'name'):
url = self.reverse_pattern(pattern, namespace)
if not url:
continue
try:
response = self.client.get(url)
except Exception:
print("Url {} failed: ".format(url))
raise
msg = 'url "{}" ({})returned {}'.format(
url, pattern.name, response.status_code
)
self.assertIn(
response.status_code,
(200, 302, 403, 405), msg
)
else:
test_url_patterns(pattern.url_patterns, pattern.namespace)
test_url_patterns(urlpatterns)
for model, model_admin in admin.site._registry.items():
patterns = model_admin.get_urls()
test_url_patterns(patterns, namespace='admin')
class CheckErrorPages(TestCase):
def test_404(self):
response = self.client.get('/this-url-does-not-exist')
self.assertTemplateUsed(response, 'exceptions/404.pug')
class CronTests(BaseTestCase):
def test_cron_classes_to_run(self):
"""
Asserts that a cron class name can be imported using the canonical name
given in project settings
"""
cron_class_names = getattr(settings, 'CRON_CLASSES', [])
for cron_class_name in cron_class_names:
assert get_class(cron_class_name)
| Python | 0.000002 |
@@ -1233,34 +1233,8 @@
name
-, args=args, kwargs=kwargs
)%0A
@@ -4783,24 +4783,48 @@
hasattr(obj,
+ param) and getattr(obj,
param):%0A
|
2cfa733900ca385dbc835124895771f04dc21969 | add v4l2 codecs | voctocore/lib/localplayout.py | voctocore/lib/localplayout.py |
#!/usr/bin/env python3
import logging
import gi
from gi.repository import Gst
from vocto.video_codecs import construct_video_encoder_pipeline
from lib.args import Args
from lib.config import Config
class LocalPlayout():
def __init__(self, source, port, use_audio_mix=False, audio_blinded=False):
# create logging interface
self.log = logging.getLogger('LocalPlayout'.format(source))
# remember things
self.source = source
self._port = port
# open bin
self.bin = "" if Args.no_bins else """
bin.(
name=LocalPlayout
"""
# video pipeline
self.bin += """
video-mix.
! {vcaps}
! queue
max-size-time=3000000000
name=queue-mux-video-localplayout
{vpipeline}
! queue
max-size-time=3000000000
name=queue-mux-localplayout-{source}
! mux-localplayout-{source}.
""".format(source=self.source,
vpipeline=construct_video_encoder_pipeline('localplayout'),
vcaps=Config.getVideoCaps())
# audio pipeline
if use_audio_mix or source in Config.getAudioSources(internal=True):
self.bin += """
{use_audio}audio-{audio_source}{audio_blinded}.
! queue
max-size-time=3000000000
name=queue-audio-localplayout-convert-{source}
! audioconvert
! queue
max-size-time=3000000000
name=queue-mux-audio-{source}
! mux-localplayout-{source}.
""".format(source=self.source,
use_audio="" if use_audio_mix else "source-",
audio_source="mix" if use_audio_mix else self.source,
audio_blinded="-blinded" if Config.getBlinderEnabled() and audio_blinded else ""
)
# mux pipeline
self.bin += """
matroskamux
name=mux-localplayout-{source}
streamable=true
writing-app=Voctomix-LocalPlayout
! queue
max-size-time=3000000000
name=queue-sink-localplayout-{source}
! sink-localplayout-{source}.
""".format(source=self.source)
# sink pipeline
self.bin += """
srtserversink
name=sink-localplayout-{source}
uri=srt://:{port}
""".format(source=self.source,
port=self._port)
# close bin
self.bin += "" if Args.no_bins else "\n)\n"
def __str__(self):
return 'LocalPlayout[{}] at port {}'.format(self.source, self._port)
def port(self):
return "srt://:{}".format(self._port)
def num_connections(self):
return 0
def audio_channels(self):
return Config.getNumAudioStreams()
def video_channels(self):
return 1
def is_input(self):
return False
def attach(self, pipeline):
self.pipeline = pipeline
| Python | 0.00002 |
@@ -2660,32 +2660,78 @@
layout-%7Bsource%7D%0A
+ wait-for-connection=false%0A
|
55431e184aaca8c443794569d1b37d1163e5ad0d | removed redundant ? in regexp | volta/common/eventshandler.py | volta/common/eventshandler.py |
""" Event parser
"""
import queue as q
import logging
import re
import threading
# data sample: lightning: [volta] 12345678 fragment TagFragment start
# following regexp grabs 'app name', 'nanotime', 'type', 'tag' and 'message' from sample above
re_ = re.compile(r"""
^(?P<app>\S+)
\s+
\[volta\]
\s+
(?P<nanotime>\S+)
\s+
(?P<type>\S+)
\s+
(?P<tag>\S+)
\s+
(?P<message>.*?)
$
""", re.VERBOSE | re.IGNORECASE
)
logger = logging.getLogger(__name__)
class EventsParser(threading.Thread):
"""
reads source queue, parse message and sort events/sync messages to separate queues.
Returns: puts df into appropriate destination queue.
"""
def __init__(self, source, events, sync):
super(EventsParser, self).__init__()
self.source = source
self.destination = {
'sync': sync,
'event': events,
'metric': events,
'fragment': events,
'unknown': events,
}
self._finished = threading.Event()
self._interrupted = threading.Event()
def run(self):
for _ in range(self.source.qsize()):
try:
df = self.source.get_nowait()
except q.Empty:
break
else:
for group in df.apply(self.__parse_event, axis=1).groupby('type'):
if group[0] in self.destination:
self.destination[group[0]].put(group[1])
else:
logger.warning('Unknown event type! %s. Message: %s', group[0], group[1], exc_info=True)
if self._interrupted.is_set():
break
self._finished.set()
def __parse_event(self, row):
match = re_.match(row.message)
if match:
row["app"] = match.group('app')
row["nanotime"] = match.group('nanotime')
row["type"] = match.group('type')
row["tag"] = match.group('tag')
row["message"] = match.group('message')
return row
else:
row["type"] = 'unknown'
row["message"] = row.message
return row
def wait(self, timeout=None):
self._finished.wait(timeout=timeout)
def close(self):
self._interrupted.set()
# =====================================
def main():
import argparse
parser = argparse.ArgumentParser(description='')
parser.add_argument('--debug', dest='debug', action='store_true', default=False)
args = parser.parse_args()
logging.basicConfig(
level="DEBUG" if args.debug else "INFO",
format='%(asctime)s [%(levelname)s] [Volta EventsHandler] %(filename)s:%(lineno)d %(message)s')
logger.info("Volta EventsHandler init")
phone_q = q.Queue()
import datetime
import pandas as pd
# test data:
test_data = []
# message for EventParser - common message
test_data.append([datetime.datetime.now(), 'MessageEventParserCommon data'])
# message for EventParser - uncommon: app: [volta] {nt} event {tag} {message}
test_data.append([datetime.datetime.now(), 'lightning: [volta] 12345678 event TagEventUncommon MessageEventParserUncommon data'])
# message for MetricParser: app: [volta] {nt} metric {tag} {message}
test_data.append([datetime.datetime.now(), 'lightning: [volta] 12345678 metric TagMetric MessageMetricParser data'])
# messages for FragmentParser: app: [volta] {nt} fragment {tag} {start/stop}
test_data.append([datetime.datetime.now(), 'lightning: [volta] 12345678 fragment TagFragment start'])
test_data.append([datetime.datetime.now(), 'lightning: [volta] 12345678 fragment TagFragment stop'])
# message for SyncParser app: [volta] {nt} sync {tag} {rise/fall}
test_data.append([datetime.datetime.now(), 'lightning: [VOLTA] 12345678 sync TagSync rise'])
test_data.append([datetime.datetime.now(), 'lightning: [volta] 12345678 sync TagSync fall'])
# wrong type
test_data.append([datetime.datetime.now(), 'Brokenlightning: [VOLTA] 12345678 sync1 TagSyncBroken riseBroken'])
df = pd.DataFrame(test_data, columns=['ts', 'message'])
phone_q.put(df)
sync_q = q.Queue()
events_q = q.Queue()
events_worker = EventsParser(phone_q, events_q, sync_q)
events_worker.run()
for _ in range(events_q.qsize()):
try:
logger.info('Events: %s', events_q.get_nowait())
except q.Empty:
pass
for _ in range(sync_q.qsize()):
try:
logger.info('Sync: %s', sync_q.get_nowait())
except q.Empty:
pass
if __name__ == "__main__":
main()
| Python | 0.999921 |
@@ -76,16 +76,55 @@
ading%0A%0A%0A
+logger = logging.getLogger(__name__)%0A%0A%0A
# data s
@@ -449,17 +449,16 @@
ssage%3E.*
-?
)%0A $%0A
@@ -501,47 +501,8 @@
)%0A%0A%0A
-logger = logging.getLogger(__name__)%0A%0A%0A
clas
@@ -4145,17 +4145,22 @@
678 sync
-1
+broken
TagSync
|
d301cfc0e8f76c94f8f3bcd1b0263f9bd6e1604c | Add call_decorator to RedisManager. | vumi/persist/redis_manager.py | vumi/persist/redis_manager.py |
# -*- test-case-name: vumi.persist.tests.test_redis_manager -*-
import redis
from vumi.persist.redis_base import Manager
from vumi.persist.fake_redis import FakeRedis
class RedisManager(Manager):
@classmethod
def _fake_manager(cls, key_prefix, client=None):
if client is None:
client = FakeRedis()
manager = cls(client, key_prefix)
# Because ._close() assumes a real connection.
manager._close = client.teardown
return manager
@classmethod
def _manager_from_config(cls, config, key_prefix):
"""Construct a manager from a dictionary of options.
:param dict config:
Dictionary of options for the manager.
:param str key_prefix:
Key prefix for namespacing.
"""
return cls(redis.Redis(**config), key_prefix)
def _close(self):
"""Close redis connection."""
pass
def _purge_all(self):
"""Delete *ALL* keys whose names start with this manager's key prefix.
Use only in tests.
"""
for key in self.keys():
self.delete(key)
def _make_redis_call(self, call, *args, **kw):
"""Make a redis API call using the underlying client library.
"""
return getattr(self._client, call)(*args, **kw)
def _filter_redis_results(self, func, results):
"""Filter results of a redis call.
"""
return func(results)
| Python | 0 |
@@ -166,38 +166,149 @@
dis%0A
-%0A%0Aclass RedisManager(Manager):
+from vumi.persist.riak_manager import flatten_generator%0A%0A%0Aclass RedisManager(Manager):%0A%0A call_decorator = staticmethod(flatten_generator)%0A
%0A
|
8320b81b7355c0158231b4a3c7ac40c49872f7b1 | handle partial pbuttons in ss tab | scripts/ss_tab.py | scripts/ss_tab.py |
# pandas and numpy for data manipulation
import pandas as pd
import numpy as np
import sqlite3
import holoviews as hv
hv.extension('bokeh')
from bokeh.plotting import Figure
from bokeh.models import (CategoricalColorMapper, HoverTool,
ColumnDataSource, Panel,
FuncTickFormatter, SingleIntervalTicker, LinearAxis)
from bokeh.models.widgets import (CheckboxGroup, Slider, RangeSlider,
Tabs, CheckboxButtonGroup,
TableColumn, DataTable, Select,PreText)
from bokeh.layouts import column, row, WidgetBox
import matplotlib.pyplot as plt
import matplotlib.colors as colors
from .generic_tab import generic_tab
def ss_tab(db):
ss_1_tab = generic_tab(db,"ss1")
ss_2_tab = generic_tab(db,"ss2")
ss_3_tab = generic_tab(db,"ss3")
ss_4_tab = generic_tab(db,"ss4")
tabs = Tabs(tabs = [ss_1_tab,ss_2_tab,ss_3_tab,ss_4_tab])
tab = Panel(child=tabs, title = "%SS")
#tab = Panel(child=layout, title = mode)
return tab
| Python | 0 |
@@ -796,22 +796,8 @@
)%0A%09t
-abs = Tabs(tab
s =
@@ -835,16 +835,70 @@
4_tab%5D)%0A
+ tabs = Tabs(tabs = list(filter(None.__ne__, ts)))%0A
%09tab = P
|
aa71eb4e8e1e61f449c4e7dce60f74e9a7d80ae0 | Fix url and timeout | src/kuas_api/kuas/ap.py | src/kuas_api/kuas/ap.py |
#-*- encoding=utf-8 -*-
"""This module `ap` provide manipulate of kuas AP system.
"""
__version__ = 2.0
import requests
from lxml import etree
# AP URL Setting
#: AP sytem base url
AP_BASE_URL = "http://140.127.113.224"
#: AP system login url
AP_LOGIN_URL = AP_BASE_URL + "/kuas/perchk.jsp"
#: AP system general query url, with two args,
# first: prefix of qid, second: qid
AP_QUERY_URL = AP_BASE_URL + "/kuas/%s_pro/%s.jsp?"
#: AP guest account
AP_GUEST_ACCOUNT = "guest"
#: AP guest password
AP_GUEST_PASSWORD = "123"
# Timeout Setting
#: Login timeout
LOGIN_TIMEOUT = 1.0
#: Query timeout
QUERY_TIMEOUT = 5.0
def status():
"""Return AP server status code
:rtype: int
:returns: A HTTP status code
>>> status()
200
"""
try:
ap_status_code = requests.head(
AP_BASE_URL,
timeout=LOGIN_TIMEOUT).status_code
except requests.exceptions.Timeout:
ap_status_code = 408
return ap_status_code
def login(session, username, password, timeout=LOGIN_TIMEOUT):
"""Login to KUAS AP system.
:param session: requests session object
:type session: class requests.sessions.Session
:param username: username of kuas ap system, actually your kuas student id
:type username: str or int
:param password: password of kuas ap system.
:type password: str or int
:param timeout: login timeout
:type timeout: int
:return: login status
:rtype: bool
Login with correct username and password
>>> s = requests.Session()
>>> login(s, "guest", "123")
True
Login with bad username or password
>>> login(s, "guest", "777")
False
"""
payload = {"uid": username, "pwd": password}
# If timeout, return false
try:
r = session.post(AP_LOGIN_URL, data=payload, timeout=timeout)
except requests.exceptions.Timeout:
return False
root = etree.HTML(r.text)
try:
is_login = not root.xpath("//script")[-1].text.startswith("alert")
except:
is_login = False
return is_login
def get_semester_list():
"""Get semester list from ap system.
:rtype: dict
>>> get_semester_list()[-1]['value']
'92,2'
"""
s = requests.Session()
login(s, AP_GUEST_ACCOUNT, AP_GUEST_PASSWORD)
content = query(s, "ag304_01")
root = etree.HTML(content)
options = root.xpath("id('yms_yms')/option")
options = map(lambda x: {"value": x.values()[0].replace("#", ","),
"selected": 1 if "selected" in x.values() else 0,
"text": x.text},
root.xpath("id('yms_yms')/option")
)
options = list(options)
return options
def query(session, qid, args={}):
"""Query AP system page by qid and args
:param session: requests session object, the session must login first.
:type session: class requests.sessions.Session
:param qid: query id of ap system page
:type qid: str
:param args: arguments of query post
:type args: dict
:return" content of query page
:rtype: str
You must login first when using query
Otherwise ap system won't let you use it.
>>> s = requests.Session()
>>> content = query(s, "ag222", {"arg01": "103", "arg02": "2"})
>>> "Please Logon" in content
True
Login to guest
>>> login(s, "guest", "123")
True
Query course data (ag202)
>>> args = {"yms_yms": "103#2", "dgr_id": "14", "unt_id": "UC02", \
"clyear": "", "sub_name": "", "teacher": "", "week": 2, \
"period": 4, "reading": "reading"}
>>> content = query(s, "ag202", args)
>>> "內部控制暨稽核制度" in content
True
"""
data = {"arg01": "", "arg02": "", "arg03": "",
"fncid": "", "uid": ""}
data['fncid'] = qid
for key in args:
data[key] = args[key]
try:
resp = session.post(AP_QUERY_URL % (qid[:2], qid),
data=data,
timeout=QUERY_TIMEOUT
)
resp.encoding = "utf-8"
content = resp.text
except requests.exceptions.ReadTimeout:
content = ""
return content
if __name__ == "__main__":
import doctest
doctest.testmod()
| Python | 0.000346 |
@@ -216,10 +216,10 @@
13.2
-24
+31
%22%0A%0A#
@@ -573,17 +573,17 @@
MEOUT =
-1
+5
.0%0A%0A#: Q
|
64526ca4dc81ceaae2fe139502a0d98d0f3558a7 | write to STDOUT, update name of fn, doc | scripts/fisher_compare.py | scripts/fisher_compare.py |
from fisher import pvalue
import sys
import os.path as op
sys.path.insert(0, op.join(op.dirname(__file__), "../code"))
from methyl import MethylGroup
import numpy as np
from flatfeature import Flat
def bin_setup(chr, adir, bdir, context):
am = MethylGroup(adir)[chr]
bm = MethylGroup(bdir)[chr]
return am.as_context(context), bm.as_context(context)
def run_50bp_gff(flat, adir, bdir, context, window, binary, pvalue_cutoff, ratio_range):
fh = open('fisher.different.%s.%ibp.gff' % (context, window), 'w')
print >>sys.stderr, "writing to:", fh.name
print >>fh, "##gff-version 3"
for chr in flat.seqids:
bp_max = len(flat.fasta[chr])
(a_cs, a_ts, a_mask), (b_cs, b_ts, b_mask) = bin_setup(chr, adir, bdir, context)
for start in xrange(0, bp_max + window, window):
end = min(start + window, bp_max)
if start == end: continue
a_t_count = a_ts[start:end].sum()
a_c_count = a_cs[start:end].sum()
b_t_count = b_ts[start:end].sum()
b_c_count = b_cs[start:end].sum()
p = pvalue(a_t_count, a_c_count, b_t_count, b_c_count)
pv = float(p.two_tail)
if not binary and pv > pvalue_cutoff: continue
gc = f.fasta[chr][start:end].upper()
gc = gc.count("G") + gc.count("C")
# if a_tot or b_tot == 0, then use 'na'
a_tot = float(a_c_count + a_t_count)
a_methyl = (a_c_count / a_tot) if a_tot != 0 else None
b_tot = float(b_c_count + b_t_count)
b_methyl = (b_c_count / b_tot) if b_tot !=0 else None
#strand = "+" if a_methyl > b_methyl else "-"
strand = "."
# TODO: use absolute?
plot = a_methyl - b_methyl if not None in (a_methyl, b_methyl) else 'na'
#print plot, a_methyl, b_methyl
#if plot == 'na': continue
if binary:
if plot != 'na':
plot == 1 if (ratio_range[0] <= plot <= ratio_range[1]) else 0
else:
if not (ratio_range[0] <= plot <= ratio_range[1]): continue
if binary and plot != 'na': plot = 0 if pv > pvalue_cutoff else 1
attrs="p=%.3G;ac=%i;at=%i;bc=%i;bt=%i;gc=%i" % \
(pv, a_c_count, a_t_count, b_c_count, b_t_count, gc)
"""
accns = flat.get_features_in_region(chr, start + 1, end)
accns = [a["accn"] for a in accns]
if accns:
attrs +=";accns=" + ",".join(accns)
"""
print >>fh, "\t".join(map(str, [chr, "methylation", "dmc", start + 1, end, plot, strand, ".", attrs]))
if __name__ == "__main__":
import optparse, sys
p = optparse.OptionParser(__doc__)
p.add_option("--fasta", dest="fasta", help="path to fasta",
default="/labdata/thaliana_v9/thaliana_v9.fasta")
p.add_option("--flat", dest="flat", help="path to flat file",
default="/labdata/thaliana_v9/thaliana_v9.flat")
p.add_option("--window", dest="window", help="window size",
type='int', default=50)
p.add_option("--context", dest="context", help="methylation context "
"one of CG, CHG, or CHH")
p.add_option("-b", dest="binary", action="store_true", default=False,
help="use either 1 or 0 for ratio")
p.add_option("-p", dest="pvalue", type="float", default=1.0,
help="p value cutoff")
p.add_option("--ratio_range", dest="ratio_range", help="optional: range of ratios"
" to include. specify in format low:high. e.g.: 0.2:0.8")
opts, args = p.parse_args()
f = Flat(opts.flat, opts.fasta)
if len(args) != 2 or not (opts.context in ('CG', 'CHG', 'CHH')):
sys.exit(not p.print_help())
dir_a, dir_b = args
assert op.exists(dir_a)
assert op.exists(dir_b)
rr = (-2, 2)
if opts.ratio_range:
rr = map(float, opts.ratio_range.split(":"))
run_50bp_gff(f, dir_a, dir_b, opts.context, opts.window, opts.binary,
opts.pvalue, rr)
| Python | 0.000001 |
@@ -1,12 +1,139 @@
+%22%22%22%0Acompare 2 sets of data that have been run through the methylcode pipeline.%0Ausage:%0A%0A %25 prog %5Boptions%5D dir_a/ dir_b/%0A%0A%22%22%22%0A
from fisher
@@ -274,27 +274,8 @@
oup%0A
-import numpy as np%0A
from
@@ -472,24 +472,23 @@
def run_
-50bp_gff
+compare
(flat, a
@@ -556,16 +556,17 @@
e):%0A
+#
fh = ope
@@ -624,16 +624,36 @@
), 'w')%0A
+ fh = sys.stdout%0A
prin
@@ -4156,16 +4156,15 @@
run_
-50bp_gff
+compare
(f,
|
53dd23af60f18c165c3359c3ab5da03d40ebb62e | version 2.1 | upload.py | upload.py |
#!/usr/bin/env python
# Upload CAN messages and Tags detected to Industrial PC via WiFi
# This script will be called every 5 second by the main script
import httplib
import datetime
import time
import os
logFilePath = None
def write_log(text):
logfile = open(logFilePath, 'a', 1)
now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
logfile.write('[' + now + '] ' + text + '\n')
logfile.close()
def get_timestamp():
return str(int(round(time.time() * 1000)))
def upload(datafolder, logFolder_):
# URL or IP and Port of Industrial PC
url = "127.0.0.1:80"
servlet = "/BananaProServer/index.php"
logFilePath = logFolder + 'slope-upload.log'
# Timestamp here to have the same timestamp for all the loops
timestamp = get_timestamp()
try:
# Within this code we are trying to check if the industrial pc is reachable
conn = httplib.HTTPConnection(url)
conn.request("GET", servlet)
if conn.getresponse().status == 200:
write_log('Connection with the industrial pc established, send data')
headers = {"Content-type": "text/plain"}
# Send all the files not delivered before
listFiles = os.listdir(datafolder)
for file in listFiles:
msgFilePath = datafolder + file
# Send files all together
if os.path.isfile(msgFilePath):
filename, extension = os.path.splitext(msgFilePath)
if(extension != '.done'):
msgFile = open(msgFilePath, 'r')
msgString = msgFile.read()
conn = httplib.HTTPConnection(url)
conn.request("POST", servlet, msgString, headers)
response = conn.getresponse()
status = response.status
text = response.read().strip();
msgFile.close()
if status != 200:
write_log('Error: ' + status + ' ' + response.reason)
if status == 200:
# Then, we will rename _rfid-tags.txt in order to avoid to send same data in the future
write_log('Done: ' + msgFilePath)
os.rename(msgFilePath, msgFilePath + "." + timestamp + ".done")
except IOError:
write_log('Error: Industrial PC unreachable')
finally:
conn.close()
| Python | 0.000001 |
@@ -505,17 +505,16 @@
ogFolder
-_
):%0A%09# UR
|
64967f3d262a1823a5d0cd22fb096f9529e6aa98 | resolve flake8 linting errors | watson/validators/__init__.py | watson/validators/__init__.py |
# -*- coding: utf-8 -*-
__version__ = '1.0.6'
try:
    # Fix for setup.py version import
    from watson.validators.numeric import Range
    from watson.validators.string import Length, Required, RegEx, Csrf
    __all__ = ['Range', 'Length', 'Required', 'RegEx', 'Csrf']
except:  # pragma: no cover
    pass  # pragma: no cover
| Python | 0.000001 |
@@ -276,16 +276,22 @@
cept: #
+ noqa,
pragma:
|
5aa594d02bfcdfe5eefe90fad1e464070b6ae9b5 | Update common.py | bin/common.py | bin/common.py |
#!/usr/bin/python
import os
import multiprocessing as mp
import subprocessing as sub
import shlex
def fixDirName(dirpath):
if dirpath[-1] != '/':
dirpath += '/'
return dirpath
def makeDir(dirpath):
if not os.path.exists(dirpath):
os.mkdir(dirpath)
return 0
def importSampleList(infile):
if os.path.exists(infile):
files = []
with open(infile, 'r') as IN:
for x in IN:
files.append(x.rstrip())
else:
errorText = '\nERROR: the specified sample name file does not exist, please fix\n\t' + infile + '\n'
print(errorText)
raise SystemExit
if len(files) == 0:
errorText = '\nERROR: The sample name file does not contain any sample names, please fix\n'
print(errorText)
raise SystemExit
return files
###daemon to run multiprocessing and parallelize tasks###
def daemon(target, argList, name, cpuPerProcess=1):
print( str( '\t' + str(len(argList)) + ' processes to run to ' + name ) )
numCPU = mp.cpu_count()
numWorkers = min( [int(numCPU / cpuPerProcess), len(argList)] )
pool = mp.Pool(numWorkers)
processes = [pool.apply_async(target, args=x) for x in argList]
pool.close()
for i,j in enumerate(processes):
j.wait()
if not j.successful():
pool.terminate()
print '\n\n\nprocessing failed, getting traceback now...'
p = mp.Process(target=target, args=argList[i])
p.start()
p.join()
# else:
# print( str( '\t\t' + str(i+1) + ' of ' + str(len(argList)) + ' processes complete' ) )
print( str( '\tAll processing to ' + name + ' complete\n' ) )
def zipping(filepath, gunzip=True):
if filepath.split('.')[-1] != 'gz' and gunzip:
return filepath
elif filepath.split('.')[-1] == 'gz' and not gunzip:
return filepath
if gunzip:
cmd = 'gunzip ' + filepath
fixname = filepath[:-3]
else:
cmd = 'gzip ' + filepath
fixname = filepath + '.gz'
cmd = shlex.split(cmd)
p = sub.popen(cmd)
p.wait()
return fixname
| Python | 0.000001 |
@@ -1905,17 +1905,17 @@
p = sub.
-p
+P
open(cmd
|
5e5de38dcdfd3e6884b70a917b14ae6329e2e458 | Update vbbReq.py | vbbReq.py | vbbReq.py |
#! /usr/bin/python
# _*_ coding: latin-1 _*_
#TODO brauch ich das???
import requests
import xml.dom.minidom as dom
import codecs
import os
# It's possible that a train won't stop at the station, therefor vbb uses the
# 'getIn' Attribute at the <Dep> Element. This attribute is ignored in this
# version. So the train might not stop, even thou this script says so... You
# can add this, if you want ;)
def test ():
bus = '0001000000000000'
# If the example doesn't work, you might need to update the date-parameter in the
# next line.
postDict = newPostDict('9068205', '9017101', '20140516', '12:00', bus, '0')
postXML = newPostXML(postDict)
requestXML = requestDataFromVBB(postXML)
connectionsList = getConnectionsList(requestXML, False)
printConnectionsList(connectionsList)
#requestXML.close()
def request(departure, destination, date, time, vehicle, direct, enableFoot):
postDict = newPostDict(departure, destination, date, time, vehicle, direct)
postXML = newPostXML(postDict)
requestXML = requestDataFromVBB(postXML)
connectionsList = getConnectionsList(requestXML, enableFoot)
#requestXML.close()
return connectionsList
def getConnectionsList(requestXML, enableFoot):
tree = dom.parseString(requestXML)
#tree = dom.parse('output.xml')
connectionsList = []
for connection in tree.getElementsByTagName('Connection'):
if not (connection.parentNode.getAttribute('type') == 'IV' and enableFoot == False):
auxiliary = 1
conList = []
for conSection in connection.getElementsByTagName('ConSection'):
conSectDict = {'depStation':'','depTime':'','vehicle':'','direction':'','arrStation':'','arrTime':''}
for departure in conSection.getElementsByTagName('Departure'):
for depStation in departure.getElementsByTagName('Station'):
conSectDict['depStation'] = depStation.getAttribute('name').encode('utf-8')
for depTime in departure.getElementsByTagName('Time'):
departureTime = depTime.firstChild.data.strip().encode('utf-8')
departureTime = departureTime[3:]
conSectDict['depTime'] = departureTime.encode('utf-8')
for arrival in conSection.getElementsByTagName('Arrival'):
for arrStation in arrival.getElementsByTagName('Station'):
conSectDict['arrStation'] = arrStation.getAttribute('name').encode('utf-8')
for arrTime in conSection.getElementsByTagName('Time'):
conSectDict['arrTime'] = arrTime.firstChild.data.strip().encode('utf-8')
for journey in conSection.getElementsByTagName('Journey'):
for attribute in journey.getElementsByTagName('Attribute'):
if attribute.getAttribute('type') == 'NAME':
for text in attribute.getElementsByTagName('Text'):
conSectDict['vehicle']= text.firstChild.data.strip().encode('utf-8')
elif attribute.getAttribute('type') == 'DIRECTION':
for text in attribute.getElementsByTagName('Text'):
conSectDict['direction'] = text.firstChild.data.strip().encode('utf-8')
if auxiliary == 1:
conList.append(departureTime)
auxiliary = 0
conList.append(conSectDict)
connectionsList.append(conList)
return connectionsList
# Use def NewPostDict(...) as parameter
def newPostXML(postDict):
postXML = os.path.join(os.path.dirname(__file__), 'conReq.xml')
tree = dom.parse(postXML)
for conReq in tree.firstChild.childNodes:
if conReq.nodeName == 'ConReq':
for node1 in conReq.childNodes:
if node1.nodeName == 'Start':
for node2 in node1.childNodes:
if node2.nodeName == 'Station':
node2.setAttribute('externalId', postDict['startStation'])
elif node2.nodeName == 'Prod':
node2.setAttribute('prod', postDict['vehicle'])
node2.setAttribute('direct', postDict['direct'])
elif node1.nodeName == 'Dest':
for node2 in node1.childNodes:
if node2.nodeName == 'Station':
node2.setAttribute('externalId', postDict['destStation'])
elif node1.nodeName == 'ReqT':
node1.setAttribute('date', postDict['date'])
node1.setAttribute('time', postDict['time'])
return tree.toxml()
# Give the dictionary only strings.
# startStation: Look in ./open_vbb_data/stops.txt...
# date: For Example '20140516' for 16.05.2014
# time: for Example '12:45'
# vehicle: For Example '0001000000000000'. This means "bus".
# Set the other '0' to '1', if you wan't to enable
# other vehicles
# direct: If you wan't a direct connection, set this to '1'.
# Otherwise set this to '0
def newPostDict(startStation, destStation, date, time, vehicle, direct):
return {'startStation': startStation, 'destStation': destStation, 'date': date, 'time': time, 'vehicle': vehicle, 'direct': direct}
# Use def newPostXML(...) as parameter
def requestDataFromVBB(postXML):
URL = "http://demo.hafas.de/bin/pub/vbb-fahrinfo/relaunch2011/extxml.exe/"
request = requests.post(URL, data=postXML).text
return request.encode('latin-1')
#requestXML = codecs.open('output.xml', 'w+', 'utf-8-sig')
#requestXML.write(request)
#return requestXML
#return codecs.decode(request, 'utf-8-sig')
#return request.encode('utf-8-sig')
def printConnectionDict(conDict):
print conDict
string = ''
string += conDict['depStation']+' '
string += conDict['depTime']+' '
string += conDict['arrStation']+' '
string += conDict['arrTime']+' '
string += conDict['vehicle']+' '
string += conDict['direction']
print string
def printConnectionsList(connectionsList):
print connectionsList
line = ''
number = 1
for connection in connectionsList:
for conDict in connection:
if type(conDict) == 'dict':
printConnectionDict(conDict)
print line
# Uncomment to test this
test()
| Python | 0 |
@@ -6536,15 +6536,16 @@
st this%0A
+#
test()%0A
|
eda6ccfdce2166c6ba5442b95a80d6723533c59b | Add comma | app/main/forms/direct_award_forms.py | app/main/forms/direct_award_forms.py |
from flask_wtf import FlaskForm
from wtforms.validators import DataRequired, Length, NumberRange, InputRequired, ValidationError
from dmutils.forms.fields import (
DMBooleanField,
DMDateField,
DMPoundsField,
DMStripWhitespaceStringField,
DMRadioField,
)
from dmutils.forms.validators import GreaterThan
from decimal import Decimal
class CreateProjectForm(FlaskForm):
save_search_selection = DMRadioField(
validators=[
InputRequired("Select a save location")
]
)
name = DMStripWhitespaceStringField(
"Name your search. A reference number or short description of what you want to buy makes a good name.",
)
def __init__(self, projects, **kwargs):
super().__init__(**kwargs)
self.save_search_selection.options = [{
"label": project["name"] or f"Untitled project {project['id']}",
"value": str(project["id"]),
} for project in projects]
self.save_search_selection.options.append({
"label": "Save a new search",
"value": "new_search",
"reveal": {
"question": self.name.label.text,
"hint": "100 characters maximum",
"name": self.name.name,
}
})
def validate_name(form, field):
if form.save_search_selection.data == "new_search":
try:
Length(min=1, max=100, message="Name must be between 1 and 100 characters")(form, field)
except ValidationError as e:
form.save_search_selection.options[-1]["reveal"]["error"] = e.args[0]
raise
class DidYouAwardAContractForm(FlaskForm):
YES = 'yes'
NO = 'no'
STILL_ASSESSING = 'still-assessing'
did_you_award_a_contract = DMRadioField(
"Did you award a contract?",
validators=[InputRequired(message="Select yes if you awarded a contract")],
options=[
{'value': YES, 'label': 'Yes'},
{'value': NO, 'label': 'No'},
{'value': STILL_ASSESSING, 'label': 'We are still assessing services'},
])
class WhichServiceWonTheContractForm(FlaskForm):
which_service_won_the_contract = DMRadioField(
"Which service won the contract?",
validators=[InputRequired(message="Select the service that won the contract")],
)
def __init__(self, services, *args, **kwargs):
super(WhichServiceWonTheContractForm, self).__init__(*args, **kwargs)
self.which_service_won_the_contract.options = [{
"label": service["data"]["serviceName"],
"value": service["id"],
"hint": service["supplier"]["name"],
} for service in services['services']]
class TellUsAboutContractForm(FlaskForm):
INVALID_VALUE_MESSAGE = "Enter the value in pounds and pence using numbers and decimals only"
start_date = DMDateField(
"Start date",
validators=[
InputRequired("Enter the start date"),
DataRequired("Enter a real start date"),
],
)
end_date = DMDateField(
"End date",
validators=[
InputRequired("Enter the end date"),
DataRequired("Enter a real end date"),
GreaterThan("start_date", "Your end date must be later than the start date."),
],
)
value_in_pounds = DMPoundsField(
"Value",
validators=[
InputRequired("Enter the contract value"),
DataRequired(INVALID_VALUE_MESSAGE),
NumberRange(min=Decimal('0.01'), message=INVALID_VALUE_MESSAGE),
],
)
buying_organisation = DMStripWhitespaceStringField(
"Organisation buying the service",
hint="For example, National Audit Office or Lewisham Council",
validators=[
InputRequired("Enter an organisation")
],
)
class WhyDidYouNotAwardForm(FlaskForm):
why_did_you_not_award_the_contract = DMRadioField(
"Why didn't you award a contract?",
options=[
{
"label": "The work has been cancelled",
"value": "work_cancelled",
"hint": "For example, because you no longer have the budget",
},
{
"label": "There were no suitable services",
"value": "no_suitable_services",
"hint": "The services in your search results did not meet your requirements",
},
],
validators=[InputRequired(message="Please select a reason why you didn't award a contract")]
)
class BeforeYouDownloadForm(FlaskForm):
user_understands = DMBooleanField(
"I understand that I cannot edit my search again after I export my results",
validators=[
InputRequired(message="Confirm that you have finished editing your search")
],
)
| Python | 0.999968 |
@@ -2844,16 +2844,17 @@
nd pence
+,
using n
|
ec42cc0941687c43926e31701578eeee9144d6d7 | Move CFP to main menu | gopher/protocol.py | gopher/protocol.py |
import re
from blog.models import Post
from config.utils import get_active_event
from django.contrib.sites.models import Site
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.utils.text import wrap
from gopher.ascii_art import header
from pages.models import Page
from talks.models import Talk
from twisted.internet import protocol
from workshops.models import Workshop
site = Site.objects.get_current()
domain = site.domain.split(":")[0]
event = get_active_event()
class Gopher(protocol.Protocol):
@property
def port(self):
return self.transport.server.port
def file_item(self, title, id):
line = f"0{title}\t{id}\t{domain}\t{self.port}\r\n"
self.transport.write(line.encode())
def menu_item(self, title, id):
line = f"1{title}\t{id}\t{domain}\t{self.port}\r\n"
self.transport.write(line.encode())
def write_line(self, line):
self.transport.write(line.encode())
self.transport.write(b"\r\n")
def write_text(self, text):
wrapped_text = wrap(strip_tags(text), 80)
for line in wrapped_text.splitlines():
self.write_line(line)
def get_cfp(self):
cfp = event.get_cfp()
if cfp and (cfp.is_active() or cfp.is_pending()):
return cfp
def list_pages(self):
pages = []
for page in Page.objects.filter(published=True):
pages.append((page.title, f"page:{page.pk}"))
cfp = self.get_cfp()
if cfp:
pages.append((cfp.title, "cfp"))
return sorted(pages)
def pages_menu(self):
for title, id in self.list_pages():
self.file_item(title, id)
def talks_menu(self):
for talk in event.talks.prefetch_related('applicants__user'):
title = f"{talk.title} - {talk.speaker_names}"
self.file_item(title, f"talk:{talk.pk}")
def workshops_menu(self):
workshops = (event.workshops
.prefetch_related('applicants__user')
.filter(published=True)
.order_by('title'))
for workshop in workshops:
speakers = ", ".join(workshop.applicant_names())
title = f"{workshop.title} - {speakers}"
self.file_item(title, f"workshop:{workshop.pk}")
def news_menu(self):
for post in event.posts.all():
title = f"[{post.created_at.date()}] {post.title}"
self.file_item(title, f"post:{post.pk}")
def write_markdown_template(self, template, context):
markdown = render_to_string(template, context)
for line in markdown.splitlines():
self.write_line(line)
def page(self, page_id):
self.write_markdown_template('pages/page.md', {
"page": Page.objects.get(pk=page_id)
})
def talk(self, talk_id):
self.write_markdown_template('talks/talk.md', {
"talk": Talk.objects.get(pk=talk_id)
})
def post(self, post_id):
self.write_markdown_template('blog/post.md', {
"post": Post.objects.get(pk=post_id)
})
def workshop(self, workshop_id):
self.write_markdown_template('workshops/workshop.md', {
"workshop": Workshop.objects.get(pk=workshop_id)
})
def cfp(self):
cfp = self.get_cfp()
self.write_markdown_template('cfp/cfp_announcement.md', {"cfp": cfp})
def main_menu(self):
has_workshops = event.workshops.filter(published=True).exists()
has_talks = event.talks.exists()
has_news = event.posts.exists()
for line in header.splitlines():
self.write_line(line)
self.menu_item("Pages", "pages")
if has_news:
self.menu_item("News", "news")
if has_talks:
self.menu_item("Talks", "talks")
if has_workshops:
self.menu_item("Workshops", "workshops")
def connectionMade(self):
client_ip, client_port = self.transport.client
print(f"Connection made from {client_ip}:{client_port}")
def connectionLost(self, reason):
client_ip, client_port = self.transport.client
print(f"Connection lost from {client_ip}:{client_port}")
def dataReceived(self, data):
print("Received:", data)
data = data.decode()
if data == '\r\n':
self.main_menu()
elif data == 'pages\r\n':
self.pages_menu()
elif data == 'talks\r\n':
self.talks_menu()
elif data == 'news\r\n':
self.news_menu()
elif data == 'workshops\r\n':
self.workshops_menu()
elif re.match(r"^page:(\d+)\r\n$", data):
self.page(int(data.split(":")[1]))
elif re.match(r"^talk:(\d+)\r\n$", data):
self.talk(int(data.split(":")[1]))
elif re.match(r"^post:(\d+)\r\n$", data):
self.post(int(data.split(":")[1]))
elif re.match(r"^workshop:(\d+)\r\n$", data):
self.workshop(int(data.split(":")[1]))
elif data == 'cfp\r\n':
self.cfp()
else:
print("???")
self.transport.loseConnection()
class GopherFactory(protocol.Factory):
def buildProtocol(self, addr):
return Gopher()
|
Python
| 0
|
@@ -1337,44 +1337,25 @@
def
-list_pages(self):%0A pages = %5B%5D
+pages_menu(self):
%0A
@@ -1410,68 +1410,27 @@
rue)
-:%0A pages.append((page.title, f%22page:%7Bpage.pk%7D%22))%0A
+.order_by('title'):
%0A
@@ -1438,227 +1438,56 @@
-cfp =
+
self.
-get_cfp()%0A if cfp:%0A pages.append((cfp.title, %22cfp%22))%0A%0A return sorted(pages)%0A%0A def pages_menu(self):%0A for title, id in self.list_pages():%0A self.file_item(title, id
+file_item(page.title, f%22page:%7Bpage.pk%7D%22
)%0A%0A
@@ -3378,16 +3378,45 @@
exists()
+%0A cfp = self.get_cfp()
%0A%0A
@@ -3481,24 +3481,94 @@
line(line)%0A%0A
+ if cfp:%0A self.menu_item(%22Call for papers%22, %22cfp%22)%0A%0A
self
|
534d66ff92e6fd00ea92a08c76f39614b9977967
|
make it compatible to python3
|
uuhash.py
|
uuhash.py
|
#!/usr/bin/env python
import os
import hashlib
import binascii
import struct
__all__ = ["UUHash"]
# https://en.wikipedia.org/wiki/UUHash
# MLDonkey source code, file src/utils/lib/fst_hash.c, retrieved 2014-08-20
# http://sourceforge.net/projects/mldonkey/files/mldonkey/3.1.5/mldonkey-3.1.5.tar.bz2
# http://www.opensource.apple.com/source/xnu/xnu-1456.1.26/bsd/libkern/crc32.c
def UUHash(fobj):
chunksize = 307200
fobj.seek(0, os.SEEK_END)
filesize = fobj.tell()
fobj.seek(0)
chunk = fobj.read(chunksize)
md5hash = hashlib.md5(chunk).digest()
smallhash = 0
if filesize > chunksize:
lastpos = fobj.tell()
offset = 0x100000
while offset + 2*chunksize < filesize: # yes, LESS than, not equal
fobj.seek(offset)
chunk = fobj.read(chunksize)
smallhash = binascii.crc32(chunk, smallhash)
lastpos = offset + chunksize
offset <<= 1
endlen = filesize - lastpos
if endlen > chunksize:
endlen = chunksize
fobj.seek(filesize-endlen)
chunk = fobj.read(endlen)
smallhash = binascii.crc32(chunk, smallhash)
smallhash = ((~smallhash) ^ filesize) % 2**32
return md5hash + struct.pack("<I", smallhash)
if __name__ == '__main__':
import sys
import glob
import base64
import time
files = []
for globbable in sys.argv[1:]:
files += glob.glob(globbable) or [globbable]
for fname in files:
if not os.path.isfile(fname): continue
t0 = time.time()
hash = UUHash(file(fname, 'rb'))
t1 = time.time()
encoded = base64.b64encode(hash)
print "%-28s" % encoded, hash.encode('hex').upper(), fname
|
Python
| 0.000283
|
@@ -14,16 +14,17 @@
v python
+3
%0A%0Aimport
@@ -1427,20 +1427,20 @@
UUHash(
-file
+open
(fname,
@@ -1511,52 +1511,53 @@
rint
- %22%25-28s%22 %25 encoded, hash.encode('hex').upper
+(%22%7B%7D %7B%7D %7B%7D%22.format(encoded.decode(), hash.hex
(),
@@ -1561,9 +1561,11 @@
), fname
+))
%0A
|
088d729d0f8713227a7dd68e6e8a317b0e6e80fe
|
Version bump to 0.1.3
|
graphs/_version.py
|
graphs/_version.py
|
__version__ = '0.1.2'
|
Python
| 0
|
@@ -16,7 +16,7 @@
0.1.
-2
+3
'%0A
|
ae179cf964939a97184402039574d7ee9b2e62da
|
Add BlogPost update test
|
blog/tests.py
|
blog/tests.py
|
from django.test import TestCase
from .models import BlogPost
from django.contrib.auth.models import User
class BlogTest(TestCase):
def setUp(self):
self.user = User.objects.create_user(username = "user001", email = "email@domain.com", password = "password123456")
def test_post_creation(self):
blogpost = BlogPost()
blogpost.user = self.user
blogpost.title = "Title Test"
blogpost.text = "Lorem ipsum tarapia tapioco..."
blogpost.save()
self.assertTrue(blogpost.id > 0, "BlogPost created correctly")
|
Python
| 0
|
@@ -560,8 +560,560 @@
ectly%22)%0A
+%0A def test_post_update(self):%0A blogpost = BlogPost()%0A blogpost.user = self.user%0A blogpost.title = %22Title Test%22%0A blogpost.text = %22Lorem ipsum tarapia tapioco...%22%0A blogpost.save()%0A self.assertTrue(blogpost.id %3E 0, %22BlogPost created correctly%22)%0A blogpost.title = %22Title Test - modified%22%0A blogpost.save()%0A blogpost_id = blogpost.id%0A blogpost_saved = BlogPost.objects.get(id = blogpost_id)%0A self.assertEquals(blogpost_saved.title, blogpost.title, %22BlogPost updated correctly%22)%0A
|
c2600e3b831c5c55c7e2d23a98b73a4c07ea694c
|
add cleaning comment
|
custom/enikshay/nikshay_datamigration/models.py
|
custom/enikshay/nikshay_datamigration/models.py
|
from django.db import models
class PatientDetail(models.Model):
PregId = models.CharField(max_length=255, primary_key=True) # need to remove trailing whitespace in Excel
Stocode = models.CharField(max_length=255, null=True)
Dtocode = models.CharField(max_length=255, null=True)
Tbunitcode = models.IntegerField(null=True)
pname = models.CharField(max_length=255, null=True)
pgender = models.CharField(max_length=255)
page = models.IntegerField(null=True)
poccupation = models.IntegerField(null=True)
paadharno = models.CharField(max_length=255, null=True) # big ints (scientific notation) and nulls. requires some formatting
paddress = models.CharField(max_length=255, null=True)
pmob = models.CharField(max_length=255, null=True) # contains " ", big ints
plandline = models.BigIntegerField(null=True)
ptbyr = models.CharField(max_length=255, null=True) # dates, but not clean
pregdate1 = models.DateField() # remove time in Excel (format as DD-MM-YYYY)
cname = models.CharField(max_length=255, null=True)
caddress = models.CharField(max_length=255, null=True)
cmob = models.CharField(max_length=255, null=True) # contains " ", big ints
clandline = models.BigIntegerField(null=True)
cvisitedby = models.CharField(max_length=255, null=True)
cvisitedDate1 = models.CharField(max_length=255, null=True) # datetimes, look like they're all midnight
dcpulmunory = models.CharField(
max_length=255, choices=(
('y', 'y'),
('N', 'N'),
)
) # y or N
dcexpulmunory = models.CharField(max_length=255, null=True)
dcpulmunorydet = models.CharField(max_length=255, null=True)
dotname = models.CharField(max_length=255, null=True)
dotdesignation = models.CharField(max_length=255, null=True)
dotmob = models.CharField(max_length=255, null=True)
dotlandline = models.CharField(max_length=255, null=True)
dotpType = models.IntegerField()
dotcenter = models.CharField(max_length=255, null=True)
PHI = models.IntegerField()
dotmoname = models.CharField(max_length=255, null=True)
dotmosignDate = models.CharField(max_length=255, null=True) # datetimes, look like they're all midnight. also have a bunch of 1/1/1990
atbtreatment = models.CharField(max_length=255, choices=(
('Y', 'Y'),
('N', 'N'),
)) # Y or N
atbduration = models.CharField(max_length=255, null=True) # some int, some # months poorly formatted
atbsource = models.CharField(max_length=255, null=True, choices=(
('G', 'G'),
('O', 'O'),
('P', 'P'),
))
atbregimen = models.CharField(max_length=255, null=True)
atbyr = models.IntegerField(null=True)
Ptype = models.IntegerField()
pcategory = models.IntegerField()
InitiationDate1 = models.CharField(max_length=255, null=True) # datetimes, look like they're all midnight
@property
def first_name(self):
return self._list_of_names[0]
@property
def middle_name(self):
return ' '.join(self._list_of_names[1:-1])
@property
def last_name(self):
return self._list_of_names[-1]
@property
def _list_of_names(self):
return self.pname.split(' ')
@property
def sex(self):
return {
'F': 'female',
'M': 'male',
'T': 'transgender'
}[self.pgender]
class Outcome(models.Model):
PatientId = models.ForeignKey(PatientDetail, primary_key=True)
Outcome = models.CharField(max_length=255, null=True)
OutcomeDate1 = models.CharField(max_length=255, null=True)
MO = models.CharField(max_length=255, null=True)
XrayEPTests = models.CharField(max_length=255, null=True)
MORemark = models.CharField(max_length=255, null=True)
HIVStatus = models.CharField(max_length=255, null=True)
HIVTestDate = models.CharField(max_length=255, null=True)
CPTDeliverDate = models.CharField(max_length=255, null=True)
ARTCentreDate = models.CharField(max_length=255, null=True)
InitiatedOnART = models.CharField(max_length=255, null=True)
InitiatedDate = models.CharField(max_length=255, null=True)
class Followup(models.Model):
PatientID = models.CharField(max_length=255, null=True)
IntervalId = models.CharField(max_length=255, null=True)
TestDate = models.CharField(max_length=255, null=True)
DMC = models.CharField(max_length=255, null=True)
LabNo = models.CharField(max_length=255, null=True)
SmearResult = models.CharField(max_length=255, null=True)
PatientWeight = models.CharField(max_length=255, null=True)
class Household(models.Model):
id = models.IntegerField(primary_key=True)
PatientID = models.CharField(max_length=255, null=True)
Name = models.CharField(max_length=255, null=True)
Dosage = models.CharField(max_length=255, null=True)
Weight = models.CharField(max_length=255, null=True)
M1 = models.CharField(max_length=255, null=True)
M2 = models.CharField(max_length=255, null=True)
M3 = models.CharField(max_length=255, null=True)
M4 = models.CharField(max_length=255, null=True)
M5 = models.CharField(max_length=255, null=True)
M6 = models.CharField(max_length=255, null=True)
|
Python
| 0
|
@@ -4269,24 +4269,65 @@
, null=True)
+ # requires trimming whitespace in excel
%0A Interva
|
59851b283b8cb6f92895090ba23e71be48f1a990
|
remove distinction between sfa aggregate and geni_aggregate
|
sfa/methods/GetVersion.py
|
sfa/methods/GetVersion.py
|
from sfa.util.faults import *
from sfa.util.namespace import *
from sfa.util.method import Method
from sfa.util.parameter import Parameter
class GetVersion(Method):
"""
Returns this GENI Aggregate Manager's Version Information
@return version
"""
interfaces = ['geni_am','registry']
accepts = []
returns = Parameter(dict, "Version information")
def call(self):
self.api.logger.info("interface: %s\tmethod-name: %s" % (self.api.interface, self.name))
manager_base = 'sfa.managers'
if self.api.interface in ['geni_am']:
mgr_type = self.api.config.SFA_GENI_AGGREGATE_TYPE
manager_module = manager_base + ".geni_am_%s" % mgr_type
manager = __import__(manager_module, fromlist=[manager_base])
return manager.GetVersion()
if self.api.interface in ['registry']:
mgr_type = self.api.config.SFA_REGISTRY_TYPE
manager_module = manager_base + ".slice_manager_%s" % mgr_type
manager = __import__(manager_module, fromlist=[manager_base])
return manager.GetVersion()
return {}
|
Python
| 0.999021
|
@@ -281,26 +281,53 @@
= %5B'
-geni_am','registry
+registry','aggregate', 'slicemgr', 'component
'%5D%0A
@@ -513,17 +513,16 @@
.name))%0A
-%0A
@@ -532,288 +532,39 @@
ager
-_base = 'sfa.managers'%0A%0A if self.api.interface in %5B'geni_am'%5D:%0A mgr_type = self.api.config.SFA_GENI_AGGREGATE_TYPE%0A manager_module = manager_base + %22.geni_am_%25s%22 %25 mgr_type%0A manager = __import__(manager_module, fromlist=%5Bmanager_base%5D
+ = self.api.get_manager(
)%0A
+%0A
@@ -586,340 +586,21 @@
ger.
-GetVersion()%0A if self.api.interface in %5B'registry'%5D:%0A mgr_type = self.api.config.SFA_REGISTRY_TYPE%0A manager_module = manager_base + %22.slice_manager_%25s%22 %25 mgr_type%0A manager = __import__(manager_module, fromlist=%5Bmanager_base%5D)%0A return manager.GetVersion()%0A %0A return %7B%7D
+get_version()
%0A
|
e99dbdcec1dd80cb2243d65ebf958620a7671ceb
|
Change the importer call back treatment
|
share/plugins/delivery.py
|
share/plugins/delivery.py
|
""" Builds the check list with the waiting status """
from baseplugin import BasePlugin
from basejob import BaseJob, BaseJobRuntimeError
from importer import Importer
import psycopg2
PLUGIN_NAME = "delivery"
class DeliveryRuntimeError(BaseJobRuntimeError):
""" BaseJob Exceptions. """
def __init__(self, error):
""" Init method. """
Exception.__init__(self, error)
class Job(BaseJob):
def __init__(self, logger, infos, params):
BaseJob.__init__(self, logger, infos)
self.importer = Importer()
self.importer['distant_url'] = 'https://%s/exporter/' % (self.infos["address"])
self.server_address = self.infos["address"]
self.params = params
def connect(self):
"""
Connection to the database.
"""
try:
connector = "host=%s port=%s user=%s password=%s dbname=%s" % \
( self.params['host'], \
self.params['port'], \
self.params['user'], \
self.params['password'],\
self.params['database'])
self.conn = psycopg2.connect(connector)
self.cursor = self.conn.cursor();
except Exception:
raise DeliveryRuntimeError("Could not connect to the server")
def set_metadata(self, entries):
"""
Set metadata for a set of files for an receiver given.
"""
entries = self.importer.call('webengine.delivery.metadata', 'deliver_files', entries)
error_messages = ()
ret_status = "OK"
for entry in entries:
set = ()
dic = {"det_id" : entry["DET_ID"]}
self.cursor.execute("SAVEPOINT save_no_resend_%(DET_ID)s" % (entry))
try:
if isinstance (entry["success"], Exception) or not (entry["success"]):
raise Exception(entry["DET_ID"])
set = ("det_transfer_status='Complete'", )
if entry["STATUS"] == "Processing":
set += ("det_status='Complete'",)
self.cursor.execute("UPDATE sjg_delivery_transfer SET %s WHERE det_id = '%s'" % (",".join(set), dic['det_id']))
except Exception, e:
ret_status='ERROR'
error_messages += ("%(DET_ID)s " % entry,)
entry['HOSTNAME'] = self.server_address
self.cursor.execute("ROLLBACK TO SAVEPOINT save_no_resend_%(DET_ID)s" % entry)
if entry["STATUS"] == "Processing":
self.log('Failed to process NO-RESEND for delivery (DET_ID=%(DET_ID)s of file %(MD5)s (%(FILENAME)s) sent to %(HOSTNAME)s. Putting delivery transfer status back to "Processing"' % (entry))
self.cursor.execute("UPDATE sjg_delivery_transfer SET det_transfer_status='Processing' WHERE det_id = %(DET_ID)s" % (entry))
else:
self.log('Failed to set metadata for delivery (DET_ID=%(DET_ID)s of file %(MD5)s (%(FILENAME)s) sent to %(HOSTNAME)s. Will retry later' % (entry))
return ret_status, error_messages
def get_files_to_set_metadata(self):
"""
Select and set metadata for files are in waiting status for a
specific receiver
"""
self.log('Get the check with waiting status')
self.connect()
fields = ("DET_ID", "STATUS", "MD5", "FILENAME", "CPY_ID", "CPY_NAME", "STREAM_ID")
query = "SELECT det.det_id, det.det_status, det.det_md5, det.det_target_filename, cpy.cpy_id, cpy.cpy_name, upu.upu_private_id " \
"FROM sjg_delivery_transfer det LEFT OUTER JOIN sjg_uplink_use upu ON upu.det_id = det.det_id " \
"JOIN sjg.sjg_company cpy ON det.cpy_initiator_id=cpy.cpy_id " \
"JOIN sjg_machine mac ON mac.mac_id = det.mac_dest_id AND mac.mac_hostname=%(mac_hostname)s" \
"WHERE det.det_transfer_status = 'Waiting'"
self.cursor.execute(query.encode('ISO 8859-15'), {'mac_hostname': self.server_address.encode('ISO 8859-15')})
files = self.cursor.fetchall()
# If some files are in waiting status
entries = reduce(lambda infos, row: infos + [dict(zip(fields, row))], files, [])
if (len(entries)):
self.log('entries found on server : ' + self.server_address)
status, error_messages = self.set_metadata(entries)
self.conn.commit()
if (len(error_messages)):
self.infos['message'] = "Fail for delivery with det_id: " + ",".join(error_messages)
if status == "OK":
self.infos['status'] = "FINISHED"
else:
self.infos['status'] = "ERROR"
else:
self.infos['message'] = "No file to set metadata"
self.infos['status'] = "FINISHED"
class Plugin(BasePlugin):
def __init__(self, log, event, url=None, params=None):
""" Init method of the streams plugin.
@params is a dictionary of optional parameters among:
host: address where the database is hosting
user: user used to database connection
password: password used to database connection
database: database using to select delivery
port: port using for datbase connection
@see BasePlugin documentation
"""
delivery_params = {'host': 'dbpsql-1.lab', 'user': 'sjg', 'password': 'sjg', 'database': 'smartjog', 'port': '5432'}
delivery_params.update(params)
BasePlugin.__init__(self, PLUGIN_NAME, log, event, url, delivery_params)
def create_new_job(self, job):
return Job(self.logger, job, self.params)
|
Python
| 0
|
@@ -1801,74 +1801,63 @@
if
-isinstance (entry%5B%22success%22%5D, Exception) or not (entry%5B%22success%22%5D)
+entry%5B%22status%22%5D == %22KO%22 or entry%5B%22status%22%5D == %22Warning%22
:%0A
|
1a36b71299f95213f23ee62bed8295ec61a65b18
|
Comment update to reflect name change of component location params.
|
sifra/modelling/hazard.py
|
sifra/modelling/hazard.py
|
import numpy as np
import os
import csv
from collections import OrderedDict
class Hazard(object):
def __init__(self, hazard_scenario_name, scenario_hazard_data,
hazard_input_method):
self.hazard_scenario_name = hazard_scenario_name
self.scenario_hazard_data = scenario_hazard_data
self.hazard_input_method = hazard_input_method
self.round_off = 2
def get_hazard_intensity_at_location(self, x_location, y_location):
for comp in self.scenario_hazard_data:
if self.hazard_input_method == 'hazard_array':
return comp["hazard_intensity"]
else:
if (round(float(comp["longitude"]), self.round_off)
== round(float(x_location), self.round_off)) and \
(round(float(comp["latitude"]), self.round_off)
== round(float(y_location), self.round_off)):
return comp["hazard_intensity"]
raise Exception("Invalid Values for Longitude or Latitude")
def get_seed(self):
seed = 0
for i, letter in enumerate(self.hazard_scenario_name):
seed = seed + (i + 1) * ord(letter)
return seed
def __str__(self):
output = self.hazard_scenario_name+'\n'
for hazrd in self.scenario_hazard_data:
output = output + \
"longitude: "+str(hazrd["longitude"]) + \
" latitude: " + str(hazrd["latitude"]) + \
" hazard_intensity: "+ str(hazrd["hazard_intensity"]) +'\n'
return output
class HazardsContainer(object):
"""
The idea is to abstract the number and type of hazards to allow greater
flexibility in the type and number of hazards to be modelled.
"""
def __init__(self, configuration):
# string variables
self.listOfhazards = []
self.hazard_type = configuration.HAZARD_TYPE
self.intensity_measure_param = configuration.HAZARD_INTENSITY_MEASURE_PARAM
self.intensity_measure_unit = configuration.HAZARD_INTENSITY_MEASURE_UNIT
self.focal_hazard_scenarios = configuration.FOCAL_HAZARD_SCENARIOS
# get hazard data from scenario file
if configuration.HAZARD_INPUT_METHOD == "scenario_file":
self.scenario_hazard_data, self.hazard_scenario_list = \
HazardsContainer.populate_scenario_hazard_using_hazard_file(
configuration.SCENARIO_FILE)
self.num_hazard_pts = len(self.hazard_scenario_list)
# get hazard data from an array of hazard intensity values
elif configuration.HAZARD_INPUT_METHOD == "hazard_array":
self.num_hazard_pts = \
int(round((configuration.INTENSITY_MEASURE_MAX
- configuration.INTENSITY_MEASURE_MIN) /
float(configuration.INTENSITY_MEASURE_STEP) + 1
)
)
# Using the limits and step generate a list of hazard
# intensity values
self.hazard_scenario_list \
= np.linspace(configuration.INTENSITY_MEASURE_MIN,
configuration.INTENSITY_MEASURE_MAX,
num=self.num_hazard_pts)
# containing hazard value for each location
self.scenario_hazard_data, self.hazard_scenario_name = \
HazardsContainer.populate_scenario_hazard_using_hazard_array(
self.hazard_scenario_list)
self.hazard_scenario_list = ["%0.3f" % np.float(x)
for x in self.hazard_scenario_list]
for hazard_scenario_name in self.scenario_hazard_data.keys():
self.listOfhazards.append(
Hazard(
hazard_scenario_name,
self.scenario_hazard_data[hazard_scenario_name],
configuration.HAZARD_INPUT_METHOD
)
)
# self.hazard_scenario_name = self.hazard_scenario_list
def get_listOfhazards(self):
for hazard_intensity in self.listOfhazards:
yield hazard_intensity
@staticmethod
def populate_scenario_hazard_using_hazard_file(scenario_file):
root = os.path.dirname(os.path.abspath(__file__))
csv_path = os.path.join(root, "hazard", scenario_file )
scenario_hazard_data = {}
with open(csv_path, "rb") as f_obj:
reader = csv.DictReader(f_obj, delimiter=',')
hazard_scenario_list \
= [scenario for scenario in reader.fieldnames if
scenario not in ["longitude", "latitude"]]
for scenario in hazard_scenario_list:
scenario_hazard_data[scenario] = []
for row in reader:
for col in row:
if col not in ["longitude", "latitude"]:
hazard_intensity = row[col]
scenario_hazard_data[col].append(
{"longitude": row["longitude"],
"latitude": row["latitude"],
"hazard_intensity": hazard_intensity})
return scenario_hazard_data, hazard_scenario_list
@staticmethod
def populate_scenario_hazard_using_hazard_array(num_hazard_pts):
scenario_hazard_data = OrderedDict()
hazard_scenario_name = []
for i, hazard_intensity in enumerate(num_hazard_pts):
hazard_scenario_name.append("s_"+str(i))
scenario_hazard_data["s_"+str(i)] \
= [{"longitude": 0,
"latitude": 0,
"hazard_intensity": hazard_intensity}]
return scenario_hazard_data, hazard_scenario_name
|
Python
| 0
|
@@ -1003,9 +1003,9 @@
lid
-V
+v
alue
@@ -1014,29 +1014,27 @@
for
-Longitude or Latitude
+component location.
%22)%0A%0A
@@ -1970,16 +1970,30 @@
_param =
+ %5C%0A
configu
@@ -2067,16 +2067,30 @@
e_unit =
+ %5C%0A
configu
|
1fde3398d5fe132d628acce03392d459e718e2ad
|
Fix 400 error response for proxied resources
|
slyd/slyd/splash/proxy.py
|
slyd/slyd/splash/proxy.py
|
from __future__ import absolute_import
import functools
import requests
from twisted.internet.threads import deferToThread
from twisted.internet.defer import CancelledError
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from PyQt4.QtNetwork import QNetworkRequest
from .ferry import User
from .css_utils import process_css
class ProxyResource(Resource):
def render_GET(self, request):
if not request.auth_info or not request.auth_info.get('username', None):
return self._error(request, 403, 'Auth required')
for arg in 'url', 'referer', 'tabid':
if arg not in request.args or len(request.args[arg]) != 1:
return self._error(request, 400, 'Argument required: {}'.format(arg))
url = request.args['url'][0]
referer = request.args['referer'][0]
tabid = int(request.args['tabid'][0])
user = User.findById(tabid)
# It's not easy to cancel a request that's being made by splash, because it does't
# return the QNetworkReply and when redirecting the current QNetworkReply changes,
# so if the client closes the connection while fetching the content we simply note
# it in this object and let the request finish without aborting.
connection_status = { "finished": False }
cb = functools.partial(self.end_response, request, url, connection_status, tabid)
if not user or not user.tab:
d = deferToThread(requests.get, url, headers={'referer': referer})
d.addCallback(cb)
d.addErrback(self._requestError, request)
request.notifyFinish().addErrback(self._requestDisconnect, deferred=d)
return NOT_DONE_YET
if request.auth_info['username'] != user.auth['username']:
return self._error(request, 403, "You don't own that browser session")
request.notifyFinish().addErrback(self._requestDisconnect, None, connection_status)
user.tab.http_client.get(url, cb, headers={'referer': referer})
return NOT_DONE_YET
def _requestError(self, err, request):
if not err.check(CancelledError):
request.setResponseCode(500)
request.write('Error fetching the content')
request.finish()
def _requestDisconnect(self, err, deferred=None, connection_status=None):
if deferred:
deferred.cancel()
if connection_status:
connection_status["finished"] = True
def end_response(self, request, original_url, connection_status, tabid, reply):
if connection_status["finished"]:
return
if hasattr(reply, 'readAll'):
content = str(reply.readAll())
status_code = reply.attribute(QNetworkRequest.HttpStatusCodeAttribute).toPyObject()
request.setResponseCode(status_code or 500)
else:
content = ''.join(chunk for chunk in reply.iter_content(65535))
redirect_url = None
request.setResponseCode(reply.status_code)
headers = {
'cache-control': 'private',
'pragma': 'no-cache',
'content-type': 'application/octet-stream',
}
for header in ('content-type', 'cache-control', 'pragma', 'vary',
'max-age'):
if hasattr(reply, 'hasRawHeader') and reply.hasRawHeader(header):
headers[header] = str(reply.rawHeader(header))
elif hasattr(reply, 'headers') and header in reply.headers:
headers[header] = str(reply.headers.get(header))
if header in headers:
request.setHeader(header, headers[header])
if headers['content-type'].strip().startswith('text/css'):
content = process_css(content, tabid, original_url)
request.write(content)
request.finish()
def _error(self, request, code, message):
request.setResponseCode(code)
return message
|
Python
| 0
|
@@ -912,36 +912,130 @@
-user = User.findById(tabid)%0A
+return self._load_resource(request, url, referer, tabid)%0A%0A def _load_resource(self, request, url, referer, tabid=None):
%0A
@@ -1103,16 +1103,26 @@
splash,
+%0A #
because
@@ -1131,26 +1131,16 @@
t does't
-%0A #
return
@@ -1181,16 +1181,26 @@
ting the
+%0A #
current
@@ -1222,26 +1222,16 @@
changes,
-%0A #
so if t
@@ -1261,16 +1261,26 @@
nnection
+%0A #
while f
@@ -1313,26 +1313,16 @@
ply note
-%0A #
it in t
@@ -1339,16 +1339,26 @@
and let
+%0A #
the req
@@ -1387,16 +1387,52 @@
orting.%0A
+ user = User.findById(tabid)%0A
@@ -1452,17 +1452,16 @@
atus = %7B
-
%22finishe
@@ -1469,17 +1469,16 @@
%22: False
-
%7D%0A
@@ -1530,24 +1530,64 @@
equest, url,
+ referer,%0A
connection_
@@ -1871,16 +1871,62 @@
connect,
+%0A
deferre
@@ -2182,24 +2182,66 @@
nnect, None,
+%0A
connection_
@@ -2819,16 +2819,25 @@
nal_url,
+ referer,
connect
@@ -2843,24 +2843,45 @@
tion_status,
+%0A
tabid, repl
@@ -3123,16 +3123,126 @@
bject()%0A
+ if status_code == 400:%0A return self._load_resource(request, original_url, referer)%0A
@@ -3383,40 +3383,8 @@
5))%0A
- redirect_url = None%0A
|
7dd344129e2ae30a857f72fdf61dca6e40768983
|
fix city field duplication (#17919)
|
addons/base_address_city/models/res_partner.py
|
addons/base_address_city/models/res_partner.py
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from lxml import etree
from odoo import api, models, fields
class Partner(models.Model):
_inherit = 'res.partner'
country_enforce_cities = fields.Boolean(related='country_id.enforce_cities')
city_id = fields.Many2one('res.city', string='Company')
@api.onchange('city_id')
def _onchange_city_id(self):
self.city = self.city_id.name
self.zip = self.city_id.zipcode
self.state_id = self.city_id.state_id
@api.model
def _fields_view_get_address(self, arch):
arch = super(Partner, self)._fields_view_get_address(arch)
# render the partner address accordingly to address_view_id
doc = etree.fromstring(arch)
for city_node in doc.xpath("//field[@name='city']"):
replacement_xml = """
<div>
<field name="country_enforce_cities" invisible="1"/>
<field name='city' attrs="{'invisible': [('country_enforce_cities', '=', True), ('city_id', '!=', False)], 'readonly': [('type', '=', 'contact'), ('parent_id', '!=', False)]}"/>
<field name='city_id' attrs="{'invisible': [('country_enforce_cities', '=', False)], 'readonly': [('type', '=', 'contact'), ('parent_id', '!=', False)]}" context="{'default_country_id': country_id}" domain="[('country_id', '=', country_id)]"/>
</div>
"""
city_id_node = etree.fromstring(replacement_xml)
city_node.getparent().replace(city_node, city_id_node)
arch = etree.tostring(doc)
return arch
|
Python
| 0.000004
|
@@ -667,24 +667,103 @@
dress(arch)%0A
+ if not self._context.get('no_address_format'):%0A return arch%0A
# re
|
e9d6dcf16f1696cfb3d934c0196ace38af5fae4c
|
get_invoice_line_account in valuation = 'periodic'
|
addons/stock_account/models/account_invoice.py
|
addons/stock_account/models/account_invoice.py
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
import logging
_logger = logging.getLogger(__name__)
class AccountInvoice(models.Model):
_inherit = "account.invoice"
@api.model
def invoice_line_move_line_get(self):
res = super(AccountInvoice, self).invoice_line_move_line_get()
if self.company_id.anglo_saxon_accounting and self.type in ('out_invoice', 'out_refund'):
for i_line in self.invoice_line_ids:
res.extend(self._anglo_saxon_sale_move_lines(i_line))
return res
@api.model
def _anglo_saxon_sale_move_lines(self, i_line):
"""Return the additional move lines for sales invoices and refunds.
i_line: An account.invoice.line object.
res: The move line entries produced so far by the parent move_line_get.
"""
inv = i_line.invoice_id
company_currency = inv.company_id.currency_id
if i_line.product_id.type == 'product' and i_line.product_id.valuation == 'real_time':
fpos = i_line.invoice_id.fiscal_position_id
accounts = i_line.product_id.product_tmpl_id.get_product_accounts(fiscal_pos=fpos)
# debit account dacc will be the output account
dacc = accounts['stock_output'].id
# credit account cacc will be the expense account
cacc = accounts['expense'].id
if dacc and cacc:
price_unit = i_line._get_anglo_saxon_price_unit()
if inv.currency_id != company_currency:
currency_id = inv.currency_id.id
amount_currency = i_line._get_price(company_currency, price_unit)
else:
currency_id = False
amount_currency = False
return [
{
'type': 'src',
'name': i_line.name[:64],
'price_unit': price_unit,
'quantity': i_line.quantity,
'price': price_unit * i_line.quantity,
'currency_id': currency_id,
'amount_currency': amount_currency,
'account_id':dacc,
'product_id':i_line.product_id.id,
'uom_id':i_line.uom_id.id,
'account_analytic_id': i_line.account_analytic_id.id,
'analytic_tag_ids': i_line.analytic_tag_ids.ids and [(6, 0, i_line.analytic_tag_ids.ids)] or False,
},
{
'type': 'src',
'name': i_line.name[:64],
'price_unit': price_unit,
'quantity': i_line.quantity,
'price': -1 * price_unit * i_line.quantity,
'currency_id': currency_id,
'amount_currency': -1 * amount_currency,
'account_id':cacc,
'product_id':i_line.product_id.id,
'uom_id':i_line.uom_id.id,
'account_analytic_id': i_line.account_analytic_id.id,
'analytic_tag_ids': i_line.analytic_tag_ids.ids and [(6, 0, i_line.analytic_tag_ids.ids)] or False,
},
]
return []
class AccountInvoiceLine(models.Model):
_inherit = "account.invoice.line"
def _get_anglo_saxon_price_unit(self):
self.ensure_one()
price = self.product_id.standard_price
if not self.uom_id or self.product_id.uom_id == self.uom_id:
return price
else:
return self.product_id.uom_id._compute_price(price, self.uom_id)
def _get_price(self, company_currency, price_unit):
if self.invoice_id.currency_id.id != company_currency.id:
price = company_currency.with_context(date=self.invoice_id.date_invoice).compute(price_unit * self.quantity, self.invoice_id.currency_id)
else:
price = price_unit * self.quantity
return round(price, self.invoice_id.currency_id.decimal_places)
def get_invoice_line_account(self, type, product, fpos, company):
if company.anglo_saxon_accounting and type in ('in_invoice', 'in_refund') and product and product.type == 'product':
accounts = product.product_tmpl_id.get_product_accounts(fiscal_pos=fpos)
if accounts['stock_input']:
return accounts['stock_input']
return super(AccountInvoiceLine, self).get_invoice_line_account(type, product, fpos, company)
|
Python
| 0.999997
|
@@ -4529,16 +4529,77 @@
if
+product.categ_id.property_valuation != 'manual_periodic' and
accounts
|
7a105fe9201882749a7415bff580b9588b7f9a46
|
update version number
|
account_invoice_line_description/__openerp__.py
|
account_invoice_line_description/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Account invoice line description",
'version': '8.0.1.0.0',
'category': 'Generic Modules/Accounting',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'account',
],
"data": [
'security/invoice_security.xml',
'res_config_view.xml',
],
"installable": True
}
|
Python
| 0.000088
|
@@ -1022,17 +1022,17 @@
8.0.1.0.
-0
+1
',%0A '
|
2a53e8e94f2a81e7b5d01b25004d6a94e1041f33
|
fix test
|
account_reversal/tests/test_account_reversal.py
|
account_reversal/tests/test_account_reversal.py
|
# -*- encoding: utf-8 -*-
# #############################################################################
#
# Account partner required module for OpenERP
# Copyright (C) 2014 Acsone (http://acsone.eu).
# @author Stéphane Bidoul <stephane.bidoul@acsone.eu>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from datetime import datetime
from openerp.tests import common
from openerp import fields
class test_account_reversal(common.TransactionCase):
def setUp(self):
super(test_account_reversal, self).setUp()
self.move_obj = self.env['account.move']
self.move_line_obj = self.env['account.move.line']
def _create_move(self, with_partner, amount=100):
date = datetime.now()
company_id = self.env.ref('base.main_company').id
period_id = self.env['account.period'].with_context(
account_period_prefer_normal=True,
company_id=self.env.ref('base.main_company').id).find(date)[0]
journal = self.env['account.journal'].create({
'name': 'Test journal',
'code': 'COD',
'type': 'sale',
'sequence_id': self.env.ref('account.sequence_sale_journal').id,
'company_id': company_id})
move_vals = {
'journal_id': journal.id,
'period_id': period_id.id,
'date': date,
'company_id': company_id,
}
# Why this doesn't work I don't know:
# acct = self.ref('account.a_sale'
account1, account2 = self.env['account.account'].search(
[('company_id', '=', company_id), ('type', '=', 'other')])[:2]
move_id = self.move_obj.create(move_vals)
self.move_line_obj.create({
'move_id': move_id.id,
'name': '/',
'debit': 0,
'credit': amount,
'company_id': company_id,
'account_id': account1.id})
move_line_id = self.move_line_obj.create(
{
'move_id': move_id.id,
'name': '/',
'debit': amount,
'credit': 0,
'account_id': account2.id,
'company_id': company_id,
'partner_id': self.ref('base.res_partner_1')
if with_partner else False
}
)
return move_line_id.move_id
def test_reverse(self):
move = self._create_move(with_partner=False)
company_id = self.env.ref('base.main_company').id
account1 = self.env['account.account'].search(
[('company_id', '=', company_id), ('type', '=', 'other')])[0]
movestr = ''.join(['%.2f%.2f%s' % (x.debit, x.credit,
x.account_id == account1 and
'aaaa' or 'bbbb')
for x in move.line_id])
self.assertEqual(movestr, '100.000.00bbbb0.00100.00aaaa')
yesterday_date = datetime(
year=time.localtime().tm_year, month=3, day=3
)
yesterday = fields.Date.to_string(yesterday_date)
reversed_move = move.create_reversals(yesterday)
movestr_reversed = ''.join(
['%.2f%.2f%s' % (x.debit, x.credit,
x.account_id == account1 and 'aaaa' or 'bbbb')
for x in reversed_move.line_id])
self.assertEqual(movestr_reversed, '0.00100.00bbbb100.000.00aaaa')
|
Python
| 0.000036
|
@@ -3841,16 +3841,20 @@
sed_move
+_ids
= move.
@@ -3881,16 +3881,92 @@
terday)%0A
+ reversed_moves = self.env%5B'account.move'%5D.browse(reversed_move_ids)%0A
@@ -4148,24 +4148,25 @@
eversed_move
+s
.line_id%5D)%0A
|
b20d07aa14d3c11d5509e96fd4911de5886afe28
|
fix order for AttributeViewSet
|
apps/domain/views.py
|
apps/domain/views.py
|
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from rest_framework import viewsets, mixins, filters
from rest_framework.permissions import DjangoModelPermissions, IsAuthenticated
from .models import *
from .serializers import *
@login_required()
def domain(request):
return render(request, 'domain/domain.html')
class AttributeEntityViewSet(viewsets.ReadOnlyModelViewSet):
permission_classes = (DjangoModelPermissions, )
queryset = AttributeEntity.objects.filter(attribute__attributeset=None).order_by('tag')
serializer_class = AttributeEntitySerializer
class AttributeViewSet(viewsets.ModelViewSet):
permission_classes = (DjangoModelPermissions, )
queryset = Attribute.objects.order_by('tag')
serializer_class = AttributeSerializer
filter_backends = (filters.DjangoFilterBackend, )
filter_fields = ('tag', )
class AttributeSetViewSet(viewsets.ModelViewSet):
permission_classes = (DjangoModelPermissions, )
queryset = AttributeSet.objects.order_by('tag')
serializer_class = AttributeSetSerializer
filter_backends = (filters.DjangoFilterBackend, )
filter_fields = ('tag', )
class ValueTypeViewSet(mixins.ListModelMixin, viewsets.GenericViewSet):
permission_classes = (IsAuthenticated, )
serializer_class = ValueTypeSerializer
def get_queryset(self):
return Attribute.VALUE_TYPE_CHOICES
|
Python
| 0.000001
|
@@ -753,32 +753,53 @@
bjects.order_by(
+'attributeset__tag',
'tag')%0A seria
|
34035c4b272e9271834c531990c404940eee8633
|
Add a link between vote and subproposal
|
apps/votes/models.py
|
apps/votes/models.py
|
from django.db import models
from meps.models import MEP
class Proposal(models.Model):
id = models.CharField(max_length=63, primary_key=True)
title = models.CharField(max_length=255, unique=True)
class SubProposal(models.Model):
datetime = models.DateTimeField()
subject = models.CharField(max_length=255)
part = models.CharField(max_length=255)
description = models.CharField(max_length=511)
weight = models.IntegerField(null=True)
vote = models.ForeignKey(Proposal)
recommendation = models.CharField(max_length=15, choices=((u'against', u'against'), (u'for', u'for')), null=True)
class Vote(models.Model):
choice = models.CharField(max_length=15, choices=((u'for', u'for'), (u'against', u'against'), (u'abstention', u'abstention')))
name = models.CharField(max_length=127)
mep = models.ForeignKey(MEP)
|
Python
| 0
|
@@ -815,16 +815,66 @@
th=127)%0A
+ sub_proposal = models.ForeignKey(SubProposal)%0A
mep
|
757c0c9bc14118bd0b95548270a6a3d6a2e54e75
|
return len of step pile contents
|
conjure/ui/widgets/step.py
|
conjure/ui/widgets/step.py
|
from ubuntui.utils import Padding, Color
from ubuntui.widgets.hr import HR
from ubuntui.widgets.buttons import submit_btn
from urwid import (WidgetWrap, Pile, Columns, Text)
class StepWidget(WidgetWrap):
def __init__(self, app, step_model, step_model_widget, cb):
"""
Arguments:
step_model: step model
step_model_widget: step model widget
cb: callback
"""
self.app = app
self._step_model = step_model
self._step_model_widget = step_model_widget
self.cb = cb
self.step_pile = self.build_widget()
super().__init__(self.step_pile)
@property
def model(self):
return self._step_model
@property
def widget(self):
return self._step_model_widget
def __repr__(self):
return "<StepWidget: {}>".format(self.model.title)
def set_description(self, description, color='info_minor'):
self.widget.description.set_text(
(color, description))
def set_icon_state(self, result_code):
""" updates status icon
Arguments:
icon: icon widget
result_code: 3 types of results, error, waiting, complete
"""
if result_code == "error":
self.widget.icon.set_text(
("error_icon", "\N{BLACK FLAG}"))
elif result_code == "waiting":
self.widget.icon.set_text(
("pending_icon", "\N{HOURGLASS}"))
elif result_code == "active":
self.widget.icon.set_text(
("success_icon", "\N{BALLOT BOX WITH CHECK}"))
else:
# NOTE: Should not get here, if we do make sure we account
# for that error type above.
self.widget.icon.set_text(("error_icon", "?"))
@property
def current_button_index(self):
""" Returns the pile index where the button is located
"""
return self.step_pile.contents.index(
self.step_pile.contents[len(self.step_pile.contents)-2])
@property
def current_button_widget(self):
""" Returns the current button widget
"""
if self.button:
return self.button
def clear_button(self):
""" Clears current button so it can't be pressed again
"""
self.app.log.debug(
"Contents: {}".format(
self.step_pile.contents[self.current_button_index]))
self.step_pile.contents[self.current_button_index] = (
Text(""), self.step_pile.options())
def build_widget(self):
return Pile([
Columns(
[
('fixed', 3, self.widget.icon),
self.widget.description,
], dividechars=1
)]
)
def generate_additional_input(self):
""" Generates additional input fields, useful for doing it after
a previous step is run
"""
self.set_description(self.model.description, 'body')
for i in self.widget.additional_input:
self.app.log.debug(i)
self.step_pile.contents.append((Padding.line_break(""),
self.step_pile.options()))
column_input = [
('weight', 0.5, Padding.left(i['label'], left=5))
]
if i['input']:
column_input.append(
('weight', 1, Color.string_input(
i['input'],
focus_map='string_input focus')))
self.step_pile.contents.append(
(Columns(column_input, dividechars=3),
self.step_pile.options()))
self.button = submit_btn(on_press=self.submit)
self.step_pile.contents.append(
(Padding.right_20(
Color.button_primary(self.button,
focus_map='button_primary focus')),
self.step_pile.options()))
self.step_pile.contents.append((HR(), self.step_pile.options()))
self.step_pile.focus_position = self.current_button_index
def submit(self, btn):
self.cb(self)
|
Python
| 0.000001
|
@@ -1915,75 +1915,8 @@
urn
-self.step_pile.contents.index(%0A self.step_pile.contents%5B
len(
@@ -1941,18 +1941,16 @@
tents)-2
-%5D)
%0A%0A @p
|
a21dc1a4bcf89fe34bffcd3b297eb8c3848b53cf
|
random.choice is fast enough, simplify away numpy
|
streamkov/markov.py
|
streamkov/markov.py
|
# -*- coding: utf-8 -*-
"""
markov
~~~~~~
Class for streamable markov chain
"""
from collections import Counter, defaultdict
import numpy as np
def has_sentence_boundary(bigram):
return bigram[0].endswith('.')
class MarkovGenerator(object):
def __init__(self):
self.word_list = []
self.word_index = {}
self.word_states = defaultdict(WordState)
self.initial_state = WordState()
def draw(self):
tokens = []
word = self.initial_state.draw()
while not word.endswith('.'):
tokens.append(word)
word = self.word_states[word].draw()
tokens.append(word)
return ' '.join(tokens).capitalize()
def receive(self, bigram):
if has_sentence_boundary(bigram):
self.initial_state.receive(bigram[1])
return
self.word_states[bigram[0]].receive(bigram[1])
class WordState(object):
"""
Information and methods for transitioning from a word
"""
def __init__(self):
self.cumsum = []
self.labels = []
self.count = 0.
def receive(self, word):
word_counts = self._recover_word_counts()
self.count += 1
if word not in word_counts:
self.labels.append(word)
word_counts[word] += 1
self._set_cumsum(word_counts)
def _recover_word_counts(self):
freq_vector = [
(self.cumsum[i] - (self.cumsum[i - 1] if i > 0 else 0.))
for i, _ in enumerate(self.cumsum)
]
return Counter({
k: self.count * v
for k, v in zip(self.labels, freq_vector)
})
def _set_cumsum(self, word_counts):
freq_vector = [
word_counts[w] / self.count
for w in self.labels
]
self.cumsum = []
running_total = 0.
for freq in freq_vector:
running_total += freq
self.cumsum.append(running_total)
def draw(self):
return self.labels[np.searchsorted(self.cumsum, np.random.rand())]
|
Python
| 0.999999
|
@@ -113,17 +113,8 @@
port
- Counter,
def
@@ -133,91 +133,14 @@
ort
-numpy as np%0A%0A%0Adef has_sentence_boundary(bigram):%0A return bigram%5B0%5D.endswith('.')
+random
%0A%0A%0Ac
@@ -386,32 +386,36 @@
%5B%5D%0A word
+_idx
= self.initial_
@@ -419,32 +419,72 @@
al_state.draw()%0A
+ word = self.word_list%5Bword_idx%5D%0A
while no
@@ -545,32 +545,36 @@
word
+_idx
= self.word_sta
@@ -581,16 +581,20 @@
tes%5Bword
+_idx
%5D.draw()
@@ -586,32 +586,77 @@
ord_idx%5D.draw()%0A
+ word = self.word_list%5Bword_idx%5D%0A%0A
tokens.a
@@ -757,38 +757,97 @@
i
-f has_sentence_boundary(bigram
+x_1, ix_2 = self.bigrams_to_indices(bigram)%0A%0A if self.word_list%5Bix_1%5D.endswith('.'
):%0A
@@ -884,25 +884,20 @@
receive(
-bigram%5B1%5D
+ix_2
)%0A
@@ -909,16 +909,17 @@
return%0A
+%0A
@@ -939,37 +939,417 @@
tes%5B
-bigram%5B0%5D%5D.receive(bigram%5B1%5D)
+ix_1%5D.receive(ix_2)%0A%0A def bigrams_to_indices(self, bigram):%0A indices = %5B%5D%0A for word in bigram:%0A if word in self.word_index:%0A indices.append(self.word_index%5Bword%5D)%0A else:%0A ix = len(self.word_list)%0A self.word_list.append(word)%0A self.word_index%5Bword%5D = ix%0A indices.append(ix)%0A return indices
%0A%0A%0Ac
@@ -1442,24 +1442,25 @@
ord%0A %22%22%22%0A
+%0A
def __in
@@ -1488,346 +1488,106 @@
elf.
-cumsum = %5B%5D%0A self.labels = %5B%5D%0A self.count = 0.%0A%0A def receive(self, word):%0A word_counts = self._recover_word_counts()%0A self.count += 1%0A%0A if word not in word_counts:%0A self.labels.append(word)%0A word_counts%5Bword%5D += 1%0A%0A self._set_cumsum(word_counts)%0A%0A def _recover_word_counts
+adjacencies = %5B%5D%0A%0A def receive(self, word):%0A self.adjacencies.append(word)%0A%0A def draw
(sel
@@ -1602,677 +1602,43 @@
-f
re
-q_vector = %5B%0A (self.cumsum%5Bi%5D - (self.cumsum%5Bi - 1%5D if i %3E 0 else 0.))%0A for i, _ in enumerate(self.cumsum)%0A %5D%0A%0A return Counter(%7B%0A k: self.count * v%0A for k, v in zip(self.labels, freq_vector)%0A %7D)%0A%0A def _set_cumsum(self, word_counts):%0A freq_vector = %5B%0A word_counts%5Bw%5D / self.count%0A for w in self.labels%0A %5D%0A%0A self.cumsum = %5B%5D%0A running_total = 0.%0A for freq in freq_vector:%0A running_total += freq%0A self.cumsum.append(running_total)%0A%0A def draw(self):%0A return self.labels%5Bnp.searchsorted(self.cumsum, np.random.rand())%5D
+turn random.choice(self.adjacencies)
%0A
|
a2082e319854f88842e3acf8244d38a81f7046ae
|
Add secure/insecure reverse helpers.
|
subdomains/utils.py
|
subdomains/utils.py
|
from urlparse import urlunparse
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse as simple_reverse
def urljoin(domain, path=None, scheme=None):
if path is None:
path = ''
if scheme is None:
scheme = getattr(settings, 'DEFAULT_URL_SCHEME', 'http')
return urlunparse((scheme, domain, path, None, None, None))
def reverse(viewname, subdomain=None, scheme=None, urlconf=None,
*args, **kwargs):
# We imply the urlconf from the `subdomain` argument -- providing the
# urlconf is a violation of this logic.
if urlconf is not None:
raise ValueError('`subdomains.utils.reverse` does not accept the '
'`urlconf` argument.')
site = Site.objects.get_current()
urlconf = settings.SUBDOMAIN_URLCONFS.get(subdomain)
if subdomain is not None:
domain = '%s.%s' % (subdomain, site.domain)
else:
domain = site.domain
path = simple_reverse(viewname, urlconf=urlconf, *args, **kwargs)
return urljoin(domain, path, scheme=scheme)
|
Python
| 0.000001
|
@@ -1,12 +1,29 @@
+import functools%0A
from urlpars
@@ -1109,8 +1109,188 @@
scheme)%0A
+%0A%0Ainsecure_reverse = functools.partial(reverse, scheme='http')%0Asecure_reverse = functools.partial(reverse, scheme='https')%0Arelative_reverse = functools.partial(reverse, scheme='')%0A
|
8315fb3fb1b7ef65b9c1ced4feaeb473863495f5
|
Remove current_site_domain
|
subdomains/utils.py
|
subdomains/utils.py
|
import functools
try:
from urlparse import urlunparse
except ImportError:
from urllib.parse import urlunparse
from django.conf import settings
from django.core.urlresolvers import reverse as simple_reverse
def current_site_domain():
domain = getattr(settings, 'BASE_DOMAIN', False)
prefix = 'www.'
if getattr(settings, 'REMOVE_WWW_FROM_DOMAIN', False) \
and domain.startswith(prefix):
domain = domain.replace(prefix, '', 1)
return domain
get_domain = current_site_domain
def urljoin(domain, path=None, scheme=None):
"""
Joins a domain, path and scheme part together, returning a full URL.
:param domain: the domain, e.g. ``example.com``
:param path: the path part of the URL, e.g. ``/example/``
:param scheme: the scheme part of the URL, e.g. ``http``, defaulting to the
value of ``settings.DEFAULT_URL_SCHEME``
:returns: a full URL
"""
if scheme is None:
scheme = getattr(settings, 'DEFAULT_URL_SCHEME', 'http')
return urlunparse((scheme, domain, path or '', None, None, None))
def reverse(viewname, subdomain=None, scheme=None, args=None, kwargs=None, current_app=None):
"""
Reverses a URL from the given parameters, in a similar fashion to
:meth:`django.core.urlresolvers.reverse`.
:param viewname: the name of URL
:param subdomain: the subdomain to use for URL reversing
:param scheme: the scheme to use when generating the full URL
:param args: positional arguments used for URL reversing
:param kwargs: named arguments used for URL reversing
:param current_app: hint for the currently executing application
"""
urlconf = settings.SUBDOMAIN_URLCONFS.get(subdomain, settings.ROOT_URLCONF)
domain = get_domain()
if subdomain is not None:
domain = '%s.%s' % (subdomain, domain)
path = simple_reverse(viewname, urlconf=urlconf, args=args, kwargs=kwargs, current_app=current_app)
return urljoin(domain, path, scheme=scheme)
#: :func:`reverse` bound to insecure (non-HTTPS) URLs scheme
insecure_reverse = functools.partial(reverse, scheme='http')
#: :func:`reverse` bound to secure (HTTPS) URLs scheme
secure_reverse = functools.partial(reverse, scheme='https')
#: :func:`reverse` bound to be relative to the current scheme
relative_reverse = functools.partial(reverse, scheme='')
|
Python
| 0.000157
|
@@ -214,28 +214,19 @@
e%0A%0A%0Adef
-current_site
+get
_domain(
@@ -476,42 +476,8 @@
in%0A%0A
-get_domain = current_site_domain%0A%0A
%0Adef
@@ -1087,16 +1087,28 @@
me=None,
+%0A
args=No
@@ -1831,32 +1831,41 @@
simple_reverse(
+%0A
viewname, urlcon
@@ -1857,16 +1857,24 @@
iewname,
+%0A
urlconf
@@ -1882,16 +1882,24 @@
urlconf,
+%0A
args=ar
@@ -1901,16 +1901,24 @@
gs=args,
+%0A
kwargs=
@@ -1924,16 +1924,24 @@
=kwargs,
+%0A
current
@@ -1956,16 +1956,21 @@
rent_app
+%0A
)%0A re
|
496481e3bd6392a44788fadc7cf517fc36143e96
|
Change to cb_story, clean up TZ handling some more
|
contrib/plugins/w3cdate.py
|
contrib/plugins/w3cdate.py
|
"""
Add a 'w3cdate' key to every entry -- this contains the date in ISO8601 format
WARNING: you must have PyXML installed as part of your python installation
in order for this plugin to work
Place this plugin early in your load_plugins list, so that the w3cdate will
be available to subsequent plugins
"""
__author__ = "Ted Leung <twl@sauria.com>"
__version__ = "$Id:"
__copyright__ = "Copyright (c) 2003 Ted Leung"
__license__ = "Python"
import xml.utils.iso8601
import time
def cb_prepare(args):
request = args["request"]
form = request.getHttp()['form']
config = request.getConfiguration()
data = request.getData()
entry_list = data['entry_list']
for i in range(len(entry_list)):
entry = entry_list[i]
t = entry['timetuple']
# adjust for daylight savings time
t = t[0],t[1],t[2],t[3]+time.localtime()[-1],t[4],t[5],t[6],t[7],t[8]
entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(t))
|
Python
| 0
|
@@ -472,16 +472,44 @@
ort time
+%0Afrom Pyblosxom import tools
%0A%0Adef cb
@@ -513,15 +513,13 @@
cb_
-prepare
+story
(arg
@@ -540,100 +540,39 @@
t =
-args%5B%22request%22%5D%0A form = request.getHttp()%5B'form'%5D%0A config = request.getConfiguration()
+tools.get_registry()%5B%22request%22%5D
%0A
@@ -797,76 +797,86 @@
t
- = t%5B0%5D,t%5B1%5D,t%5B2%5D,t%5B3%5D+time.localtime()%5B-1%5D,t%5B4%5D,t%5B5%5D,t%5B6%5D,t%5B7%5D,t%5B8%5D
+zoffset = 0%0A if time.timezone != 0:%0A tzoffset = time.altzone
%0A
@@ -921,13 +921,16 @@
601.
-ctime
+tostring
(tim
@@ -940,10 +940,19 @@
ktime(t)
+,tzoffset
)%0A
|
f40a8e5b475ba2f82836166f70ae75aab0c269c8
|
Change permissions on tagging
|
core/web/api/observable.py
|
core/web/api/observable.py
|
from __future__ import unicode_literals
from flask_classy import route
from flask import request
from core.web.api.crud import CrudApi, CrudSearchApi
from core import observables
from core.web.api.api import render
from core.web.helpers import get_object_or_404
from core.helpers import refang
from core.web.helpers import requires_permissions
class Observable(CrudApi):
objectmanager = observables.Observable
def _modify_observable(self, observable, params={}):
source = params.pop('source', None)
context = params.pop('context', None)
tags = params.pop('tags', None)
strict = bool(params.pop('strict', False))
if source:
observable.add_source(source)
if context:
observable.add_context(context)
if tags is not None:
observable.tag(tags, strict)
if params:
observable.clean_update(**params)
info = observable.info()
return info
@route("/", methods=["POST"])
@requires_permissions('write')
def new(self):
"""Create a new Observable
Create a new Observable from the JSON object passed in the ``POST`` data.
:<json object params: JSON object containing fields to set
:<json boolean refang: If set, the observable will be refanged before being added to the database
"""
params = request.json
if params.pop('refang', None):
obs = self.objectmanager.add_text(refang(params.pop('value')))
else:
obs = self.objectmanager.add_text(params.pop('value'))
return render(self._modify_observable(obs, params))
@route("/bulk", methods=["POST"])
@requires_permissions('write')
def bulk(self):
"""Bulk-add observables
Bulk-add Observables from an array of strings.
:<json [String] observables: Array of Strings representing observables (URLs, IPs, hostnames, etc.)
:<json boolean refang: If set, the observables will be refanged before being added to the database
"""
added = []
params = request.json
observables = params.pop('observables', [])
for item in observables:
if params.pop('refang', None):
obs = self.objectmanager.add_text(refang(item))
else:
obs = self.objectmanager.add_text(item)
added.append(self._modify_observable(obs, params.copy()))
return render(added)
@route("/<id>/context", methods=["POST"])
@requires_permissions('read')
def context(self, id):
"""Add context to an observable
:<json object context: Context JSON to be added. Must include a ``source`` key.
:<json string old_source: String defining the source to be replaced.
:>json object: The context object that was actually added
"""
observable = get_object_or_404(self.objectmanager, id=id)
context = request.json.pop('context', {})
old_source = request.json.pop('old_source', None)
observable.add_context(context, replace_source=old_source)
return render(context)
@route("/<id>/context", methods=["DELETE"])
@requires_permissions('write')
def remove_context(self, id):
"""Removes context from an observable
:<json object context: Context JSON to be added. Must include a ``source`` key.
:>json object: The context object that was actually delete
"""
observable = get_object_or_404(self.objectmanager, id=id)
context = request.json.pop('context', {})
observable.remove_context(context)
return render(context)
@requires_permissions('write')
def post(self, id):
obs = self.objectmanager.objects.get(id=id)
return render(self._modify_observable(obs, request.json))
class ObservableSearch(CrudSearchApi):
template = 'observable_api.html'
objectmanager = observables.Observable
|
Python
| 0
|
@@ -90,16 +90,60 @@
request
+, abort%0Afrom flask_login import current_user
%0A%0Afrom c
@@ -3806,24 +3806,153 @@
.get(id=id)%0A
+ j = request.json%0A if not current_user.has_permission('observable', 'tag') and 'tags' in j:%0A abort(401)%0A
retu
|
e5e83b75e250ee3c6d8084e23ee777d519293cb6
|
Fix for keystone / swift 1.8.0
|
swprobe/__init__.py
|
swprobe/__init__.py
|
# Copyright (c) 2012 Spil Games
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
version_info = (0 , 3, 0)
version = __version__ = ".".join(map(str, version_info))
|
Python
| 0
|
@@ -599,17 +599,17 @@
(0 , 3,
-0
+1
)%0Aversio
|
7e9072e97acdc2c5c873f3abf850dccbfd8d6bac
|
Add a method to shutdown global event loop
|
tartpy/eventloop.py
|
tartpy/eventloop.py
|
"""
Very basic implementation of an event loop
==========================================
Exports
-------
- ``ThreadedEventLoop``: global event loop running in a thread
- ``ManualEventLoop``: global event loop to be run synchronously
- ``individual_loop_step``: process one event
- ``individual_loop``: process events indefinitely
"""
import queue
import sys
import threading
import traceback
def _format_exception(exc_info):
"""Create a message with details on the exception."""
exc_type, exc_value, exc_tb = exc_info
return {'exception': {'type': exc_type,
'value': exc_value,
'traceback': exc_tb},
'traceback': traceback.format_exception(*exc_info)}
def individual_loop_step(queue, actor, block=True):
"""Process one event for ``actor``.
The argument ``block`` decides whether to block or not when there
are no messages. Any exception in the behaviors is reported
through the error mechanism of the actor.
"""
message = queue.get(block=block)
try:
return actor.behavior(message)
except Exception as exc:
err = _format_exception(sys.exc_info())
actor.error(err)
return err
def individual_loop(queue, actor):
"""Process events for ``actor`` indefinitely."""
while True:
individual_loop_step(queue, actor)
def global_loop_step(queue, block=False):
"""Global event loop step.
Process one event extracted from the queue.
"""
actor, message = queue.get(block=block)
try:
actor.behavior(message)
except Exception as exc:
actor.error(_format_exception(sys.exc_info()))
def global_loop(queue):
"""Process events indefinitely."""
while True:
global_loop_step(queue, block=True)
class EventLoop(object):
"""A generic event loop object.
An ``EventLoop`` is a singleton. Get an instance with::
EventLoop.get_loop()
"""
loop = None
def __init__(self):
self.queue = queue.Queue()
def schedule(self, message, target):
"""Schedule an event."""
self.queue.put((target, message))
@classmethod
def get_loop(cls):
"""Obtain a loop.
Start a new one if necessary.
"""
if cls.loop is None:
cls.loop = cls()
return cls.loop
class ThreadedEventLoop(EventLoop):
"""An event loop that dispatches events from a thread."""
def __init__(self):
super().__init__()
self.thread = threading.Thread(
target=global_loop,
args=(self.queue,),
name='global-loop')
self.thread.start()
class ManualEventLoop(EventLoop):
"""An event loop that needs to be run explicitly."""
def run(self):
"""Process all events in the queue until empty.
It doesn't block if the queue is empty.
"""
try:
while True:
global_loop_step(self.queue)
except queue.Empty:
return
|
Python
| 0.000002
|
@@ -377,16 +377,28 @@
reading%0A
+import time%0A
import t
@@ -406,16 +406,16 @@
aceback%0A
-
%0A%0Adef _f
@@ -1573,32 +1573,84 @@
et(block=block)%0A
+ if actor is None:%0A raise StopIteration()%0A
try:%0A
@@ -1811,36 +1811,121 @@
ts indefinitely.
-%22%22%22%0A
+%0A%0A To stop the loop put a %60%60(None, None)%60%60 object in the queue.%0A%0A %22%22%22%0A try:%0A
while True:%0A
@@ -1916,32 +1916,36 @@
while True:%0A
+
global_l
@@ -1972,16 +1972,57 @@
k=True)%0A
+ except StopIteration:%0A return%0A
%0A%0Aclass
@@ -2571,16 +2571,79 @@
cls.loop
+%0A%0A def stop(self):%0A %22%22%22Stop the loop.%22%22%22%0A pass
%0A
@@ -2740,24 +2740,128 @@
thread.%22%22%22%0A%0A
+ # Wait a bit before checking the queue for emptyness%0A WAIT_FOR_EMPTY = 0.1 # seconds%0A %0A
def __in
@@ -3035,16 +3035,16 @@
-loop')%0A
-
@@ -3064,16 +3064,317 @@
tart()%0A%0A
+ def stop(self):%0A %22%22%22Shutdown the eventloop.%0A%0A Make sure the queue is empty.%0A %0A %22%22%22%0A while True:%0A time.sleep(self.WAIT_FOR_EMPTY)%0A if self.queue.empty():%0A break%0A self.queue.put((None, None))%0A self.thread.join()%0A%0A
%0Aclass M
|
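The stop() methods added above rely on a (None, None) sentinel: global_loop_step raises StopIteration when it dequeues it, and ThreadedEventLoop.stop() waits for the queue to drain before sending the sentinel and joining the worker thread. A minimal usage sketch, assuming the patched tartpy.eventloop module is importable; the PrintActor stub is hypothetical:

from tartpy.eventloop import ThreadedEventLoop

class PrintActor:
    # Bare-bones actor exposing the behavior/error interface the loop expects.
    def behavior(self, message):
        print('got', message)
    def error(self, err):
        print('error', err)

loop = ThreadedEventLoop.get_loop()
loop.schedule({'tag': 'greet'}, PrintActor())  # queue holds (actor, message) pairs
loop.stop()  # polls until the queue is empty, enqueues the sentinel, joins the thread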
8872f069b8aebc6c12fc95070181917136a08ac0
|
Add device name validation
|
tcconfig/_common.py
|
tcconfig/_common.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import contextlib
import errno
import sys
import logbook
import six
import typepy
import subprocrunner as spr
from ._const import (
KILO_SIZE,
Network,
Tc,
TcCommandOutput,
)
from ._error import NetworkInterfaceNotFoundError
from ._logger import logger
@contextlib.contextmanager
def logging_context(name):
logger.debug("|---- {:s}: {:s} -----".format("start", name))
try:
yield
finally:
logger.debug("----- {:s}: {:s} ----|".format("complete", name))
def is_anywhere_network(network, ip_version):
try:
network = network.strip()
except AttributeError as e:
raise ValueError(e)
if ip_version == 4:
return network == get_anywhere_network(ip_version)
if ip_version == 6:
return network in (
get_anywhere_network(ip_version), "0:0:0:0:0:0:0:0/0")
raise ValueError("invalid ip version: {}".format(ip_version))
def is_execute_tc_command(tc_command_output):
return tc_command_output == TcCommandOutput.NOT_SET
def get_anywhere_network(ip_version):
ip_version_n = typepy.type.Integer(ip_version).try_convert()
if ip_version_n == 4:
return Network.Ipv4.ANYWHERE
if ip_version_n == 6:
return Network.Ipv6.ANYWHERE
raise ValueError("unknown ip version: {}".format(ip_version))
def check_tc_command_installation():
try:
spr.Which("tc").verify()
except spr.CommandNotFoundError as e:
logger.error(e)
sys.exit(errno.ENOENT)
def verify_network_interface(device):
try:
import netifaces
except ImportError:
return
if device not in netifaces.interfaces():
raise NetworkInterfaceNotFoundError(
"network interface not found: {}".format(device))
def sanitize_network(network, ip_version):
"""
:return: Network string
:rtype: str
:raises ValueError: if the network string is invalid.
"""
import ipaddress
if typepy.is_null_string(network) or network.lower() == "anywhere":
return get_anywhere_network(ip_version)
try:
if ip_version == 4:
ipaddress.IPv4Address(network)
return network + "/32"
if ip_version == 6:
return ipaddress.IPv6Address(network).compressed
except ipaddress.AddressValueError:
pass
# validate network str ---
if ip_version == 4:
return ipaddress.IPv4Network(six.text_type(network)).compressed
if ip_version == 6:
return ipaddress.IPv6Network(six.text_type(network)).compressed
raise ValueError("unexpected ip version: {}".format(ip_version))
def run_command_helper(command, error_regexp, message, exception=None):
if logger.level != logbook.DEBUG:
spr.set_logger(is_enable=False)
proc = spr.SubprocessRunner(command)
proc.run()
if logger.level != logbook.DEBUG:
spr.set_logger(is_enable=True)
if proc.returncode == 0:
return 0
match = error_regexp.search(proc.stderr)
if match is None:
logger.error(proc.stderr)
return proc.returncode
if typepy.is_not_null_string(message):
logger.notice(message)
if exception is not None:
raise exception(command)
return proc.returncode
def run_tc_show(subcommand, device):
if subcommand not in Tc.Subcommand.LIST:
raise ValueError("unexpected tc sub command: {}".format(subcommand))
runner = spr.SubprocessRunner(
"tc {:s} show dev {:s}".format(subcommand, device))
runner.run()
return runner.stdout
def _get_original_tcconfig_command(tcconfig_command):
return " ".join([tcconfig_command] + [
command_item for command_item in sys.argv[1:]
if command_item != "--tc-script"
])
def write_tc_script(tcconfig_command, command_history, filename_suffix=None):
import datetime
import io
import os
filename_item_list = [tcconfig_command]
if typepy.is_not_null_string(filename_suffix):
filename_item_list.append(filename_suffix)
script_line_list = [
"#!/bin/sh",
"",
"# tc script file:",
]
if tcconfig_command != Tc.Command.TCSHOW:
script_line_list.extend([
"# the following command sequence lead to equivalent results as",
"# '{:s}'.".format(
_get_original_tcconfig_command(tcconfig_command)),
])
script_line_list.extend([
"# created by {:s} on {:s}.".format(
tcconfig_command,
datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S%z")),
"",
command_history,
])
filename = "_".join(filename_item_list) + ".sh"
with io.open(filename, "w", encoding="utf8") as fp:
fp.write("\n".join(script_line_list) + "\n")
os.chmod(filename, 0o755)
logger.info("written a tc script to '{:s}'".format(filename))
def get_iproute2_upper_limite_rate():
"""
:return: Upper bandwidth rate limit of iproute2 [Kbps].
:rtype: int
"""
from ._converter import Humanreadable
# upper bandwidth rate limit of iproute2 was 34,359,738,360
# bits per second older than 3.14.0
# http://git.kernel.org/cgit/linux/kernel/git/shemminger/iproute2.git/commit/?id=8334bb325d5178483a3063c5f06858b46d993dc7
return Humanreadable(
"32G", kilo_size=KILO_SIZE).to_kilo_bit()
def read_iface_speed(tc_device):
with open("/sys/class/net/{:s}/speed".format(tc_device)) as f:
return int(f.read().strip())
def get_no_limit_kbits(tc_device):
if typepy.is_null_string(tc_device):
return get_iproute2_upper_limite_rate()
try:
speed_value = read_iface_speed(tc_device)
except IOError:
return get_iproute2_upper_limite_rate()
if speed_value < 0:
# default to the iproute2 upper limit when speed value is -1 in
# paravirtualized network interfaces
return get_iproute2_upper_limite_rate()
return min(
speed_value * KILO_SIZE,
get_iproute2_upper_limite_rate())
|
Python
| 0.000001
|
@@ -3593,16 +3593,126 @@
mand))%0A%0A
+ if typepy.is_null_string(device):%0A raise ValueError(%22device must be a string: %7B%7D%22.format(device))%0A%0A
runn
|
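The guard added to run_tc_show() above rejects an empty or missing device name before any tc command line is built. The same check in isolation; the function name here is illustrative only:

import typepy

def require_device(device):
    # Mirrors the added guard: fail fast instead of running "tc ... show dev ''".
    if typepy.is_null_string(device):
        raise ValueError("device must be a string: {}".format(device))
    return device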
53d37567d592ba47d1918e943a63271be02cfe9c
|
Refactor patching of query
|
test/spambl_test.py
|
test/spambl_test.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
from spambl import DNSBL, UnknownCodeError, NXDOMAIN, HpHosts, DNSBLService
from mock import Mock, patch
from ipaddress import ip_address as IP
from itertools import cycle
from __builtin__ import classmethod
hostnames = 't1.pl', 't2.com', 't3.com.pl'
ips = IP(u'255.255.0.1'), IP(u'2001:DB8:abc:123::42')
host_collection = Mock()
host_collection.ips = ips
host_collection.hostnames = hostnames
empty_host_collection = Mock()
empty_host_collection.ips = ()
empty_host_collection.hostnames = ()
class DNSBLTest(unittest.TestCase):
code_item_class = {1: 'Class #1', 2: 'Class #2'}
query_suffix = 'query.suffix'
@classmethod
def setUpDNSBLInstance(cls):
''' Create DNSBL instance used for testing '''
cls.dnsbl = DNSBL('test.dnsbl', cls.query_suffix, cls.code_item_class, True, True)
@classmethod
def setUpQueryPatch(cls):
''' Patch query function in spambl module
The query function was originally imported from dns.resolver module
'''
cls.patcher = patch('spambl.query')
cls.mocked_query = cls.patcher.start()
def setUpQuerySideEffect(self, nxdomain = False):
''' Set up side effect of patched query
:param nxdomain: if True, the side effect will be raising NXDOMAIN exception, otherwise
it will be an iterator cycling through supported return values
'''
side_effects = []
for n in self.code_item_class:
m = Mock()
m.to_text.side_effect = '127.0.0.%d' % n
side_effects.append([m])
self.mocked_query.side_effect = NXDOMAIN('test NXDOMAIN exception') if nxdomain else cycle(side_effects)
@classmethod
def setUpClass(cls):
cls.setUpDNSBLInstance()
cls.setUpQueryPatch()
def testGetClassification(self):
''' Test get_classification method of DNSBL instance '''
msg = 'The expected value {} is not equal to received value {}'
for key, value in self.code_item_class.iteritems():
actual = self.dnsbl.get_classification(key)
self.assertEqual(actual, value, msg.format(value, actual))
self.assertRaises(UnknownCodeError, self.dnsbl.get_classification, 4)
def testContainsAny(self):
self.setUpQuerySideEffect()
self.assertTrue(self.dnsbl.contains_any(host_collection))
self.setUpQuerySideEffect(True)
self.assertFalse(self.dnsbl.contains_any(empty_host_collection))
self.assertFalse(self.dnsbl.contains_any(host_collection))
def testLookup(self):
self.setUpQuerySideEffect()
actual_host_strings = [h.host for h in self.dnsbl.lookup(host_collection)]
expected_host_strings = [n for n in ips + hostnames]
self.assertSequenceEqual(actual_host_strings, expected_host_strings)
self.setUpQuerySideEffect(True)
self.assertSequenceEqual(self.dnsbl.lookup(empty_host_collection), [])
self.assertSequenceEqual(self.dnsbl.lookup(host_collection), [])
@classmethod
def tearDownClass(cls):
cls.patcher.stop()
class HpHostsTest(unittest.TestCase):
''' Tests HpHosts methods '''
classification = '[TEST]'
@classmethod
def setUpClass(cls):
cls.hp_hosts = HpHosts('spambl_test_suite')
cls.patcher = patch('spambl.get')
cls.mocked_get = cls.patcher.start()
def prepareGetReturnValue(self, listed, classification = False):
''' Set up return value of get
:param listed: if True, the content will contain 'Listed' string, else it will contain 'Not listed'
:param classification: if True, a classification will be added to the content
'''
if listed:
c = self.classification if classification else ''
content = ','.join(('Listed', c))
else:
content = 'Not listed'
self.mocked_get.return_value.content = content
def testContains(self):
''' Test __contains__ method '''
for listed in True, False:
self.prepareGetReturnValue(listed)
for k in ips:
self.assertEqual(k in self.hp_hosts, listed)
def testLookup(self):
''' Test lookup method'''
self.prepareGetReturnValue(True, True)
for host in ips + hostnames:
self.assertEqual(self.hp_hosts.lookup(host).host, host)
self.prepareGetReturnValue(False)
for host in ips + hostnames:
self.assertEqual(self.hp_hosts.lookup(host), None)
@classmethod
def tearDownClass(cls):
cls.patcher.stop()
class DNSBLServiceTest(unittest.TestCase):
code_item_class = {1: 'Class #1', 2: 'Class #2'}
@classmethod
def setUpClass(cls):
cls.dnsbl_service = DNSBLService('test_service', 'test.suffix', cls.code_item_class, True, True)
def testGetClassification(self):
''' Test get_classification method of DNSBL instance '''
for key, value in self.code_item_class.iteritems():
actual = self.dnsbl_service.get_classification(key)
self.assertEqual(actual, value)
self.assertRaises(UnknownCodeError, self.dnsbl_service.get_classification, 4)
@patch('spambl.query')
def testQuery(self, mocked_query):
''' Test query method
        The method is tested against a set of host values, which are expected to be recognized
        as spam or not, depending on the configuration of the side effect of the mocked query function.
        :param mocked_query: a patched instance of the query function

'''
inverted_ips = '1.0.255.255', '2.4.0.0.0.0.0.0.0.0.0.0.0.0.0.0.3.2.1.0.c.b.a.0.8.b.d.0.1.0.0.2'
values = hostnames + inverted_ips
side_effects = []
for n in self.code_item_class:
m = Mock()
m.to_text.return_value = '127.0.0.%d' % n
side_effects.append([m])
mocked_query.side_effect = cycle(side_effects)
return_code_iterator = cycle(self.code_item_class.keys())
for v in values:
self.assertEqual(self.dnsbl_service.query(v), next(return_code_iterator))
mocked_query.side_effect = NXDOMAIN('test NXDOMAIN exception')
for v in values:
self.assertEqual(self.dnsbl_service.query(v), None)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Python
| 0.000001
|
@@ -5232,32 +5232,132 @@
True)%0A %0A
+ cls.patcher = patch('spambl.query')%0A cls.mocked_query = cls.patcher.start()%0A %0A
def testGetC
@@ -5727,35 +5727,8 @@
%0A
- @patch('spambl.query')%0A
@@ -5749,22 +5749,8 @@
self
-, mocked_query
):%0A
@@ -6418,32 +6418,37 @@
%0A
+self.
mocked_query.sid
@@ -6690,16 +6690,21 @@
+self.
mocked_q
@@ -6848,32 +6848,117 @@
query(v), None)%0A
+ %0A @classmethod%0A def tearDownClass(cls):%0A cls.patcher.stop()%0A
%0Aif __na
|
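The refactor above moves the query patch from a per-test @patch decorator to a class-wide patcher started in setUpClass and stopped in tearDownClass. The general shape of that pattern, shown here with the standard-library unittest.mock rather than the record's mock package; the patch target is reused from the record and assumed importable:

import unittest
from unittest import mock

class QueryPatchedTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # One patcher for the whole TestCase; every test shares the same mock.
        cls.patcher = mock.patch('spambl.query')
        cls.mocked_query = cls.patcher.start()

    @classmethod
    def tearDownClass(cls):
        # Always stop the patcher so later tests see the real function again.
        cls.patcher.stop()

    def test_query_is_patched(self):
        self.mocked_query.return_value = 'stubbed'
        self.assertEqual(self.mocked_query(), 'stubbed')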
9f8b3afec4f3002eaca53437ca0e4915d6da01a6
|
Fix DNSBLServiceTest
|
test/spambl_test.py
|
test/spambl_test.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
from spambl import DNSBL, UnknownCodeError, NXDOMAIN, HpHosts, DNSBLService
from mock import Mock, patch
from ipaddress import ip_address as IP
from itertools import cycle
from __builtin__ import classmethod
hostnames = 't1.pl', 't2.com', 't3.com.pl'
ips = IP(u'255.255.0.1'), IP(u'2001:DB8:abc:123::42')
host_collection = Mock()
host_collection.ips = ips
host_collection.hostnames = hostnames
empty_host_collection = Mock()
empty_host_collection.ips = ()
empty_host_collection.hostnames = ()
class DNSBLTest(unittest.TestCase):
code_item_class = {1: 'Class #1', 2: 'Class #2'}
query_suffix = 'query.suffix'
@classmethod
def setUpDNSBLInstance(cls):
''' Create DNSBL instance used for testing '''
cls.dnsbl = DNSBL('test.dnsbl', cls.query_suffix, cls.code_item_class, True, True)
@classmethod
def setUpQueryPatch(cls):
''' Patch query function in spambl module
The query function was originally imported from dns.resolver module
'''
cls.patcher = patch('spambl.query')
cls.mocked_query = cls.patcher.start()
def setUpQuerySideEffect(self, nxdomain = False):
''' Set up side effect of patched query
:param nxdomain: if True, the side effect will be raising NXDOMAIN exception, otherwise
it will be an iterator cycling through supported return values
'''
side_effects = []
for n in self.code_item_class:
m = Mock()
m.to_text.side_effect = '127.0.0.%d' % n
side_effects.append([m])
self.mocked_query.side_effect = NXDOMAIN('test NXDOMAIN exception') if nxdomain else cycle(side_effects)
@classmethod
def setUpClass(cls):
cls.setUpDNSBLInstance()
cls.setUpQueryPatch()
def testGetClassification(self):
''' Test get_classification method of DNSBL instance '''
msg = 'The expected value {} is not equal to received value {}'
for key, value in self.code_item_class.iteritems():
actual = self.dnsbl.get_classification(key)
self.assertEqual(actual, value, msg.format(value, actual))
self.assertRaises(UnknownCodeError, self.dnsbl.get_classification, 4)
def testContainsAny(self):
self.setUpQuerySideEffect()
self.assertTrue(self.dnsbl.contains_any(host_collection))
self.setUpQuerySideEffect(True)
self.assertFalse(self.dnsbl.contains_any(empty_host_collection))
self.assertFalse(self.dnsbl.contains_any(host_collection))
def testLookup(self):
self.setUpQuerySideEffect()
actual_host_strings = [h.host for h in self.dnsbl.lookup(host_collection)]
expected_host_strings = [n for n in ips + hostnames]
self.assertSequenceEqual(actual_host_strings, expected_host_strings)
self.setUpQuerySideEffect(True)
self.assertSequenceEqual(self.dnsbl.lookup(empty_host_collection), [])
self.assertSequenceEqual(self.dnsbl.lookup(host_collection), [])
@classmethod
def tearDownClass(cls):
cls.patcher.stop()
class HpHostsTest(unittest.TestCase):
''' Tests HpHosts methods '''
classification = '[TEST]'
@classmethod
def setUpClass(cls):
cls.hp_hosts = HpHosts('spambl_test_suite')
cls.patcher = patch('spambl.get')
cls.mocked_get = cls.patcher.start()
def prepareGetReturnValue(self, listed, classification = False):
''' Set up return value of get
:param listed: if True, the content will contain 'Listed' string, else it will contain 'Not listed'
:param classification: if True, a classification will be added to the content
'''
if listed:
c = self.classification if classification else ''
content = ','.join(('Listed', c))
else:
content = 'Not listed'
self.mocked_get.return_value.content = content
def testContains(self):
''' Test __contains__ method '''
for listed in True, False:
self.prepareGetReturnValue(listed)
for k in ips:
self.assertEqual(k in self.hp_hosts, listed)
def testLookup(self):
''' Test lookup method'''
self.prepareGetReturnValue(True, True)
for host in ips + hostnames:
self.assertEqual(self.hp_hosts.lookup(host).host, host)
self.prepareGetReturnValue(False)
for host in ips + hostnames:
self.assertEqual(self.hp_hosts.lookup(host), None)
@classmethod
def tearDownClass(cls):
cls.patcher.stop()
class DNSBLServiceTest(unittest.TestCase):
code_item_class = {1: 'Class #1', 2: 'Class #2'}
@classmethod
def setUpClass(cls):
cls.dnsbl_service = DNSBL('test_service', 'test.suffix', cls.code_item_class, True, True)
def testGetClassification(self):
''' Test get_classification method of DNSBL instance '''
for key, value in self.code_item_class.iteritems():
actual = self.dnsbl_service.get_classification(key)
self.assertEqual(actual, value)
self.assertRaises(UnknownCodeError, self.dnsbl_service.get_classification, 4)
@patch('spambl.query')
def queryTest(self, mocked_query):
inverted_ips = '1.0.255.255', '2.4.0.0.0.0.0.0.0.0.0.0.0.0.0.0.3.2.1.0.c.b.a.0.8.b.d.0.1.0.0.2'
return_codes = cycle(self.code_item_class.keys())
mocked_query.side_effect = return_codes
self.assertEqual('test.com', next(return_codes))
self.assertEqual('1.0.0.127', next(return_codes))
mocked_query.side_effect = NXDOMAIN('test NXDOMAIN exception')
self.assertEqual('test.com', None)
self.assertEqual('1.0.0.127', None)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Python
| 0
|
@@ -5155,24 +5155,31 @@
vice = DNSBL
+Service
('test_servi
|
dde25723c845ad4f6df72729e76174210b16e71c
|
remove redundant code
|
test/test_client.py
|
test/test_client.py
|
import pytest
from pyrelatics.client import *
def test_relaticsapi_raise_exception_with_dummy_url():
with pytest.raises(URLError):
relaticsapi = RelaticsAPI('dummy_company', 'dummy_env_id', 'dummy_wid')
def test_relaticsapi_initializes_properties():
relaticsapi = RelaticsAPI('kb', 'dummy_env_id', 'dummy_wid')
assert relaticsapi.environment_id == 'dummy_env_id'
assert relaticsapi.workspace_id == 'dummy_wid'
assert relaticsapi.__repr__() != ''
def test_relaticsapi_login_returns_token_or_falsy_message():
relaticsapi = RelaticsAPI('kb', 'dummy_env_id', 'dummy_wid')
assert type(relaticsapi.login('dummy_name', 'dummy_password')) != str
with pytest.raises(AttributeError):
relaticsapi.CreateInstancelement('asdas')
def test_relaticsapi_login_dummy_token_raises_exeption():
relaticsapi = RelaticsAPI('kb', 'dummy_env_id', 'dummy_wid')
with pytest.raises(RelaticsException):
relaticsapi.CreateInstanceElement('asdas')
def test_get_result():
relaticsapi = RelaticsAPI('kb', 'dummy_env_id', 'dummy_wid')
relaticsapi.token = '123123'
assert isinstance(relaticsapi.GetResult('dummy_operation', 'dummy_entry_code'), object)
def test_invoke_method_string():
relaticsapi = RelaticsAPI('kb', 'dummy_env_id', 'dummy_wid')
relaticsapi.token = '123123'
assert isinstance(relaticsapi.CreateInstanceElement('dummyCOR'), object)
def test_invoke_method_tuple():
relaticsapi = RelaticsAPI('kb', 'dummy_env_id', 'dummy_wid')
relaticsapi.token = '123123'
assert isinstance(relaticsapi.CreateInstanceRelation(('dummyR1', 'dummyR2', 'dummyRR')), object)
def test_Import():
relaticsapi = RelaticsAPI('kb', 'dummy_env_id', 'dummy_wid')
relaticsapi.token = '123123'
assert isinstance(relaticsapi.Import('dummy_operation', 'dummy', data=[]), object)
|
Python
| 0.999999
|
@@ -138,30 +138,16 @@
%0A
- relaticsapi =
Relatic
|
9fa55bc43a3f83a57318799ba8b9f2769676bd44
|
Include the tags module tests in the full library testsuite.
|
test/test_flvlib.py
|
test/test_flvlib.py
|
import unittest
import test_primitives, test_astypes, test_helpers
def get_suite():
modules = (test_primitives, test_astypes, test_helpers)
suites = [unittest.TestLoader().loadTestsFromModule(module) for
module in modules]
return unittest.TestSuite(suites)
def main():
unittest.TextTestRunner(verbosity=2).run(get_suite())
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -59,16 +59,27 @@
_helpers
+, test_tags
%0A%0Adef ge
@@ -147,16 +147,27 @@
_helpers
+, test_tags
)%0A su
|
1598a865094591cbfd1e4e37eddb905fffd1d9b0
|
improve and extend unit tests for Logfile
|
test/test_parser.py
|
test/test_parser.py
|
# This file is part of cclib (http://cclib.github.io), a library for parsing
# and interpreting the results of computational chemistry packages.
#
# Copyright (C) 2015, the cclib development team
#
# The library is free software, distributed under the terms of
# the GNU Lesser General Public version 2.1 or later. You should have
# received a copy of the license along with cclib. You can also access
# the full license online at http://www.gnu.org/copyleft/lgpl.html.
"""Run parser unit tests for cclib."""
from __future__ import print_function
import unittest
import numpy
import cclib
class LogfileTest(unittest.TestCase):
"""Unit tests for Logfile class."""
logfile = cclib.parser.logfileparser.Logfile('')
def test_float(self):
self.assertTrue(self.logfile.float("1.0"), 1.0)
self.assertTrue(self.logfile.float("1.2345E+02"), 123.45)
self.assertTrue(self.logfile.float("1.2345D+02"), 123.45)
self.assertTrue(self.logfile.float("*****"), numpy.nan)
if __name__ == "__main__":
unittest.main()
|
Python
| 0
|
@@ -760,79 +760,256 @@
-self.assertTrue(self.logfile.float(%221.0%22), 1.0)%0A self.assertTrue
+%22%22%22Are floats converted from strings correctly?%22%22%22%0A self.assertEqual(self.logfile.float(%220.0%22), 0.0)%0A self.assertEqual(self.logfile.float(%221.0%22), 1.0)%0A self.assertEqual(self.logfile.float(%22-1.0%22), -1.0)%0A self.assertEqual
(sel
@@ -1058,36 +1058,37 @@
self.assert
-True
+Equal
(self.logfile.fl
@@ -1138,16 +1138,28 @@
ertTrue(
+numpy.isnan(
self.log
@@ -1175,25 +1175,236 @@
t(%22*
+%22)))%0A self.assertTrue(numpy.isnan(self.logfile.float(%22
****
+*
%22)
-, numpy.nan
+))%0A%0A def test_normalisesym(self):%0A %22%22%22Does this method return ERROR in base class?%22%22%22%0A self.assertTrue(%22ERROR%22 in self.logfile.normalisesym(%22%22)
)%0A%0A%0A
|
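The extended test above exercises a float() helper that must accept plain floats, Fortran-style 'D' exponents, and all-asterisk overflow fields. A sketch of logic that would satisfy those assertions, not cclib's actual implementation:

import numpy

def parse_qc_float(text):
    # Fields of asterisks mean the value overflowed its column -> NaN.
    if set(text) == {'*'}:
        return numpy.nan
    # Fortran prints exponents with 'D'; Python's float() wants 'E'.
    return float(text.replace('D', 'E').replace('d', 'e'))

assert parse_qc_float('1.2345D+02') == 123.45
assert numpy.isnan(parse_qc_float('*****'))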
5fa101cfa8f311079742552f451e1194d975bb62
|
test ttlser drop the versioninfo from the last line to make comparison to good.ttl simpler
|
test/test_ttlser.py
|
test/test_ttlser.py
|
import inspect
import os
import random
import rdflib
import re
import subprocess
import sys
import unittest
rdflib.plugin.register('nifttl', rdflib.serializer.Serializer, 'pyontutils.ttlser', 'CustomTurtleSerializer')
class TestTtlser(unittest.TestCase):
def setUp(self):
goodpath = 'test/good.ttl'
self.badpath = 'test/nasty.ttl'
actualpath = 'test/actual.ttl'
self.actualpath2 = 'test/actual2.ttl'
with open(goodpath, 'rb') as f:
self.good = f.read()
self.actual = self.serialize()
with open(actualpath, 'wb') as f:
f.write(self.actual)
def make_ser(self):
header = ('import rdflib\n'
'import sys\n'
"rdflib.plugin.register('nifttl', rdflib.serializer.Serializer, 'pyontutils.ttlser', 'CustomTurtleSerializer')\n"
'class Thing:\n'
' badpath = \'%s\'\n') % self.badpath
src = inspect.getsource(self.serialize)
after = 't = Thing()\nsys.stdout.buffer.write(t.serialize())\n'
return header + src + after
def serialize(self):
graph = rdflib.Graph()
graph.parse(self.badpath, format='turtle')
actual = graph.serialize(format='nifttl')
return actual
def test_ser(self):
assert self.actual == self.good
def test_deterministic(self):
nofail = True
env = os.environ.copy()
seed = None # 'random'
for _ in range(10):
if seed is not None:
env['PYTHONHASHSEED'] = str(seed)
else:
env.pop('PYTHONHASHSEED', None)
cmd_line = [sys.executable, '-c', self.make_ser()]
p = subprocess.Popen(cmd_line, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
env=env)
out, err = p.communicate()
out = re.sub(br"\[\d+ refs, \d+ blocks\]\r?\n?", b"", out) # nose can't import strip_python_stderr from any test submodule :/
out = out.split(b'\n', 1)[1]
actual2 = out
if self.actual != actual2:
print('Determinism failure!')
nofail = False
with open(self.actualpath2, 'wb') as f:
f.write(actual2)
break
assert nofail
|
Python
| 0
|
@@ -1264,16 +1264,79 @@
ifttl')%0A
+ actual = actual.rsplit(b'%5Cn',2)%5B0%5D # drop versioninfo%0A
|
2250fcaefc1b69116684c72c559a44ee1d6721b6
|
change component count back to 4 in dp 2-cluster test
|
test_dp_2cluster.py
|
test_dp_2cluster.py
|
from dpconverge.data_set import DataSet
from sklearn.datasets.samples_generator import make_blobs
n_features = 2
points_per_feature = 100
centers = [[2, 2], [4, 4]]
ds = DataSet(parameter_count=2)
for i, center in enumerate(centers):
X, y = make_blobs(
n_samples=points_per_feature,
n_features=n_features,
centers=center,
cluster_std=0.3,
random_state=5
)
ds.add_blob(i, X)
ds.plot(ds.classifications, x_lim=[0, 6], y_lim=[0, 6])
ds.cluster(
component_count=2,
burn_in=2,
iteration_count=50,
random_seed=123
)
ds.plot_iteration_traces(0)
ds.plot_iteration_traces(1)
ds.plot_iteration_traces(2)
ds.plot_iteration_traces(3)
|
Python
| 0.000001
|
@@ -513,17 +513,17 @@
t_count=
-2
+4
,%0A bu
|
3ded2f462c859542b871c1311ce3b801f41370e7
|
use get_model from apps
|
adhocracy4/api/mixins.py
|
adhocracy4/api/mixins.py
|
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.db.models.loading import get_model
from django.http import Http404
from django.shortcuts import get_object_or_404
from adhocracy4.modules import models as module_models
class ContentTypeMixin:
"""
    Should be used in combination with ContentTypeRouter to fetch and
    decode the content_type and object_pk of a request.
    Currently only numeric object_pk values are supported.
"""
content_type_filter = []
def dispatch(self, request, *args, **kwargs):
content_type = kwargs.get('content_type', '')
object_pk = kwargs.get('object_pk', '')
if not content_type.isdigit() or not object_pk.isdigit():
raise Http404
else:
self.content_type_id = int(content_type)
self.object_pk = int(object_pk)
current_ct_strs = (
self.content_type.app_label,
self.content_type.model,
)
if current_ct_strs not in self.content_type_filter:
raise Http404
return super().dispatch(request, *args, **kwargs)
@property
def content_type(self):
try:
return ContentType.objects.get_for_id(self.content_type_id)
except ContentType.DoesNotExist:
raise Http404
@property
def content_object(self):
return get_object_or_404(
self.content_type.model_class(),
pk=self.object_pk
)
class ModuleMixin:
"""
Should be used in combination with ModuleRouter to fetch the module.
"""
def dispatch(self, request, *args, **kwargs):
self.module_pk = kwargs.get('module_pk', '')
return super().dispatch(request, *args, **kwargs)
@property
def module(self):
return get_object_or_404(
module_models.Module,
pk=self.module_pk
)
class OrganisationMixin:
"""
Should be used in combination with OrganisationRouter to fetch the
organisation.
"""
def dispatch(self, request, *args, **kwargs):
self.organisation_pk = kwargs.get('organisation_pk', '')
return super().dispatch(request, *args, **kwargs)
@property
def organisation(self):
return get_object_or_404(
get_model(settings.A4_ORGANISATIONS_MODEL),
pk=self.organisation_pk
)
|
Python
| 0.000001
|
@@ -1,20 +1,49 @@
+from django.apps import apps%0A
from django.conf imp
@@ -118,54 +118,8 @@
ype%0A
-from django.db.models.loading import get_model
%0Afro
@@ -2288,16 +2288,21 @@
+apps.
get_mode
|
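django.db.models.loading was removed in Django 1.9, which is what the change above works around: the swappable organisation model is now looked up through the app registry. A minimal sketch of that lookup; the 'app_label.ModelName' argument is an example, in practice it comes from settings.A4_ORGANISATIONS_MODEL:

from django.apps import apps

def organisation_model(label):
    # label is an "app_label.ModelName" string, e.g. the value of
    # settings.A4_ORGANISATIONS_MODEL; apps.get_model resolves it lazily.
    return apps.get_model(label)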
97f07316471f28d9f56987837accae01f121c180
|
raise Exception if vBNG service fails to return a routeable_subnet
|
xos/observers/vbng/steps/sync_vbngtenant.py
|
xos/observers/vbng/steps/sync_vbngtenant.py
|
import os
import requests
import socket
import sys
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.syncstep import SyncStep
from observer.ansible import run_template_ssh
from core.models import Service
from cord.models import VCPEService, VCPETenant, VBNGTenant, VBNGService
from hpc.models import HpcService, CDNPrefix
from util.logger import Logger, logging
# VBNG_API = "http://10.0.3.136:8181/onos/virtualbng/privateip/"
# hpclibrary will be in steps/..
parentdir = os.path.join(os.path.dirname(__file__),"..")
sys.path.insert(0,parentdir)
logger = Logger(level=logging.INFO)
class SyncVBNGTenant(SyncStep):
provides=[VCPETenant]
observes=VCPETenant
requested_interval=0
def __init__(self, **args):
SyncStep.__init__(self, **args)
def fetch_pending(self, deleted):
if (not deleted):
objs = VBNGTenant.get_tenant_objects().filter(Q(enacted__lt=F('updated')) | Q(enacted=None),Q(lazy_blocked=False))
else:
objs = VBNGTenant.get_deleted_tenant_objects()
return objs
def defer_sync(self, o, reason):
logger.info("defer object %s due to %s" % (str(o), reason))
raise Exception("defer object %s due to %s" % (str(o), reason))
def get_vbng_service(self, o):
if not o.provider_service:
raise Exception("vBNG tenant %s has no provider_service" % str(o.id))
services = VBNGService.get_service_objects().filter(id = o.provider_service.id)
if not services:
raise Exception("vBNG tenant %s is associated with the wrong kind of provider_service" % str(o.id))
return services[0]
def get_vbng_url(self, o):
service = o.get_vbng_service()
if not service.vbng_url:
raise Exception("vBNG service does not have vbng_url set")
return service.vbng_url
def get_private_interface(self, o):
vcpes = VCPETenant.get_tenant_objects().all()
vcpes = [x for x in vcpes if (x.vbng is not None) and (x.vbng.id == o.id)]
if not vcpes:
raise Exception("No vCPE tenant is associated with vBNG %s" % str(o.id))
if len(vcpes)>1:
raise Exception("More than one vCPE tenant is associated with vBNG %s" % str(o.id))
vcpe = vcpes[0]
sliver = vcpe.sliver
if not sliver:
raise Exception("No sliver associated with vBNG %s" % str(o.id))
if not vcpe.wan_ip:
self.defer_sync(o, "does not have a WAN IP yet")
if not vcpe.wan_mac:
# this should never happen; WAN MAC is computed from WAN IP
self.defer_sync(o, "does not have a WAN MAC yet")
return (vcpe.wan_ip, vcpe.wan_mac, vcpe.sliver.node.name)
def sync_record(self, o):
logger.info("sync'ing VBNGTenant %s" % str(o))
if not o.routeable_subnet:
(private_ip, private_mac, private_hostname) = self.get_private_interface(o)
logger.info("contacting vBNG service to request mapping for private ip %s mac %s host %s" % (private_ip, private_mac, private_hostname) )
url = self.get_vbng_url(o) + "privateip/%s/%s/%s" % (private_ip, private_mac, private_hostname)
logger.info( "vbng url: %s" % url )
r = requests.post(url )
if (r.status_code != 200):
raise Exception("Received error from bng service (%d)" % r.status_code)
logger.info("received public IP %s from private IP %s" % (r.text, private_ip))
o.routeable_subnet = r.text
o.mapped_ip = private_ip
o.mapped_mac = private_mac
o.mapped_hostname = private_hostname
o.save()
def delete_record(self, o):
logger.info("deleting VBNGTenant %s" % str(o))
if o.mapped_ip:
private_ip = o.mapped_ip
logger.info("contacting vBNG service to delete private ip %s" % private_ip)
r = requests.delete(self.get_vbng_url(o) + "privateip/%s" % private_ip, )
if (r.status_code != 200):
raise Exception("Received error from bng service (%d)" % r.status_code)
|
Python
| 0.000001
|
@@ -1722,17 +1722,20 @@
rvice =
-o
+self
.get_vbn
@@ -1744,16 +1744,17 @@
service(
+o
)%0A
@@ -3536,16 +3536,151 @@
te_ip))%0A
+%0A if r.text == %220%22:%0A raise Exception(%22VBNG service failed to return a routeable_subnet (probably ran out)%22)%0A%0A
|
59b9e41ebe70df35902a7d328ad879bd988aa7c0
|
remove getServerList -- it belongs to VLDbDAO
|
afs/dao/FileServerDAO.py
|
afs/dao/FileServerDAO.py
|
import re,string,os,sys
import afs.dao.bin
from afs.model.FileServer import FileServer
from afs.model.Partition import Partition
from afs.util import afsutil
class FileServerDAO() :
"""
Provides Information about a FileServer
"""
def __init__(self) :
pass
def getServer(self,servername,cellname):
"""
List of Servers
"""
CmdList=[afs.dao.bin.VOSBIN,"listaddrs", "-host","%s" % servername, "-printuuid", "-cell","%s" % cellname ]
rc,output,outerr=afs.dao.bin.execute(CmdList,dryrun=0,lethal=1)
if rc :
return rc,output,outerr
server = FileServer()
for i in range (0,len(output)) :
if output[i].startswith("UUID:"):
splits = output[i].split()
server.uuid = splits[1]
i = i +1
server.name = output[i]
return server
def getServerList(self, cellname):
"""
List of Servers
"""
CmdList=[afs.dao.bin.VOSBIN,"listaddrs", "-printuuid", "-cell","%s" % cellname ]
rc,output,outerr=afs.dao.bin.execute(CmdList,dryrun=0,lethal=1)
if rc :
return rc,output,outerr
serverList = []
for i in range (0,len(output)) :
if output[i].startswith("UUID:"):
server = FileServer()
splits = output[i].split()
server.uuid = splits[1]
i = i +1
server.name = output[i]
serverList.append(server)
return serverList
def getPartList(self, servername, cellname) :
"""
return attribute Partitions
"""
RX=re.compile("Free space on partition /vicep(\S+): (\d+) K blocks out of total (\d+)")
CmdList=[afs.dao.bin.VOSBIN,"partinfo", "%s" % servername, "-cell","%s" % cellname]
rc,output,outerr=afs.dao.bin.execute(CmdList,dryrun=0,lethal=1)
if rc :
return rc,output,outerr
partitions= []
for line in output :
m=RX.match(line)
if not m :
return rc,"Error parsing output %s" % line
part = Partition()
part.name, part.free, part.total=m.groups()
part.name = afsutil.canonicalizePartition(part.name)
part.used = long(part.total)-long(part.free)
partitions.append(part)
return partitions
def getVolIdList(self, part, server, cell):
"""
return Volumes in partitions
"""
RX=re.compile("^(\d+)")
if part:
CmdList=[afs.dao.bin.VOSBIN,"listvol", "-server", "%s" % server, "-partition", "%s" % part ,"-fast" , "-cell","%s" % cell]
rc,output,outerr=afs.dao.bin.execute(CmdList,dryrun=0,lethal=1)
if rc :
return rc,output,outerr
volIds = {}
for line in output :
m=RX.match(line)
if m :
vid = m.groups()
volIds[vid] = vid
return volIds
def getServerByName(self, name, cell):
pass
def getServerByUUID(self, uuid, cell):
pass
def getServerByIP(self, ip, cell):
pass
|
Python
| 0.000002
|
@@ -153,17 +153,16 @@
fsutil%0A%0A
-%0A
class Fi
@@ -201,36 +201,30 @@
-Provides Information about a
+low level access to a
Fil
@@ -302,782 +302,8 @@
%0A
- def getServer(self,servername,cellname):%0A %22%22%22%0A List of Servers%0A %22%22%22%0A %0A CmdList=%5Bafs.dao.bin.VOSBIN,%22listaddrs%22, %22-host%22,%22%25s%22 %25 servername, %22-printuuid%22, %22-cell%22,%22%25s%22 %25 cellname %5D%0A rc,output,outerr=afs.dao.bin.execute(CmdList,dryrun=0,lethal=1)%0A if rc :%0A return rc,output,outerr%0A %0A server = FileServer()%0A for i in range (0,len(output)) :%0A if output%5Bi%5D.startswith(%22UUID:%22):%0A splits = output%5Bi%5D.split()%0A server.uuid = splits%5B1%5D%0A i = i +1%0A server.name = output%5Bi%5D %0A %0A return server%0A %0A
|
22800afbb6186c45dc806c933fcb502108a92251
|
Update airbrake client for Django 2.0
|
airbrake/utils/client.py
|
airbrake/utils/client.py
|
from django.conf import settings
from django.core.urlresolvers import resolve
import sys
from six.moves import urllib
import traceback
from lxml import etree
class Client(object):
API_URL = '%s://airbrake.io/notifier_api/v2/notices'
ERRORS = {
403: "Cannot use SSL",
422: "Invalid XML sent to Airbrake",
500: "Airbrake has braked too hard",
}
DEFAULTS = {
'TIMEOUT': 5,
'USE_SSL': False,
}
@property
def url(self):
scheme = 'http'
if self.settings.get('USE_SSL', False):
scheme = 'https'
if 'API_URL' in self.settings:
url = self.settings['API_URL'] + '/notifier_api/v2/notices'
else:
url = Client.API_URL % scheme
return url
@property
def settings(self):
if getattr(self, '_settings', None):
return self._settings
self._settings = Client.DEFAULTS
self._settings.update(getattr(settings, 'AIRBRAKE', {}))
return self._settings
def notify(self, exception=None, request=None):
headers = {
'Content-Type': 'text/xml'
}
payload = self._generate_xml(exception=exception, request=request)
req = urllib.request.Request(self.url, payload.encode('utf8'), headers)
resp = urllib.request.urlopen(req, timeout=self.settings['TIMEOUT'])
status = resp.getcode()
if status == 200:
return True
elif status in Client.ERRORS:
raise Exception(Client.ERRORS[status])
def _generate_xml(self, exception=None, request=None):
_,_,trace = sys.exc_info()
notice_em = etree.Element('notice', version='2.0')
tb = traceback.extract_tb(trace)
api_key = etree.SubElement(notice_em, 'api-key').text = self.settings['API_KEY']
notifier_em = etree.SubElement(notice_em, 'notifier')
etree.SubElement(notifier_em, 'name').text = 'django-airbrake'
etree.SubElement(notifier_em, 'version').text = '0.0.2'
etree.SubElement(notifier_em, 'url').text = 'http://example.com'
if request:
request_em = etree.SubElement(notice_em, 'request')
if request.is_secure():
scheme = 'https'
else:
scheme = 'http'
url = '%s://%s%s' % (scheme, request.get_host(),
request.get_full_path())
etree.SubElement(request_em, 'url').text = str(url)
cb,_,_ = resolve(request.path)
etree.SubElement(request_em, 'component').text = str(cb.__module__)
etree.SubElement(request_em, 'action').text = str(cb.__name__)
if len(request.POST):
params_em = etree.SubElement(request_em, 'params')
for key, val in request.POST.items():
var = etree.SubElement(params_em, 'var')
var.set('key', str(key))
var.text = str(val)
session = request.session.items()
if len(session):
session_em = etree.SubElement(request_em, 'session')
for key, val in session:
var = etree.SubElement(session_em, 'var')
var.set('key', str(key))
var.text = str(val)
if exception:
error_em = etree.SubElement(notice_em, 'error')
etree.SubElement(error_em, 'class').text = str(exception.__class__.__name__)
etree.SubElement(error_em, 'message').text = str(exception)
backtrace_em = etree.SubElement(error_em, 'backtrace')
for line in tb:
etree.SubElement(backtrace_em, 'line',
file=str(line[0]),
number=str(line[1]),
method=str(line[2]))
env_em = etree.SubElement(notice_em, 'server-environment')
etree.SubElement(env_em, 'environment-name').text = self.settings.get('ENVIRONMENT', 'development')
return '<?xml version="1.0" encoding="UTF-8"?>%s' % etree.tostring(notice_em, encoding="unicode")
|
Python
| 0
|
@@ -30,53 +30,8 @@
ngs%0A
-from django.core.urlresolvers import resolve%0A
impo
@@ -83,16 +83,16 @@
aceback%0A
-
from lxm
@@ -106,16 +106,153 @@
t etree%0A
+try:%0A%C2%A0%C2%A0%C2%A0 # Django 2.0%0A%C2%A0%C2%A0%C2%A0 from django.urls.base import resolve%0Aexcept:%0A%C2%A0%C2%A0%C2%A0 # Django 1.0%0A%C2%A0%C2%A0%C2%A0 from django.core.urlresolvers import resolve%0A
%0A%0Aclass
|
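django.core.urlresolvers was dropped in Django 2.0, so the fix above falls back between the old and new import locations. A cleaner equivalent of that shim, without the non-breaking-space indentation visible in the diff, importing from the public django.urls module rather than django.urls.base:

try:
    # Django >= 2.0 (django.urls exists from 1.10 onward)
    from django.urls import resolve
except ImportError:
    # Django < 2.0
    from django.core.urlresolvers import resolve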
2ee45754c73a344d2cdbc0007a5a7877ba45288e
|
improve output, calculate frequencies at non-singletons
|
scripts/calculate_methylation_frequency.py
|
scripts/calculate_methylation_frequency.py
|
#! /usr/bin/env python
import math
import sys
import csv
import argparse
from collections import namedtuple
class SiteStats:
def __init__(self):
self.num_reads = 0
self.posterior_methylated = 0
self.called_sites = 0
self.called_sites_methylated = 0
parser = argparse.ArgumentParser( description='Calculate methylation frequency at genomic CpG sites')
parser.add_argument('-c', '--call-threshold', type=float, required=False, default=0)
parser.add_argument('-i', '--input', type=str, required=False)
args = parser.parse_args()
assert(args.call_threshold is not None)
sites = dict()
if args.input:
in_fh = open(args.input)
else:
in_fh = sys.stdin
csv_reader = csv.DictReader(in_fh, delimiter='\t')
for record in csv_reader:
num_sites = int(record['num_cpgs'])
# skip non-singletons for now
if num_sites > 1:
continue
key = record['chromosome'] + ":" + record['start'] + "-" + record['end']
if key not in sites:
sites[key] = SiteStats()
llr = float(record['log_lik_ratio'])
# is the evidence strong enough at this site to make a call?
if abs(llr) >= args.call_threshold:
sites[key].num_reads += 1
sites[key].called_sites += num_sites
if llr > 0:
sites[key].called_sites_methylated += num_sites
# header
print "\t".join(["key", "called_sites", "called_sites_methylated", "methylated_frequency"])
for key in sites:
if sites[key].called_sites > 0:
f = float(sites[key].called_sites_methylated) / sites[key].called_sites
print "\t".join([str(x) for x in [key, sites[key].called_sites, sites[key].called_sites_methylated, f]])
|
Python
| 0.999204
|
@@ -141,16 +141,31 @@
t__(self
+, g_size, g_seq
):%0A
@@ -294,16 +294,79 @@
ated = 0
+%0A self.group_size = g_size%0A self.sequence = g_seq
%0A%0Aparser
@@ -543,17 +543,19 @@
default=
-0
+2.5
)%0Aparser
@@ -890,81 +890,8 @@
%5D) %0A
- # skip non-singletons for now%0A if num_sites %3E 1:%0A continue%0A
@@ -949,17 +949,17 @@
rt'%5D + %22
--
+:
%22 + reco
@@ -1024,16 +1024,54 @@
teStats(
+num_sites, record%5B'sequence'%5D.rstrip()
)%0A%0A l
@@ -1400,19 +1400,63 @@
.join(%5B%22
-key
+chromosome%22, %22start%22, %22end%22, %22num_cpgs_in_group
%22, %22call
@@ -1515,16 +1515,34 @@
equency%22
+, %22group_sequence%22
%5D)%0A%0Afor
@@ -1587,24 +1587,59 @@
_sites %3E 0:%0A
+ (c, s, e) = key.split(%22:%22)%0A
f =
@@ -1748,19 +1748,46 @@
r x in %5B
-key
+c, s, e, sites%5Bkey%5D.group_size
, sites%5B
@@ -1842,13 +1842,34 @@
lated, f
+, sites%5Bkey%5D.sequence
%5D%5D)%0A%0A
|
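With the "skip non-singletons" block removed, every CpG group now contributes num_cpgs sites per sufficiently confident read, and the reported value is still called_sites_methylated / called_sites per chromosome:start:end key. A toy worked example of that arithmetic, with numbers invented for illustration:

# Three reads cover one group of 2 CpGs; call threshold is the new default of 2.5.
llrs = [3.1, -4.0, 2.8]      # log-likelihood ratios, one per read
num_cpgs = 2
threshold = 2.5

called = sum(num_cpgs for llr in llrs if abs(llr) >= threshold)                  # 6
methylated = sum(num_cpgs for llr in llrs if abs(llr) >= threshold and llr > 0)  # 4
print(methylated / called)   # 0.666... -> methylated_frequency for this group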
6fe0ec420008bca5c5a04f51f0c1896b65d73ead
|
remove crpdate model traces
|
tnp/consent/views.py
|
tnp/consent/views.py
|
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from company.models import Company, Job, JobLocation, CRPDate, Attachment, Branch, month_list
from consent.models import PersonalDetail, EducationDetail, CGPA, UserConsent
from datetime import date
import itertools as it
def index(request):
#return render(request, 'base.html')
return HttpResponse("Aakash says hello world!")
def login_user(request):
if request.method == 'POST':
email = request.POST.get('email')
password = request.POST.get('password')
if (email):
username = User.objects.get(email=email).username
else:
username = None
user = authenticate(username=username, password=password)
if user:
if user.is_active:
login(request, user)
return HttpResponseRedirect('/consent/home')
else:
return HttpResponse("Your TnP account is disabled.")
else:
#print "Invalid login details: {0}, {1}".format(username, password)
return HttpResponse("Invalid login details supplied.")
else:
return render(request, 'consent/login_user.html', {})
def grouper(n, iterable):
"""
>>> list(grouper(3, 'ABCDEFG'))
[['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]
"""
iterable = iter(iterable)
return iter(lambda: list(it.islice(iterable, n)), [])
@login_required
def home(request):
branch = EducationDetail.objects.get(user=request.user).branch
jobs = Job.objects.filter(eligible_branches=branch).order_by('-updated_at')
print (jobs)
companies_list = []
for job in jobs:
job_dict = {}
consent = UserConsent.objects.filter(user=request.user, job=job)
if(consent and consent[0].is_valid == True):
job_dict['button_type'] = 'cancel'
else:
job_dict['button_type'] = 'apply'
job_dict["company"] = job.company.name
job_dict["designation"] = job.designation
job_dict["ctc"] = str(job.ctc)
job_dict["url"] = job.slug
if(job.created_at >= request.user.last_login):
job_dict["badge"] = 'NEW'
if(job.updated_at >= request.user.last_login):
job_dict["badge"] = 'UPDATED'
crp = job.crpdate
if (crp.datatype == 'DAT'):
crpdate_str = str(crp.date)
elif (crp.datatype == 'MON'):
crpdate_str = month_list[crp.month]
elif (crp.datatype == 'WOM'):
crpdate_str = crp + ' week of ' + crp.month
else:
crpdate_str = 'Not Available'
job_dict["date"] = crpdate_str
companies_list.append(job_dict)
companies_list = list(grouper(3,companies_list))
print (companies_list)
return render(request, 'consent/home.html', {'companies_list': companies_list})
@login_required
def apply(request):
job_slug = request.GET['job']
job = Job.objects.get(slug=job_slug)
obj, created = UserConsent.objects.update_or_create(user=request.user, job=job, defaults={'is_valid':True})
return HttpResponse('{ "message": "success" }')
@login_required
def cancel(request):
job_slug = request.GET['job']
job = Job.objects.get(slug=job_slug)
UserConsent.objects.filter(user=request.user, job=job).update(is_valid=False)
return HttpResponse('{ "message": "success" }')
@login_required
def user_logout(request):
logout(request)
return HttpResponseRedirect('/')
|
Python
| 0
|
@@ -309,17 +309,8 @@
ion,
- CRPDate,
Att
@@ -2477,395 +2477,8 @@
%0A
- crp = job.crpdate%0A %0A if (crp.datatype == 'DAT'):%0A crpdate_str = str(crp.date)%0A elif (crp.datatype == 'MON'):%0A crpdate_str = month_list%5Bcrp.month%5D%0A elif (crp.datatype == 'WOM'):%0A crpdate_str = crp + ' week of ' + crp.month%0A else:%0A crpdate_str = 'Not Available'%0A%0A job_dict%5B%22date%22%5D = crpdate_str%0A
|
6a0c3d0dc5f0106fdc1f7682fa65eabfb5c9d250
|
Set version as 0.6.12
|
alignak_webui/version.py
|
alignak_webui/version.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2017:
# Frederic Mohier, frederic.mohier@alignak.net
#
"""
Alignak - Web User Interface
"""
# Package name
__pkg_name__ = u"alignak_webui"
# Checks types for PyPI keywords
# Used for:
# - PyPI keywords
# - directory where to store files in the Alignak configuration (eg. arbiter/packs/checks_type)
__checks_type__ = u"demo"
# Application manifest
__application__ = u"Alignak-WebUI"
VERSION = (0, 6, 11)
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__author__ = u"Frédéric Mohier"
__author_email__ = u"frederic.mohier@alignak.net"
__copyright__ = u"(c) 2015-2017 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__git_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-webui"
__doc_url__ = "http://alignak-web-ui.readthedocs.io/?badge=latest"
__description__ = u"Alignak - Web User Interface"
__releasenotes__ = u"""Alignak monitoring framework Web User Interface"""
__classifiers__ = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Bottle',
'Intended Audience :: Developers',
'Intended Audience :: Customer Service',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: System :: Monitoring',
'Topic :: System :: Systems Administration'
]
# Application manifest
__manifest__ = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'url': __git_url__,
'doc': __doc_url__
}
|
Python
| 0.000247
|
@@ -471,17 +471,17 @@
(0, 6, 1
-1
+2
)%0A__vers
|
2ce3d1677ccc40a33ae49c80d1f131ffa1320a25
|
Set static root
|
almostfunded/settings.py
|
almostfunded/settings.py
|
"""
Django settings for untitled1 project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '--$s76_#ll@vz8ya#sfk@7!w9f^=ih(7i0ckamu^v)m$o&z-tx'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'campaigns',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'almostfunded.urls'
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT_PATH = os.path.abspath(os.path.dirname(__name__))
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(PROJECT_ROOT, 'templates').replace('\\','/')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
{
'BACKEND': "django.template.backends.jinja2.Jinja2",
'DIRS': [os.path.join(PROJECT_PATH, 'campaigns/templates').replace('\\','/'),
os.path.join(PROJECT_PATH, 'almostfunded/templates').replace('\\','/')],
"APP_DIRS": True,
"OPTIONS": {
'environment': 'jinja2env.environment',
}
},
]
WSGI_APPLICATION = 'almostfunded.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'almostfunded',
'USER': 'lorenamesa',
'PASSWORD': '',
'HOST': 'localhost',
'PORT': '',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
# Celery Config
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'America/Chicago'
|
Python
| 0.000001
|
@@ -3395,16 +3395,66 @@
tatic/'%0A
+STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
%0A# Celer
|
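STATIC_URL is only the URL prefix; the STATIC_ROOT added above gives collectstatic a filesystem target. The pairing in isolation, as a sketch:

import os

PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))

STATIC_URL = '/static/'                             # prefix used when rendering static asset URLs
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')  # where "manage.py collectstatic" copies files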
74744534c332186fa8165c90e91d71b7f77f2ad6
|
fix iobs calculation
|
vivado.py
|
vivado.py
|
import os
import subprocess
import time
import collections
import json
import re
import shutil
import sys
import glob
import datetime
import asciitable
import edalize
from toolchain import Toolchain
from utils import Timed
class Vivado(Toolchain):
'''Vivado toolchain (synth and PnR)'''
carries = (False, False)
def __init__(self, rootdir):
Toolchain.__init__(self, rootdir)
self.toolchain = 'vivado'
self.synthtool = 'vivado'
self.files = []
self.edam = None
self.backend = None
def run(self):
with Timed(self, 'bitstream'):
os.makedirs(self.out_dir, exist_ok=True)
for f in self.srcs:
self.files.append(
{
'name': os.path.realpath(f),
'file_type': 'verilogSource'
}
)
self.files.append(
{
'name': os.path.realpath(self.pcf),
'file_type': 'xdc'
}
)
chip = self.family + self.device + self.package
self.edam = {
'files': self.files,
'name': self.project_name,
'toplevel': self.top,
'parameters':
{
'VIVADO':
{
'paramtype': 'vlogdefine',
'datatype': 'int',
'default': 1,
},
},
'tool_options':
{
'vivado': {
'part': chip,
'synth': self.synthtool,
}
}
}
self.backend = edalize.get_edatool('vivado')(
edam=self.edam, work_root=self.out_dir
)
self.backend.configure("")
self.backend.build()
@staticmethod
def seedable():
return False
@staticmethod
def check_env():
return {
'vivado': have_exec('vivado'),
}
def max_freq(self):
processing = False
group = ""
delay = ""
freq = 0
freqs = {}
report_file = self.out_dir + "/" + self.project_name + ".runs/impl_1/top_timing_summary_routed.rpt"
with open(report_file, 'r') as fp:
for l in fp:
if l == "Max Delay Paths\n":
processing = True
if processing is True:
fields = l.split()
if len(fields) > 1 and fields[1].startswith('----'):
processing = False
# check if this is a timing we want
if group not in requirement.split():
continue
freqs[group] = freq
data = l.split(':')
if len(data) > 1:
if data[0].strip() == 'Data Path Delay':
delay = data[1].split()[0].strip('ns')
freq = 1e9 / float(delay)
if data[0].strip() == 'Path Group':
group = data[1].strip()
if data[0].strip() == 'Requirement':
requirement = data[1].strip()
return freqs
def vivado_resources(self, report_file):
with open(report_file, 'r') as fp:
report_data = fp.read()
report_data = report_data.split('\n\n')
report = dict()
section = None
for d in report_data:
match = re.search(r'\n-+$', d)
if match is not None:
match = re.search(r'\n?[0-9\.]+ (.*)', d)
if match is not None:
section = match.groups()[0]
if d.startswith('+--'):
if section is not None:
# cleanup the table
d = re.sub(r'\+-.*-\+\n', '', d)
d = re.sub(r'\+-.*-\+$', '', d)
d = re.sub(r'^\|\s+', '', d, flags=re.M)
d = re.sub(r'\s\|\n', '\n', d)
report[section.lower()] = asciitable.read(
d,
delimiter='|',
guess=False,
comment=r'(\+.*)|(\*.*)',
numpy=False
)
return report
def resources(self, report_file=None):
lut = 0
dff = 0
carry = 0
iob = 0
pll = 0
bram = 0
if report_file is None:
report_file = self.out_dir + "/" + self.project_name + ".runs/impl_1/top_utilization_placed.rpt"
report = self.vivado_resources(report_file)
for prim in report['primitives']:
if prim[2] == 'Flop & Latch':
dff += int(prim[1])
if prim[2] == 'CarryLogic':
carry += int(prim[1])
if prim[2] == 'IO':
iob += int(prim[1])
if prim[2] == 'LUT':
lut += int(prim[1])
for prim in report['clocking']:
if prim[0] == 'MMCME2_ADV' or prim[0] == 'PLLE2_ADV':
pll += prim[1]
for prim in report['memory']:
if prim[0] == 'Block RAM Tile':
bram += prim[1]
ret = {
"LUT": str(lut),
"DFF": str(dff),
"BRAM": str(bram),
"CARRY": str(carry),
"GLB": "unsupported",
"PLL": str(pll),
"IOB": str(iob),
}
return ret
def versions(self):
return self.backend.get_version()
class VivadoYosys(Vivado):
'''Vivado PnR + Yosys synthesis'''
carries = (False, False)
def __init__(self, rootdir):
Vivado.__init__(self, rootdir)
self.synthtool = 'yosys'
self.toolchain = 'yosys-vivado'
@staticmethod
def yosys_ver():
# Yosys 0.7+352 (git sha1 baddb017, clang 3.8.1-24 -fPIC -Os)
return subprocess.check_output(
"yosys -V", shell=True, universal_newlines=True
).strip()
def resources(self):
report_file = self.out_dir + "/top_utilization_placed.rpt"
return super(VivadoYosys, self).resources(report_file)
def versions(self):
return {
'yosys': self.yosys_ver(),
'vivado': super(VivadoYosys, self).versions()
}
|
Python
| 0
|
@@ -5273,24 +5273,105 @@
2%5D == 'IO':%0A
+ if prim%5B0%5D.startswith('OBUF') or prim%5B0%5D.startswith('IBUF'):%0A
|
b4013acd97851b041a47afa87e0da137e556ca3f
|
sort the output
|
tools/gen_gallery.py
|
tools/gen_gallery.py
|
#!/usr/bin/python
import os,sys,re
# this sucks
patches = [ x for x in os.listdir('.') if re.match(r'.*\.gif$', x) ]
print '''<style type="text/css">
div {
float: left;
width: 20%;
}
</style>
'''
print ''.join(['<div><img src="%s" /><br />%s</div>' % (x,x) for x in patches])
|
Python
| 1
|
@@ -111,16 +111,31 @@
$', x) %5D
+%0Apatches.sort()
%0A%0Aprint
|
5c89a1dd3917a2d54a79dc702e4c53f63e6bd0f2
|
Add option to check compute count
|
webapp/apps/test_assets/utils.py
|
webapp/apps/test_assets/utils.py
|
import json
import os
import sys
from ..taxbrain.compute import MockCompute
from django.core.files.uploadedfile import SimpleUploadedFile
NUM_BUDGET_YEARS = int(os.environ.get("NUM_BUDGET_YEARS", "10"))
def get_dropq_compute_from_module(module_import_path, num_times_to_wait=None):
module_views = sys.modules[module_import_path]
module_views.dropq_compute = MockCompute(
num_times_to_wait=num_times_to_wait
)
return module_views.dropq_compute
def do_micro_sim(client, data, tb_dropq_compute=None, dyn_dropq_compute=None):
'''do the proper sequence of HTTP calls to run a microsim'''
#Monkey patch to mock out running of compute jobs
if tb_dropq_compute is None:
tb_dropq_compute = get_dropq_compute_from_module(
'webapp.apps.taxbrain.views',
num_times_to_wait=0
)
if dyn_dropq_compute is None:
dyn_dropq_compute = get_dropq_compute_from_module(
'webapp.apps.dynamic.views',
num_times_to_wait=1
)
# dynamic_views.dropq_compute = MockCompute(num_times_to_wait=1)
response = client.post('/taxbrain/', data)
# Check that redirect happens
assert response.status_code == 302
idx = response.url[:-1].rfind('/')
assert response.url[:idx].endswith("taxbrain")
# return response
return {"response": response,
"tb_dropq_compute": tb_dropq_compute,
"dyn_dropq_compute": dyn_dropq_compute,
"pk": response.url[idx+1:-1]}
def do_micro_sim_from_file(client, start_year, reform_text, assumptions_text=None):
# Monkey patch to mock out running of compute jobs
import sys
from webapp.apps.taxbrain import views
webapp_views = sys.modules['webapp.apps.taxbrain.views']
webapp_views.dropq_compute = MockCompute()
tc_file = SimpleUploadedFile("test_reform.json", reform_text)
data = {u'docfile': tc_file,
u'has_errors': [u'False'],
u'start_year': start_year, 'csrfmiddlewaretoken':'abc123'}
if assumptions_text:
tc_file2 = SimpleUploadedFile("test_assumptions.json",
assumptions_text)
data['assumpfile'] = tc_file2
response = client.post('/taxbrain/file/', data)
# Check that redirect happens
assert response.status_code == 302
return response
def check_posted_params(mock_compute, params_to_check, start_year):
"""
Make sure posted params match expected results
user_mods: parameters that are actually passed to taxcalc
params_to_check: gives truth value for parameters that we want to check
(formatted as taxcalc dict style reform)
"""
last_posted = mock_compute.last_posted
user_mods = json.loads(last_posted["user_mods"])
assert last_posted["first_budget_year"] == start_year
for year in params_to_check:
for param in params_to_check[year]:
assert user_mods[str(year)][param] == params_to_check[year][param]
def get_post_data(start_year, _ID_BenefitSurtax_Switches=True, quick_calc=False):
data = {u'has_errors': [u'False'],
u'start_year': unicode(start_year),
'csrfmiddlewaretoken':'abc123'}
if _ID_BenefitSurtax_Switches:
switches = {u'ID_BenefitSurtax_Switch_0': [u'True'],
u'ID_BenefitSurtax_Switch_1': [u'True'],
u'ID_BenefitSurtax_Switch_2': [u'True'],
u'ID_BenefitSurtax_Switch_3': [u'True'],
u'ID_BenefitSurtax_Switch_4': [u'True'],
u'ID_BenefitSurtax_Switch_5': [u'True'],
u'ID_BenefitSurtax_Switch_6': [u'True']}
data.update(switches)
if quick_calc:
data['quick_calc'] = 'Quick Calculation!'
return data
def get_file_post_data(start_year, reform_text, assumptions_text=None, quick_calc=False):
tc_file = SimpleUploadedFile("test_reform.json", reform_text)
data = {u'docfile': tc_file,
u'has_errors': [u'False'],
u'start_year': unicode(start_year),
u'quick_calc': quick_calc,
'csrfmiddlewaretoken':'abc123'}
if assumptions_text is not None:
tc_file2 = SimpleUploadedFile("test_assumptions.json",
assumptions_text)
data['assumpfile'] = tc_file2
return data
|
Python
| 0.000001
|
@@ -536,24 +536,61 @@
compute=None
+,%0A compute_count=None
):%0A '''do
@@ -1053,77 +1053,8 @@
)
-%0A # dynamic_views.dropq_compute = MockCompute(num_times_to_wait=1)
%0A%0A
@@ -1261,16 +1261,171 @@
brain%22)%0A
+ print('compute_count', tb_dropq_compute.count, compute_count)%0A if compute_count is not None:%0A assert tb_dropq_compute.count == compute_count%0A
# re
|
35f8ac20ec5ef830f264ba51bcb5df5af72b24d6
|
mask out HC3N features
|
analysis/masked_cubes.py
|
analysis/masked_cubes.py
|
import numpy as np
from spectral_cube import SpectralCube,BooleanArrayMask
from astropy import units as u
from paths import hpath
from astropy.io import fits
import time
from astropy import log
t0 = time.time()
cube303 = SpectralCube.read(hpath('APEX_H2CO_303_202_bl.fits')).with_spectral_unit(u.km/u.s, velocity_convention='radio')
cube321 = SpectralCube.read(hpath('APEX_H2CO_321_220_bl.fits')).with_spectral_unit(u.km/u.s, velocity_convention='radio')
mask = (fits.getdata(hpath('APEX_H2CO_303_202_bl_mask.fits')).astype('bool') &
cube303.mask.include(cube303._data, cube303.wcs) &
cube321.mask.include(cube321._data, cube321.wcs))
bmask = BooleanArrayMask(mask, cube303.wcs)
cube303m = cube303.with_mask(bmask)
cube321m = cube321.with_mask(bmask)
cube303sm = SpectralCube.read(hpath('APEX_H2CO_303_202_smooth_bl.fits')).with_spectral_unit(u.km/u.s, velocity_convention='radio')
cube321sm = SpectralCube.read(hpath('APEX_H2CO_321_220_smooth_bl.fits')).with_spectral_unit(u.km/u.s, velocity_convention='radio')
masksm = (fits.getdata(hpath('APEX_H2CO_303_202_smooth_bl_mask.fits')).astype('bool') &
cube303sm.mask.include(cube303sm._data, cube303sm.wcs) &
cube321sm.mask.include(cube321sm._data, cube321sm.wcs))
bmasksm = BooleanArrayMask(masksm, cube303sm.wcs)
cube303msm = cube303sm.with_mask(bmasksm)
cube321msm = cube321sm.with_mask(bmasksm)
# resample smoothed mask onto original grid
masksm_rs = np.zeros_like(mask, dtype='bool')
masksm_rs[::2,:,:] = masksm
masksm_rs[1::2,:,:] = masksm
bmasksm_rs = BooleanArrayMask(masksm_rs, cube303.wcs)
sncube = SpectralCube.read(hpath('APEX_H2CO_303_202_signal_to_noise_cube.fits'))
sncube._wcs = cube303._wcs
sncube.mask._wcs = cube303._wcs
sncubesm = SpectralCube.read(hpath('APEX_H2CO_303_202_smooth_signal_to_noise_cube.fits'))
sncubesm._wcs = cube303sm._wcs
sncubesm.mask._wcs = cube303sm._wcs
log.info("Masked cube creation took {0:0.1f} seconds".format(time.time()-t0))
|
Python
| 0
|
@@ -206,16 +206,567 @@
time()%0A%0A
+hc3n_regions = %5B%7B'v':(-101,55),%0A 'x':(500,533),%0A 'y':(108,133),%7D,%0A %7B'v':(-133,-70),%0A 'x':(787,884),%0A 'y':(87,120),%7D%5D%0A%0Adef mask_out_region(mask_array, cube, regions=hc3n_regions):%0A%0A for region in regions:%0A z = %5Bcube.closest_spectral_channel(v*u.km/u.s)%0A for v in region%5B'v'%5D%5D%0A view = %5Bslice(*z),%0A slice(*region%5B'y'%5D),%0A slice(*region%5B'x'%5D)%0A %5D%0A%0A mask_array%5Bview%5D = False%0A%0A return mask_array%0A%0A
cube303
@@ -1005,19 +1005,37 @@
o')%0Amask
- =
+arr = mask_out_region
(fits.ge
@@ -1087,32 +1087,58 @@
).astype('bool')
+, cube303)%0Amask = (maskarr
&%0A cube3
@@ -1160,22 +1160,16 @@
(cube303
-._data
, cube30
@@ -1213,22 +1213,16 @@
(cube321
-._data
, cube32
@@ -1608,25 +1608,43 @@
radio')%0A
+sm
mask
-sm =
+arr = mask_out_region
(fits.ge
@@ -1711,16 +1711,48 @@
('bool')
+, cube303sm)%0Amasksm = (smmaskarr
&%0A
@@ -1788,22 +1788,16 @@
ube303sm
-._data
, cube30
@@ -1853,14 +1853,8 @@
21sm
-._data
, cu
|
c73033031e6167c13af6014d49a733ac713aabd2
|
Remove comment I had pasted all the names into
|
services/migrations/0021_populate_areas.py
|
services/migrations/0021_populate_areas.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, connection
from django.db.models import Max
MOUNT_LEBANON = 'Mount Lebanon'
TRIPOLI = 'Tripoli and surroundings'
INITIAL_AREAS = [
# Numbers are arbitrary, just so we can know which ones we created and remove
# them again or at least not duplicate them if we migrate forward and back.
(1, MOUNT_LEBANON, "Baabda", "بعبدا"),
(2, MOUNT_LEBANON, "Beirut", "بيروت"),
(3, MOUNT_LEBANON, "Aley", "عاليه"),
(4, MOUNT_LEBANON, "Chouf", "الشوف"),
(5, MOUNT_LEBANON, "Keserwane", "كسروان"),
(6, MOUNT_LEBANON, "El-Metn", "المتن"),
(7, MOUNT_LEBANON, "Jbeil", "جبيل"),
(21, TRIPOLI, "Mineih-Dinniyi", "المنيه-الضنية"),
(22, TRIPOLI, "Zgharta", "زغرتا"),
(23, TRIPOLI, "Bcharri", "بشري"),
(24, TRIPOLI, "Tripoli", "طرابلس"),
(25, TRIPOLI, "Koura", "الكورة"),
(26, TRIPOLI, "Batroun", "البترون"),
]
def update_postgres_sequence_generator(model):
"""
Update the sequence generator for a model's primary key
to the max current value of that key, so that Postgres
will know not to try to use the previously-used values again.
Apparently this is needed because when we create objects
during the migration, we specify the primary key's value,
so the Postgres sequence doesn't get used or incremented.
"""
table_name = model._meta.db_table
attname, colname = model._meta.pk.get_attname_column()
seq_name = "%s_%s_seq" % (table_name, colname)
max_val = model.objects.aggregate(maxkey=Max(attname))['maxkey']
cursor = connection.cursor()
cursor.execute("select setval(%s, %s);", [seq_name, max_val])
def no_op(apps, schema_editor):
# When we back up the migration, don't remove any records.
pass
def create_areas(apps, schema_editor):
ServiceArea = apps.get_model('services', 'ServiceArea')
for number, parent, english, arabic in INITIAL_AREAS:
# If the area already exists, do not change it because someone might
# have edited it and we don't want to lose their changes.
ServiceArea.objects.get_or_create(
pk=number,
defaults=dict(
name_en="%s / %s" % (parent, english),
name_ar="%s / %s" % (parent, arabic),
)
)
update_postgres_sequence_generator(ServiceArea)
class Migration(migrations.Migration):
dependencies = [
('services', '0020_auto_20150209_2237'),
]
operations = [
migrations.RunPython(create_areas, no_op),
]
"""
Number
Mount Lebanon
Tripoli and surroundings
1
Baabda بعبدا
Mineih-Dinniyiالمنيه-الضنية
2
Beirutبيروت
Zghartaزغرتا
3
Aleyعاليه
Bcharriبشري
4
Choufالشوف
Tripoliطرابلس
5
Keserwaneكسروان
Kouraالكورة
6
El-Metnالمتن
Batroun البترون
7
Jbeilجبيل
"""
|
Python
| 0
|
@@ -2579,258 +2579,4 @@
%5D%0A
-%0A%0A%22%22%22%0A%0ANumber%0AMount Lebanon%0ATripoli and surroundings%0A1%0ABaabda %D8%A8%D8%B9%D8%A8%D8%AF%D8%A7%0AMineih-Dinniyi%D8%A7%D9%84%D9%85%D9%86%D9%8A%D9%87-%D8%A7%D9%84%D8%B6%D9%86%D9%8A%D8%A9%0A2%0ABeirut%D8%A8%D9%8A%D8%B1%D9%88%D8%AA%0AZgharta%D8%B2%D8%BA%D8%B1%D8%AA%D8%A7%0A3%0AAley%D8%B9%D8%A7%D9%84%D9%8A%D9%87%0ABcharri%D8%A8%D8%B4%D8%B1%D9%8A%0A4%0AChouf%D8%A7%D9%84%D8%B4%D9%88%D9%81%0ATripoli%D8%B7%D8%B1%D8%A7%D8%A8%D9%84%D8%B3%0A5%0AKeserwane%D9%83%D8%B3%D8%B1%D9%88%D8%A7%D9%86%0AKoura%D8%A7%D9%84%D9%83%D9%88%D8%B1%D8%A9%0A6%0AEl-Metn%D8%A7%D9%84%D9%85%D8%AA%D9%86%0A Batroun %D8%A7%D9%84%D8%A8%D8%AA%D8%B1%D9%88%D9%86%0A7%0AJbeil%D8%AC%D8%A8%D9%8A%D9%84%0A%0A%0A%22%22%22%0A
|
79f8f6c922e6f0be3f6bf62c13cbe6dc9c50366a
|
Remove project_config dependency
|
budget_proj/budget_proj/settings/production.py
|
budget_proj/budget_proj/settings/production.py
|
import requests
from .base import *
from .. import project_config
SECRET_KEY = project_config.DJANGO_SECRET_KEY
ALLOWED_HOSTS = project_config.ALLOWED_HOSTS
# Get the IPV4 address we're working with on AWS
# The Loadbalancer uses this ip address for healthchecks
EC2_PRIVATE_IP = None
try:
EC2_PRIVATE_IP = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4', timeout=0.01).text
except requests.exceptions.RequestException:
pass
if EC2_PRIVATE_IP:
ALLOWED_HOSTS.append(EC2_PRIVATE_IP)
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': project_config.AWS['ENGINE'],
'NAME': project_config.AWS['NAME'],
'HOST': project_config.AWS['HOST'],
'PORT': project_config.AWS['PORT'],
'USER': project_config.AWS['USER'],
'PASSWORD': project_config.AWS['PASSWORD'],
}
}
|
Python
| 0.000001
|
@@ -30,16 +30,28 @@
mport *%0A
+import os%0A#
from ..
@@ -87,31 +87,32 @@
T_KEY =
-project_config.
+os.environ.get('
DJANGO_S
@@ -120,18 +120,22 @@
CRET_KEY
-%0A%0A
+')%0A%0A#
ALLOWED_
@@ -170,16 +170,38 @@
ED_HOSTS
+%0AALLOWED_HOSTS = %5B'*'%5D
%0A%0A# Get
@@ -300,16 +300,18 @@
hchecks%0A
+#
EC2_PRIV
@@ -328,17 +328,21 @@
one%0A
+#
try:%0A
+#
+
EC2_
@@ -440,16 +440,18 @@
1).text%0A
+#
except r
@@ -487,16 +487,18 @@
eption:%0A
+#
pass
@@ -499,16 +499,18 @@
pass%0A%0A
+#
if EC2_P
@@ -520,16 +520,18 @@
ATE_IP:%0A
+#
ALLO
@@ -693,212 +693,263 @@
E':
-project_config.AWS%5B'ENGINE'%5D,%0A 'NAME': project_config.AWS%5B'NAME'%5D,%0A 'HOST': project_config.AWS%5B'HOST'%5D,%0A 'PORT': project_config.AWS%5B'PORT'%5D,%0A 'USER': project_config.AWS%5B'USER'%5D
+'django_db_geventpool.backends.postgresql_psycopg2',%0A 'PASSWORD': os.environ.get('POSTGRES_PASSWORD'),%0A 'NAME': os.environ.get('POSTGRES_NAME'),%0A 'USER': os.environ.get('POSTGRES_USER'),%0A 'HOST': os.environ.get('POSTGRES_HOST')
,%0A
@@ -960,50 +960,46 @@
'P
-ASSW
OR
-D': project_config.AWS%5B'PASSWORD'%5D,%0A
+T': os.environ.get('POSTGRES_PORT'),
|
3fb2986bc4f344e7c0ec751c38d257ce99681ebe
|
add tests for -L/--list-engines option
|
anytemplate/tests/cli.py
|
anytemplate/tests/cli.py
|
#
# Copyright (C) 2015 Satoru SATOH <ssato at redhat.com>
# License: MIT
#
"""Tests of anytempalte.cli
"""
import os.path
import os
import subprocess
import unittest
import anytemplate.cli as TT
import anytemplate.tests.common
from anytemplate.engine import find_by_name
CLI_SCRIPT = os.path.join(anytemplate.tests.common.selfdir(), "..", "cli.py")
def run(args=None):
"""
:throw: subprocess.CalledProcessError if something goes wrong
"""
args = ["python", CLI_SCRIPT] + ([] if args is None else args)
devnull = open("/dev/null", 'w')
env = os.environ.copy()
env["PYTHONPATH"] = os.path.join(anytemplate.tests.common.selfdir(), "..")
subprocess.check_call(args, stdout=devnull, stderr=devnull, env=env)
def run_and_check_exit_code(args=None, code=0):
"""
Run main() and check its exit code.
"""
try:
TT.main(["dummy"] + ([] if args is None else args))
except SystemExit as exc:
return exc.code == code
return True
class Test_00(unittest.TestCase):
def run_and_check_exit_code(self, args=[], code=0, _not=False):
if _not:
self.assertFalse(run_and_check_exit_code(args, code))
else:
self.assertTrue(run_and_check_exit_code(args, code))
def test_10_main__wo_args(self):
self.run_and_check_exit_code()
def test_12__show_usage(self):
self.run_and_check_exit_code(["--help"])
def test_14__wrong_option(self):
self.run_and_check_exit_code(["--wrong-option-xyz"], _not=True)
def test_20_main__wo_args(self):
try:
TT.main()
except SystemExit:
pass
def test_22_main__show_usage(self):
try:
TT.main(["dummy", "--help"])
except SystemExit:
pass
def test_24_main__show_usage(self):
try:
TT.main(["dummy", "--wrong-option-xyz"])
except SystemExit:
pass
class Test_10_with_workdir(unittest.TestCase):
def setUp(self):
self.workdir = anytemplate.tests.common.setup_workdir()
def tearDown(self):
anytemplate.tests.common.cleanup_workdir(self.workdir)
def run_and_check_exit_code(self, args=[], code=0, _not=False):
if _not:
self.assertFalse(run_and_check_exit_code(args, code))
else:
self.assertTrue(run_and_check_exit_code(args, code))
def test_10_main__stringTemplate(self):
tmpl = os.path.join(self.workdir, "test.tmpl")
ctx = os.path.join(self.workdir, "ctx.yml")
output = os.path.join(self.workdir, "output.txt")
open(tmpl, 'w').write("$a\n")
open(ctx, 'w').write("a: aaa\n")
self.run_and_check_exit_code(["-E", "string.Template", "-C", ctx,
"-o", output, tmpl])
self.assertEquals(open(output).read(), "aaa\n")
def test_20_main__jinja2(self):
if find_by_name("jinja2"):
tmpl = os.path.join(self.workdir, "test.j2")
output = os.path.join(self.workdir, "output.txt")
open(tmpl, 'w').write("{{ hello|default('hello') }}")
self.run_and_check_exit_code(["-o", output, tmpl])
self.assertEquals(open(output).read(), "hello")
# vim:sw=4:ts=4:et:
|
Python
| 0.000001
|
@@ -1937,16 +1937,165 @@
pass%0A%0A
+ def test_26_main__list_engines(self):%0A try:%0A TT.main(%5B%22dummy%22, %22--list-engines%22%5D)%0A except SystemExit:%0A pass%0A%0A
%0Aclass T
|
02360f5251ac308f45cb210a305fa225a056e1be
|
add travis config for keen public read key
|
website/settings/local-travis.py
|
website/settings/local-travis.py
|
# -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
import inspect
from . import defaults
import os
DB_PORT = 27017
DEV_MODE = True
DEBUG_MODE = True # Sets app to debug mode, turns off template caching, etc.
SECURE_MODE = not DEBUG_MODE # Disable osf secure cookie
PROTOCOL = 'https://' if SECURE_MODE else 'http://'
DOMAIN = PROTOCOL + 'localhost:5000/'
API_DOMAIN = PROTOCOL + 'localhost:8000/'
SEARCH_ENGINE = 'elastic'
USE_EMAIL = False
USE_CELERY = False
USE_GNUPG = False
# Email
MAIL_SERVER = 'localhost:1025' # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'
# Session
COOKIE_NAME = 'osf'
SECRET_KEY = "CHANGEME"
SESSION_COOKIE_SECURE = SECURE_MODE
OSF_SERVER_KEY = None
OSF_SERVER_CERT = None
##### Celery #####
## Default RabbitMQ broker
BROKER_URL = 'amqp://'
# In-memory result backend
CELERY_RESULT_BACKEND = 'cache'
CELERY_CACHE_BACKEND = 'memory'
USE_CDN_FOR_CLIENT_LIBS = False
SENTRY_DSN = None
TEST_DB_NAME = DB_NAME = 'osf_test'
VARNISH_SERVERS = ['http://localhost:8080']
# if ENABLE_VARNISH isn't set in python read it from the env var and set it
locals().setdefault('ENABLE_VARNISH', os.environ.get('ENABLE_VARNISH') == 'True')
KEEN = {
'public': {
'project_id': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
'master_key': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
'write_key': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
},
'private': {
'project_id': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
'write_key': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
'read_key': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',
},
}
|
Python
| 0
|
@@ -1604,24 +1604,112 @@
242526272',%0A
+ 'read_key': '123456789abcdef101112131415161718191a1b1c1d1e1f20212223242526272',%0A
%7D,%0A '
|
84234fd72c70eac5228d835de79a12cf4515204e
|
Add material to be an attribute for all styles
|
simlammps/common/atom_style_description.py
|
simlammps/common/atom_style_description.py
|
import itertools
from simphony.core.cuba import CUBA
from simlammps.common.atom_style import AtomStyle
class AtomStyleDescription(object):
""" Class describes atom style
Each atom style has a particular set of attributes that it supports
(or provides). This class contains a list of what attributes it
contains. Note that the order of items in 'attributes' corresponds to
the order they appear in lammps-data file.
Attributes
----------
attributes : list of ValueInfo
ordered list of what attributes each particle/atom contains
velocity_attributes : list of ValueInfo
ordered list of what velocity related attributes each atom contains
(CUBA.VELOCITY is always included)
has_mass_per_type : bool (optional)
True if this style requires a mass (specifically mass-per-atom_type)
"""
def __init__(self,
attributes=None,
velocity_attributes=None,
has_mass_per_type=False):
if attributes is None:
self.attributes = []
else:
self.attributes = attributes
self.velocity_attributes = [ValueInfo(cuba_key=CUBA.VELOCITY)]
if velocity_attributes:
self.velocity_attributes.extend(velocity_attributes)
self.has_mass_per_type = has_mass_per_type
class ValueInfo(object):
""" Class describes cuba value
Class provides information on a cuba value and conversion
between LAMMPS/SIMPHONY
Attributes
----------
cuba_key : CUBA
CUBA key
convert_to_cuba : function (optional)
method to convert from LAMMPS value to SimPhoNy-CUBA
convert_from_cuba : function (optional)
method to convert from SimPhoNy-CUBA to LAMMPS value
"""
def __init__(self,
cuba_key,
convert_to_cuba=None,
convert_from_cuba=None):
self.cuba_key = cuba_key
self.convert_to_cuba = convert_to_cuba
self.convert_from_cuba = convert_from_cuba
# description of each atom-style
ATOM_STYLE_DESCRIPTIONS = {
AtomStyle.ATOMIC:
AtomStyleDescription(
# attributes has default (i.e. coordinates)
# , velocity..)
has_mass_per_type=True), # but with mass
AtomStyle.GRANULAR:
AtomStyleDescription(
attributes=[
ValueInfo(cuba_key=CUBA.RADIUS, # but diameter in LAMMPS
convert_to_cuba=lambda x: x / 2, # d to radius
convert_from_cuba=lambda x: x * 2), # radius to d
ValueInfo(cuba_key=CUBA.MASS)],
velocity_attributes=[ValueInfo(cuba_key=CUBA.ANGULAR_VELOCITY)],
has_mass_per_type=False)
}
def get_attributes(atom_style):
""" Return list of CUBA-key expected on particle
"""
atom_style_description = ATOM_STYLE_DESCRIPTIONS[atom_style]
return [attribute.cuba_key for attribute in
itertools.chain(
atom_style_description.attributes,
atom_style_description.velocity_attributes)]
|
Python
| 0
|
@@ -2764,16 +2764,122 @@
lse)%0A%7D%0A%0A
+# all particles will have a material type%0A_default_attributes = %5BValueInfo(cuba_key=CUBA.MATERIAL_TYPE)%5D%0A%0A
%0Adef get
@@ -3216,11 +3216,48 @@
tributes
+,%0A _default_attributes
)%5D%0A
|
47c2936e65d00a08896b4e60060ff737b7a2f675
|
Check that the permission migrations work
|
app/tests/workstations_tests/test_migrations.py
|
app/tests/workstations_tests/test_migrations.py
|
import pytest
from django.db import connection
from django.db.migrations.executor import MigrationExecutor
@pytest.mark.django_db(transaction=True)
def test_workstation_group_migration():
executor = MigrationExecutor(connection)
app = "workstations"
migrate_from = [(app, "0001_initial")]
migrate_to = [(app, "0004_auto_20190813_1302")]
executor.migrate(migrate_from)
old_apps = executor.loader.project_state(migrate_from).apps
Workstation = old_apps.get_model(app, "Workstation")
old_ws = Workstation.objects.create(title="foo")
assert not hasattr(old_ws, "editors_group")
assert not hasattr(old_ws, "users_group")
# Reload
executor.loader.build_graph()
# Migrate forwards
executor.migrate(migrate_to)
new_apps = executor.loader.project_state(migrate_to).apps
Workstation = new_apps.get_model(app, "Workstation")
new_ws = Workstation.objects.get(title="foo")
assert new_ws.editors_group
assert new_ws.users_group
assert new_ws.slug == old_ws.slug
assert new_ws.title == old_ws.title
|
Python
| 0
|
@@ -100,16 +100,157 @@
xecutor%0A
+from guardian.shortcuts import get_perms%0A%0Afrom grandchallenge.workstations.models import Workstation%0Afrom tests.factories import UserFactory%0A
%0A%0A@pytes
@@ -586,32 +586,60 @@
from).apps%0A%0A
+user = UserFactory()%0A Old
Workstation = ol
@@ -684,24 +684,27 @@
old_ws =
+Old
Workstation.
@@ -935,128 +935,8 @@
o)%0A%0A
- new_apps = executor.loader.project_state(migrate_to).apps%0A%0A Workstation = new_apps.get_model(app, %22Workstation%22)%0A
@@ -972,32 +972,63 @@
get(title=%22foo%22)
+%0A new_ws.add_user(user=user)
%0A%0A assert new
@@ -1113,16 +1113,16 @@
ws.slug%0A
-
asse
@@ -1153,8 +1153,65 @@
s.title%0A
+ assert %22view_workstation%22 in get_perms(user, new_ws)%0A
|
f92b27c1ea241f381e41ef9b20bc6e75fc03c159
|
Add OCA as author
|
account_invoice_merge_payment/__openerp__.py
|
account_invoice_merge_payment/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of account_invoice_merge_payment,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# account_invoice_merge_payment is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# account_invoice_merge_payment is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with account_invoice_merge_payment.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "account_invoice_merge_payment",
'summary': """
Use invoice merge regarding fields on Account Payment Partner""",
'author': "ACSONE SA/NV",
'website': "http://acsone.eu",
'category': 'Invoicing & Payments',
'version': '0.1',
'license': 'AGPL-3',
'depends': [
'account_invoice_merge',
'account_payment_partner',
],
'data': [],
'auto_install': True
}
|
Python
| 0
|
@@ -1245,16 +1245,49 @@
NE SA/NV
+,Odoo Community Association (OCA)
%22,%0A '
|
34205bc818742374a531fe2126fd91dd355cb2c2
|
Update storage.py
|
db_file_storage/storage.py
|
db_file_storage/storage.py
|
# python
import base64
import sys
# django
from django import VERSION as DJ_VERSION
from django.core.files.base import ContentFile
from django.core.files.storage import Storage
from django.core.urlresolvers import reverse
from django.utils.crypto import get_random_string
if sys.version_info.major == 2: # python2
from urllib import urlencode
else: # python3
from urllib.parse import urlencode
class DatabaseFileStorage(Storage):
""" File storage system that saves models' FileFields in the database.
Intended for use with Models' FileFields.
Uses a specific model for each FileField of each Model.
"""
def __init__(self, *args, **kwargs):
super(DatabaseFileStorage, self).__init__(*args, **kwargs)
# As of Django 1.7, the utilities in django.db.models.loading are
# deprecated (to be removed in 1.9) in favor of the new
# application loading system. Check here:
# https://github.com/django/django/blob/1.7/django/db/models/loading.py
if DJ_VERSION[0] == 1 and DJ_VERSION[1] < 7:
from django.db.models.loading import get_model
else:
from django.apps import apps
get_model = apps.get_model
self._get_model = get_model
def _get_model_cls(self, model_class_path):
app_label, model_name = model_class_path.rsplit('.', 1)
return self._get_model(app_label, model_name)
def _get_encoded_bytes_from_file(self, _file):
_file.seek(0)
file_content = _file.read()
return base64.b64encode(file_content)
def _get_file_from_encoded_bytes(self, encoded_bytes):
file_buffer = base64.b64decode(encoded_bytes)
return ContentFile(file_buffer)
def _get_unique_filename(self, model_cls, filename_field, filename):
final_name = filename
if ('.' in filename.rsplit('/', 1)[-1]):
stem, extension = final_name.rsplit('.', 1)
else:
stem, extension = (final_name, '')
random_str = get_random_string(7)
while model_cls.objects.filter(
**{filename_field: final_name}
).exists():
final_name = '%s_(%s)%s' % (
stem, random_str,
('.%s' % extension) if extension else ''
)
random_str = get_random_string(7)
return final_name
def _get_storage_attributes(self, name):
(model_class_path, content_field, filename_field,
mimetype_field, filename) = name.split('/')
return {
'model_class_path': model_class_path,
'content_field': content_field,
'filename_field': filename_field,
'mimetype_field': mimetype_field,
'filename': filename,
}
def _open(self, name, mode='rb'):
assert mode[0] in 'rwab'
storage_attrs = self._get_storage_attributes(name)
model_class_path = storage_attrs['model_class_path']
content_field = storage_attrs['content_field']
filename_field = storage_attrs['filename_field']
mimetype_field = storage_attrs['mimetype_field']
filename = storage_attrs['filename']
model_cls = self._get_model_cls(model_class_path)
model_instance = model_cls.objects.only(
content_field, mimetype_field
).get(**{filename_field: name})
encoded_bytes = getattr(model_instance, content_field)
_file = self._get_file_from_encoded_bytes(encoded_bytes)
_file.filename = filename
_file.mimetype = getattr(model_instance, mimetype_field)
return _file
def _save(self, name, content):
storage_attrs = self._get_storage_attributes(name)
model_class_path = storage_attrs['model_class_path']
content_field = storage_attrs['content_field']
filename_field = storage_attrs['filename_field']
mimetype_field = storage_attrs['mimetype_field']
model_cls = self._get_model_cls(model_class_path)
new_filename = self._get_unique_filename(model_cls,
filename_field, name)
encoded_bytes = self._get_encoded_bytes_from_file(content)
mimetype = getattr(content.file, 'content_type', 'text/plain')
model_cls.objects.create(**{
content_field: encoded_bytes,
filename_field: new_filename,
mimetype_field: mimetype,
})
return new_filename
def delete(self, name):
storage_attrs = self._get_storage_attributes(name)
model_class_path = storage_attrs['model_class_path']
filename_field = storage_attrs['filename_field']
model_cls = self._get_model_cls(model_class_path)
model_cls.objects.filter(**{filename_field: name}).delete()
def exists(self, name):
storage_attrs = self._get_storage_attributes(name)
model_class_path = storage_attrs['model_class_path']
filename_field = storage_attrs['filename_field']
filename = storage_attrs['filename']
model_cls = self._get_model_cls(model_class_path)
return model_cls.objects.filter(
**{filename_field: filename}
).exists()
def url(self, name):
_url = reverse('db_file_storage.download_file')
return _url + '?' + urlencode({'name': name})
class FixedModelDatabaseFileStorage(DatabaseFileStorage):
""" File storage system that saves files in the database.
Intended for use without Models' FileFields, e.g. with Form Wizards.
Uses a fixed Model to store all the saved files.
"""
def __init__(self, *args, **kwargs):
try:
self.model_class_path = kwargs.pop('model_class_path')
self.content_field = kwargs.pop('content_field')
self.filename_field = kwargs.pop('filename_field')
self.mimetype_field = kwargs.pop('mimetype_field')
except KeyError:
raise KeyError(
"keyword args 'model_class_path', 'content_field', "
"'filename_field' and 'mimetype_field' are required."
)
super(FixedModelDatabaseFileStorage, self).__init__(*args, **kwargs)
def _get_storage_attributes(self, name):
return {
'model_class_path': self.model_class_path,
'content_field': self.content_field,
'filename_field': self.filename_field,
'mimetype_field': self.mimetype_field,
'filename': name,
}
|
Python
| 0.000001
|
@@ -27,16 +27,26 @@
ort sys%0A
+import os%0A
# django
@@ -1883,19 +1883,22 @@
.rsplit(
-'/'
+os.sep
, 1)%5B-1%5D
@@ -2540,11 +2540,14 @@
lit(
-'/'
+os.sep
)%0A
|
ca27ff5efa987ce413d7e7f43c49fad189930aed
|
Fix missing migration dependency
|
entities/migrations/0045_auto_20160922_1330.py
|
entities/migrations/0045_auto_20160922_1330.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-22 11:30
from __future__ import unicode_literals
from django.db import migrations
def set_groupcontent_group(apps, schema_editor):
Group1 = apps.get_model('entities.Group')
Group2 = apps.get_model('groups.Group')
GroupContent = apps.get_model('entities.GroupContent')
for gc in GroupContent.objects.values('id', 'group_id'):
g1 = Group1.objects.get(id=gc['group_id'])
g2 = Group2.objects.get(slug=g1.slug)
GroupContent.objects.filter(id=gc['id']).update(group_id=g2.id)
class Migration(migrations.Migration):
dependencies = [
('entities', '0044_auto_20160922_1118'),
]
operations = [
migrations.RunPython(set_groupcontent_group)
]
|
Python
| 0.000351
|
@@ -629,24 +629,71 @@
dencies = %5B%0A
+ ('groups', '0002_auto_20160922_1108'),%0A
('en
|
7de8c75cc5a90ecd87f1bb5ccf72c0ec79c9f531
|
With empty string
|
examples/parse_context_free_grammar_example.py
|
examples/parse_context_free_grammar_example.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, division
import codecs
from nltk import parse_cfg
from nltk import word_tokenize
from nltk import RecursiveDescentParser
def encode(string, char_type='utf8', errors=''):
return codecs.encode(string, char_type, errors)
cfg = encode("""
S -> NP VP NP
NP -> 'Péter' | 'Marit'
VP -> 'szereti'
""")
parsed_cfg = parse_cfg(cfg)
parser = RecursiveDescentParser(parsed_cfg)
uni_text = encode('Péter szereti Marit')
sentence = word_tokenize(uni_text)
print parser.parse(sentence)
|
Python
| 0.9999
|
@@ -331,16 +331,24 @@
NP VP NP
+ %7C NP VP
%0ANP -%3E '
@@ -363,16 +363,20 @@
'Marit'
+ %7C e
%0AVP -%3E '
@@ -383,16 +383,20 @@
szereti'
+ %7C e
%0A%22%22%22)%0A%0Ap
|
72ba3a0401ad08d4df2fdc03b326eab16af47832
|
Bump version to 0.4.2.dev1
|
django_backend/__init__.py
|
django_backend/__init__.py
|
from .backend.renderable import Renderable # noqa
from .group import Group # noqa
from .sitebackend import SiteBackend
__version__ = '0.4.1'
default_app_config = 'django_backend.apps.DjangoBackendConfig'
site = SiteBackend(id='backend')
|
Python
| 0
|
@@ -135,16 +135,21 @@
= '0.4.
+2.dev
1'%0A%0A%0Adef
|
de39d46fcbe51e29e008ebc24c3fbec347a75edd
|
Make BasicEmailTest work with any DEFAULT_FROM_EMAIL setting
|
django_send_email/tests.py
|
django_send_email/tests.py
|
from contextlib import contextmanager
from django.test import TestCase
from django.conf import settings
from django.core.management import CommandError, call_command
from django.core import mail
class SettingDoesNotExist:
pass
@contextmanager
def patch_settings(**kwargs):
old_settings = []
for key, new_value in kwargs.items():
old_value = getattr(settings, key, SettingDoesNotExist)
old_settings.append((key, old_value))
setattr(settings, key, new_value)
yield
for key, old_value in old_settings:
if old_value is SettingDoesNotExist:
delattr(settings, key)
else:
setattr(settings, key, old_value)
class ErrorTest(TestCase):
def test_noargs(self):
self.assertRaises(CommandError, call_command, 'send_email_message')
def test_bad_address(self):
self.assertRaises(CommandError, call_command, 'send_email_message', 'subject', 'message', 'bogus')
def test_bad_cop_addresses(self):
self.assertRaises(CommandError, call_command, 'send_email_message', 'subject', 'message', 'user@example.com',
bcc='bogus', cc='bogus')
class BaseEmailTest(TestCase):
subject = 'subject'
body = 'message'
recipients = ['user@example.com']
options = {}
message = {}
new_settings = {}
def test_send_email(self):
self.options.update(interactive=False)
with patch_settings(**self.new_settings):
if self.__class__.__name__ != 'BaseEmailTest':
call_command('send_email_message', self.subject, self.body, *self.recipients, **self.options)
self.validate_outbox(mail.outbox)
def validate_outbox(self, outbox):
self.assertEqual(len(outbox), 1 if self.message else 0)
for key, value in self.message.items():
self.assertEqual(getattr(outbox[0], key), value)
class BasicEmailTest(BaseEmailTest):
message = {
'from_email': 'webmaster@localhost',
'to': ['user@example.com'],
'body': 'message',
'subject': '[Django] subject'
}
class SubjectEmailTest(BaseEmailTest):
subject = 'Text'
new_settings = {
'EMAIL_SUBJECT_PREFIX': 'Prefix '
}
message = {
'subject': 'Prefix Text'
}
class NoPrefixEmailTest(BaseEmailTest):
options = {
'noprefix': True
}
message = {
'subject': 'subject'
}
class MessageFileEmailTest(BaseEmailTest):
body = __file__
message = {
'body': open(__file__).read()
}
class RecipientsEmailTest(BaseEmailTest):
recipients = ['user1@example.com', 'user1@example.com']
message = {
'to': ['user1@example.com', 'user1@example.com']
}
class DefaultFromEmailTest(BaseEmailTest):
new_settings = {
'DEFAULT_FROM_EMAIL': 'user@example.com'
}
message = {
'from_email': 'user@example.com'
}
class FromEmailTest(BaseEmailTest):
options = {
'from_email': 'webmaster@example.com'
}
message = {
'from_email': 'webmaster@example.com'
}
class CCEmailTest(BaseEmailTest):
options = {
'cc': 'user1@example.com,user2@example.com'
}
message = {
'cc': ['user1@example.com', 'user2@example.com']
}
class BCCEmailTest(BaseEmailTest):
options = {
'bcc': 'user1@example.com,user2@example.com'
}
message = {
'bcc': ['user1@example.com', 'user2@example.com']
}
|
Python
| 0.000001
|
@@ -1971,29 +1971,35 @@
l':
-'webmaster@localhost'
+settings.DEFAULT_FROM_EMAIL
,%0A
|
afdb387cd8f9f54ee666cf1f64416d877f4eba19
|
Add support for subtitles
|
youtube_dl/extractor/streamcz.py
|
youtube_dl/extractor/streamcz.py
|
# coding: utf-8
from __future__ import unicode_literals
import hashlib
import time
from .common import InfoExtractor
from ..utils import (
int_or_none,
sanitized_Request,
)
def _get_api_key(api_path):
if api_path.endswith('?'):
api_path = api_path[:-1]
api_key = 'fb5f58a820353bd7095de526253c14fd'
a = '{0:}{1:}{2:}'.format(api_key, api_path, int(round(time.time() / 24 / 3600)))
return hashlib.md5(a.encode('ascii')).hexdigest()
class StreamCZIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?stream\.cz/.+/(?P<id>[0-9]+)'
_API_URL = 'http://www.stream.cz/API'
_TESTS = [{
'url': 'http://www.stream.cz/peklonataliri/765767-ecka-pro-deti',
'md5': '6d3ca61a8d0633c9c542b92fcb936b0c',
'info_dict': {
'id': '765767',
'ext': 'mp4',
'title': 'Peklo na talíři: Éčka pro děti',
'description': 'Taška s grónskou pomazánkou a další pekelnosti ZDE',
'thumbnail': 're:^http://im.stream.cz/episode/52961d7e19d423f8f06f0100',
'duration': 256,
},
}, {
'url': 'http://www.stream.cz/blanik/10002447-tri-roky-pro-mazanka',
'md5': 'e54a254fb8b871968fd8403255f28589',
'info_dict': {
'id': '10002447',
'ext': 'mp4',
'title': 'Kancelář Blaník: Tři roky pro Mazánka',
'description': 'md5:3862a00ba7bf0b3e44806b544032c859',
'thumbnail': 're:^http://im.stream.cz/episode/537f838c50c11f8d21320000',
'duration': 368,
},
}]
def _real_extract(self, url):
video_id = self._match_id(url)
api_path = '/episode/%s' % video_id
req = sanitized_Request(self._API_URL + api_path)
req.add_header('Api-Password', _get_api_key(api_path))
data = self._download_json(req, video_id)
formats = []
for quality, video in enumerate(data['video_qualities']):
for f in video['formats']:
typ = f['type'].partition('/')[2]
qlabel = video.get('quality_label')
formats.append({
'format_note': '%s-%s' % (qlabel, typ) if qlabel else typ,
'format_id': '%s-%s' % (typ, f['quality']),
'url': f['source'],
'height': int_or_none(f['quality'].rstrip('p')),
'quality': quality,
})
self._sort_formats(formats)
image = data.get('image')
if image:
thumbnail = self._proto_relative_url(
image.replace('{width}', '1240').replace('{height}', '697'),
scheme='http:',
)
else:
thumbnail = None
stream = data.get('_embedded', {}).get('stream:show', {}).get('name')
if stream:
title = '%s: %s' % (stream, data['name'])
else:
title = data['name']
return {
'id': video_id,
'title': title,
'thumbnail': thumbnail,
'formats': formats,
'description': data.get('web_site_text'),
'duration': int_or_none(data.get('duration')),
'view_count': int_or_none(data.get('views')),
}
|
Python
| 0
|
@@ -716,40 +716,40 @@
': '
-6d3ca61a8d0633c9c542b92fcb936b0c
+934bb6a6d220d99c010783c9719960d5
',%0A
@@ -1190,40 +1190,40 @@
': '
-e54a254fb8b871968fd8403255f28589
+849a88c1e1ca47d41403c2ba5e59e261
',%0A
@@ -2929,16 +2929,214 @@
name'%5D%0A%0A
+ subtitles = %7B%7D%0A srt_url = data.get('subtitles_srt')%0A if srt_url:%0A subtitles%5B'cs'%5D = %5B%7B%0A 'ext': 'srt',%0A 'url': srt_url,%0A %7D%5D%0A%0A
@@ -3435,22 +3435,58 @@
('views')),%0A
+ 'subtitles': subtitles,%0A
%7D%0A
|
6dd3b2f7844e670b8774aa0afd25b83f60753703
|
make sure django is setup before importing
|
dblistener.py
|
dblistener.py
|
import os
import time
import logging
import django
from rethinkdb.errors import RqlDriverError
from leaderboardapp.models import Board, Player
from leaderboardapp.views import publish_board
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'leaderboard.settings')
django.setup()
logger = logging.getLogger('dblistener')
while True:
try:
for change in Player.get_all_changes():
logger.debug('got change: %s' % change)
try:
row = change['new_val']
board = Board.get(row['board'])
publish_board(board)
except Exception:
logger.exception('failed to handle')
except RqlDriverError:
logger.exception('failed to connect')
time.sleep(1)
|
Python
| 0
|
@@ -6,28 +6,112 @@
t os
-%0Aimport time
+, django%0Aos.environ.setdefault('DJANGO_SETTINGS_MODULE', 'leaderboard.settings')%0Adjango.setup()%0A
%0Aimport
logg
@@ -110,23 +110,20 @@
ort
-logging
+time
%0Aimport
djan
@@ -118,22 +118,23 @@
%0Aimport
-dja
+loggi
ng
-o
%0Afrom re
@@ -270,96 +270,8 @@
rd%0A%0A
-os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'leaderboard.settings')%0Adjango.setup()%0A%0A
logg
|
b57e79fadf54bd0a34eaf81ec57059fe964e3be8
|
rewrite semalock
|
decorators.py
|
decorators.py
|
import os
from functools import wraps
from requests import Timeout, ConnectionError
from socket import timeout as socket_timeout
import logging
from .models import ArbitraryAccessObject
from shutil import get_terminal_size
timeouts = (Timeout, socket_timeout, ConnectionError)
__author__ = 'zz'
def threading_lock(lock):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
with lock:
return func(*args, **kwargs)
return wrapper
return decorator
def retry_connect(retry_times, timeout, error=None):
if error is None:
error=ArbitraryAccessObject()
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try_times = 0
while True:
try:
ret = func(*args, timeout=timeout, **kwargs)
if ret.status_code != 200:
logging.warning('%s is %s', ret.url, ret.status_code)
if ret.status_code == 404:
raise Timeout
except timeouts:
try_times += 1
error.reconnect(try_times)
else:
return ret
if try_times >= retry_times:
raise Timeout
return wrapper
return decorator
def semalock_for_class(func):
@wraps(func)
def wrapper(self, s, *args, **kwargs):
with s:
return func(self, *args, **kwargs)
return wrapper
def semalock(func):
@wraps(func)
def wrapper(s, *args, **kwargs):
with s:
return func(*args, **kwargs)
return wrapper
def loop(func):
@wraps(func)
def wrapper(*args, **kwargs):
while True:
ret = func(*args, **kwargs)
if ret:
break
return wrapper
def resolve_timeout(replace_value):
"""
return replace value instead of raise timeout
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except timeouts as e:
return replace_value
return wrapper
return decorator
def clear_output(func):
terminal_width, _ = get_terminal_size()
@wraps(func)
def wrapper(*args, **kwargs):
print(' ' * terminal_width, end='\r')
return func(*args, **kwargs)
return wrapper
def prepare_dir(dirname):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
if not os.path.exists(dirname):
os.mkdir(dirname)
return func(*args, **kwargs)
return wrapper
return decorator
|
Python
| 0.999991
|
@@ -3,17 +3,16 @@
port os%0A
-%0A
from fun
@@ -604,17 +604,19 @@
error
-=
+ =
Arbitrar
@@ -1383,18 +1383,30 @@
lock
-_for_class
+(s):%0A def decorator
(fun
@@ -1405,32 +1405,36 @@
ator(func):%0A
+
+
@wraps(func)%0A
@@ -1426,32 +1426,36 @@
wraps(func)%0A
+
+
def wrapper(self
@@ -1454,17 +1454,8 @@
per(
-self, s,
*arg
@@ -1460,32 +1460,36 @@
rgs, **kwargs):%0A
+
with s:%0A
@@ -1492,32 +1492,36 @@
s:%0A
+
return func(self
@@ -1520,108 +1520,8 @@
unc(
-self, *args, **kwargs)%0A return wrapper%0A%0A%0Adef semalock(func):%0A @wraps(func)%0A def wrapper(s,
*arg
@@ -1524,33 +1524,32 @@
*args, **kwargs)
-:
%0A with s:
@@ -1545,56 +1545,22 @@
-with s:%0A return func(*args, **kwargs)
+return wrapper
%0A
@@ -1559,38 +1559,40 @@
pper%0A return
-wrappe
+decorato
r%0A%0A%0Adef loop(fun
@@ -2155,24 +2155,24 @@
tput(func):%0A
-
terminal
@@ -2203,16 +2203,17 @@
_size()%0A
+%0A
@wra
|
0eb08689906556951bacf82166d13cda7a8d720b
|
Update pylsy_test.py
|
tests/pylsy_test.py
|
tests/pylsy_test.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
__author__ = 'choleraehyq'
import unittest
from pylsy.pylsy import PylsyTable
class PylsyTableTests(unittest.TestCase):
def setUp(self):
attributes=["name","age"]
self.table = PylsyTable(attributes)
def tearDown(self):
self.table = None
def testCreateTable(self):
name=["a", "b"]
self.table.add_data("name",name)
age=[1, 2]
self.table.add_data("age",age)
correct_file = open('correct.out', 'r')
correctPrint = correct_file.read()
try:
import io
from contextlib import redirect_stdout
with io.StringIO() as buf, redirect_stdout(buf):
print('redirected')
output = buf.getvalue()
self.assertEqual(output, correctPrint)
except ImportError:
import sys
f_handler = open('test.out', 'w')
sys.stdout=f_handler
self.table.create_table()
f_handler.close()
f_handler = open('test.out', 'r')
self.assertEqual(f_handler.read(), correctPrint)
if __name__ == '__main__':
unittest.main()
|
Python
| 0
|
@@ -60,35 +60,8 @@
ort%0A
-__author__ = 'choleraehyq'%0A
impo
|
c4d784f1b478ca80697e9bbe843ebf84fe124f2b
|
update legacy scripted test to use new syntax
|
lib/rapidsms/tests/scripted.py
|
lib/rapidsms/tests/scripted.py
|
import warnings
from django.test import TestCase
from rapidsms.tests.harness import TestScript as TestScriptMixin
class TestScript(TestScriptMixin, TestCase):
def startRouter(self):
warnings.warn("startRouter is deprecated and will be removed in a future "
"release. Please, see the release notes.", DeprecationWarning, stacklevel=2)
self.clear() # make sure the outbox is clean
def stopRouter(self):
warnings.warn("stopRouter is deprecated and will be removed in a future "
"release. Please, see the release notes.", DeprecationWarning, stacklevel=2)
|
Python
| 0
|
@@ -14,41 +14,8 @@
gs%0A%0A
-from django.test import TestCase%0A
from
@@ -113,18 +113,8 @@
ixin
-, TestCase
):%0A%0A
|
5f8a3a5b02fdc7c1bbadc5a3c739bac2aee75176
|
Add extra check for invalid boxes in random_transform.
|
keras_retinanet/utils/image.py
|
keras_retinanet/utils/image.py
|
"""
Copyright 2017-2018 Fizyr (https://fizyr.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import division
import keras
import time
import numpy as np
import cv2
import PIL
def read_image_bgr(path):
image = np.asarray(PIL.Image.open(path).convert('RGB'))
return image[:, :, ::-1].copy()
def preprocess_image(x):
# mostly identical to "https://github.com/fchollet/keras/blob/master/keras/applications/imagenet_utils.py"
# except for converting RGB -> BGR since we assume BGR already
x = x.astype(keras.backend.floatx())
if keras.backend.image_data_format() == 'channels_first':
if x.ndim == 3:
x[0, :, :] -= 103.939
x[1, :, :] -= 116.779
x[2, :, :] -= 123.68
else:
x[:, 0, :, :] -= 103.939
x[:, 1, :, :] -= 116.779
x[:, 2, :, :] -= 123.68
else:
x[..., 0] -= 103.939
x[..., 1] -= 116.779
x[..., 2] -= 123.68
return x
def random_transform(
image,
boxes,
image_data_generator,
seed=None
):
if seed is None:
seed = np.random.randint(10000)
image = image_data_generator.random_transform(image, seed=seed)
# set fill mode so that masks are not enlarged
fill_mode = image_data_generator.fill_mode
image_data_generator.fill_mode = 'constant'
for index in range(boxes.shape[0]):
# generate box mask and randomly transform it
mask = np.zeros_like(image, dtype=np.uint8)
b = boxes[index, :4].astype(int)
assert(b[0] < b[2] and b[1] < b[3]), 'Annotations contain invalid box: {}'.format(b)
mask[b[1]:b[3], b[0]:b[2], :] = 255
mask = image_data_generator.random_transform(mask, seed=seed)[..., 0]
mask = mask.copy() # to force contiguous arrays
# find bounding box again in augmented image
[i, j] = np.where(mask == 255)
boxes[index, 0] = float(min(j))
boxes[index, 1] = float(min(i))
boxes[index, 2] = float(max(j))
boxes[index, 3] = float(max(i))
# restore fill_mode
image_data_generator.fill_mode = fill_mode
return image, boxes
def resize_image(img, min_side=600, max_side=1024):
(rows, cols, _) = img.shape
smallest_side = min(rows, cols)
# rescale the image so the smallest side is min_side
scale = min_side / smallest_side
# check if the largest side is now greater than max_side, which can happen
# when images have a large aspect ratio
largest_side = max(rows, cols)
if largest_side * scale > max_side:
scale = max_side / largest_side
# resize the image with the computed scale
img = cv2.resize(img, None, fx=scale, fy=scale)
return img, scale
|
Python
| 0
|
@@ -2109,16 +2109,154 @@
ormat(b)
+%0A assert(b%5B2%5D %3C image.shape%5B1%5D and b%5B3%5D %3C image.shape%5B0%5D), 'Annotation (%7B%7D) is outside of image shape (%7B%7D).'.format(b, image.shape)
%0A%0A
|
11134b6c71ce8285797ad659e218873b4a061d8e
|
Fix tests on Python 3
|
tests/sync_tests.py
|
tests/sync_tests.py
|
# -*- coding: utf-8 -*-
# Scheduling interface library
# (C) 2016 VRT Systems
#
# vim: set ts=4 sts=4 et tw=78 sw=4 si:
import pyat.sync
import time
import random
from .util import a_task, a_failing_task, make_args, make_kwargs, \
FailedTaskException
MIN_TIME = 1.0
MAX_TIME = 2.0
DELTA_TIME = MAX_TIME - MIN_TIME
STEP_TIME = 0.1
TIMEOUT_DELAY = 0.5
def test_future():
'''
Test a task scheduled N seconds in the future is executed close to the
requested time.
'''
# Create scheduler
scheduler = pyat.sync.SynchronousTaskScheduler()
# Pick some random arguments
args = make_args()
kwargs = make_kwargs()
# Pick a time between 1-5 seconds in the future
delay = MIN_TIME + (random.random() * DELTA_TIME)
at_time = time.time() + delay
# Schedule task
task = scheduler.schedule(at_time, a_task, *args, **kwargs)
# Pick a timeout
timeout = at_time + TIMEOUT_DELAY
# Wait for timeout
while time.time() < timeout:
time.sleep(STEP_TIME)
scheduler.poll()
# Did our function get run?
try:
(run_at, run_args, run_kwargs) = task.result
assert run_args == args, 'args does not match'
assert run_kwargs == kwargs, 'kwargs does not match'
assert run_at > at_time, 'Ran too early'
assert run_at < (at_time + STEP_TIME), 'Ran too late'
except pyat.sync.NotExecutedYet:
assert False, 'Did not get executed'
def test_future_exception():
'''
Test a task scheduled N seconds in the future that fails.
'''
# Create scheduler
scheduler = pyat.sync.SynchronousTaskScheduler()
# Pick some random arguments
args = make_args()
kwargs = make_kwargs()
# Pick a time between 1-5 seconds in the future
delay = MIN_TIME + (random.random() * DELTA_TIME)
at_time = time.time() + delay
# Schedule task
task = scheduler.schedule(at_time, a_failing_task, *args, **kwargs)
# Pick a timeout
timeout = at_time + TIMEOUT_DELAY
# Wait for timeout
while time.time() < timeout:
time.sleep(STEP_TIME)
scheduler.poll()
# Did our function get run?
try:
(run_at, run_args, run_kwargs) = task.result
assert False, 'Did not fail'
except FailedTaskException, e:
(msg, run_at, run_args, run_kwargs) = e.args
assert run_args == args, 'args does not match'
assert run_kwargs == kwargs, 'kwargs does not match'
assert run_at > at_time, 'Ran too early'
assert run_at < (at_time + STEP_TIME), 'Ran too late'
except pyat.sync.NotExecutedYet:
assert False, 'Did not get executed'
def test_future_cancelled():
'''
Test a task scheduled N seconds in the future then cancelled doesn't
execute.
'''
# Create scheduler
scheduler = pyat.sync.SynchronousTaskScheduler()
# Pick some random arguments
args = make_args()
kwargs = make_kwargs()
# Pick a time between 1-5 seconds in the future
delay = MIN_TIME + (random.random() * DELTA_TIME)
at_time = time.time() + delay
# Schedule task
task = scheduler.schedule(at_time, a_task, *args, **kwargs)
# Poll once
scheduler.poll()
# Cancel the task
task.cancel()
assert task.cancelled, 'Not cancelled'
# Pick a timeout
timeout = at_time + TIMEOUT_DELAY
# Wait for timeout
while time.time() < timeout:
time.sleep(STEP_TIME)
scheduler.poll()
# Did our function get run?
try:
(run_at, run_args, run_kwargs) = task.result
assert False, 'Task executed'
except pyat.sync.NotExecutedYet:
pass
def test_cancel_all():
'''
Test we can cancel all tasks.
'''
# Create scheduler
scheduler = pyat.sync.SynchronousTaskScheduler()
# Pick some random arguments
args = make_args()
kwargs = make_kwargs()
# Pick a time between 1-5 seconds in the future
delay = MIN_TIME + (random.random() * DELTA_TIME)
at_time = time.time() + delay
# Schedule tasks
task1 = scheduler.schedule(at_time, a_task, *args, **kwargs)
task2 = scheduler.schedule(at_time + 1, a_task, *args, **kwargs)
task3 = scheduler.schedule(at_time + 2, a_task, *args, **kwargs)
# Poll once
scheduler.poll()
# Cancel all tasks
scheduler.cancel_all()
# Pick a timeout
timeout = at_time + TIMEOUT_DELAY
# Wait for timeout
while time.time() < timeout:
time.sleep(STEP_TIME)
scheduler.poll()
# Did our functions get run?
for task in (task1, task2, task3):
try:
(run_at, run_args, run_kwargs) = task.result
assert False, 'Task executed'
except pyat.sync.NotExecutedYet:
pass
def test_not_yet_executed():
'''
Test NotYetExecuted gets raised if we ask the task early.
'''
# Create scheduler
scheduler = pyat.sync.SynchronousTaskScheduler()
# Pick some random arguments
args = make_args()
kwargs = make_kwargs()
# Pick a time between 1-5 seconds in the future
delay = MIN_TIME + (random.random() * DELTA_TIME)
at_time = time.time() + delay
# Schedule task
task = scheduler.schedule(at_time, a_task, *args, **kwargs)
# Poll once
scheduler.poll()
# Did our function get run?
try:
(run_at, run_args, run_kwargs) = task.result
assert False, 'Task executed early'
except pyat.sync.NotExecutedYet:
pass
|
Python
| 0.000991
|
@@ -2285,17 +2285,19 @@
xception
-,
+ as
e:%0A
|
e7dd2d6680bc01776a7a8fc0be1b8bd1985cdfe3
|
add docstrings. fix small bugs
|
lib/neuroimaging/core/image/image.py
|
lib/neuroimaging/core/image/image.py
|
"""
The core Image class.
"""
import types
import numpy as N
from neuroimaging import flatten
from neuroimaging.data_io import DataSource, splitzipext
from neuroimaging.data_io.formats import getformats, Format
from neuroimaging.core.image.base_image import ArrayImage
class Image(object):
"""
The Image class provides the core object type used in nipy. An Image
represents a volumetric brain image and provides means for manipulating and
reading and writing this data to file.
"""
@staticmethod
def fromurl(url, datasource=DataSource(), format=None, grid=None, mode="r",
clobber=False, **keywords):
"""
Create an Image from the given url/filename
"""
# remove any zip extensions
url = splitzipext(url)[0]
if not format:
valid = getformats(url)
else:
valid = [format]
for format in valid:
try:
return format(filename=url,
datasource=datasource, mode=mode, clobber=clobber,
grid=grid, **keywords)
except Exception, e:
# print e
pass
raise NotImplementedError, 'no valid reader found for URL %s' % url
def __init__(self, image, datasource=DataSource(), grid=None, **keywords):
'''
Create an Image (volumetric image) object from either a file, an
existing Image object, or an array.
'''
# from existing Image
if isinstance(image, Image):
self._source = image._source
# from existing Format instance
elif isinstance(image, Format):
self._source = image
# from array
elif isinstance(image, N.ndarray) or isinstance(image, N.core.memmap):
self._source = ArrayImage(image, grid=grid)
# from filename or url
elif type(image) == types.StringType:
self._source = \
self.fromurl(image, datasource, grid=grid, **keywords)
else:
raise ValueError(
"Image input must be a string, array, or another image.")
# Find spatial grid -- this is the one that will be used generally
self.grid = self._source.grid
self.shape = list(self.grid.shape)
self.ndim = len(self.shape)
# Attach memory-mapped array or array as buffer attr
self.buffer = self._source.data
def __getitem__(self, slice_):
return self._source[slice_]
def __setitem__(self, slice_, data):
self._source[slice_] = data
def __iter__(self):
""" Create an iterator over an image based on its grid's iterator."""
iter(self.grid)
return self
def compress(self, where, axis=None):
"""
Call the compress method on the underlying data array
"""
return self.buffer.compress(where, axis=axis)
def put(self, indices, data):
"""
Call the put method on the underlying data array
"""
return self.buffer.put(indices, data)
def next(self):
value = self.grid.next()
itertype = self.grid.get_iter_param("itertype")
if itertype is 'slice':
result = N.squeeze(self[value.slice])
elif itertype is 'parcel':
flatten(value.where)
result = self.compress(value.where)
elif itertype == 'slice/parcel':
result = self[value.slice].compress(value.where)
return result
def set_next(self, data):
value = self.grid.next()
itertype = self.grid.get_iter_param("itertype")
if itertype is 'slice':
self[value.slice] = data
elif itertype in ('parcel', "slice/parcel"):
self.put(N.nonzero(value.where.flatten()), data)
def toarray(self, clean=True, **keywords):
"""
Return a Image instance that has an ArrayImage as its _source attribute.
>>> from numpy import *
>>> from BrainSTAT import *
>>> test = Image(testfile('anat+orig.HEAD'))
>>> _test = test.toarray()
>>> print _test.source.data.shape
(124, 256, 256)
>>> test = Image(testfile('test_fmri.img'))
>>> _test = test.toarray(slice=(2,), grid=test.grid)
>>> print _test.shape
(13, 128, 128)
"""
data = self.readall()
if clean and \
data.dtype.type in N.sctypes['float'] + N.sctypes['complex']:
data = N.nan_to_num(data)
return Image(data, grid=self.grid, **keywords)
def tofile(self, filename, clobber=False,
sctype=None, **keywords):
"""
Write the image to a file. Returns a new Image object
of the newly written file.
"""
sctype = sctype or self._source.sctype
outimage = Image(filename, mode='w', grid=self.grid,
clobber=clobber,
sctype=sctype,
**keywords)
tmp = self.toarray(**keywords)
outimage[:] = tmp[:]
return outimage
def readall(self, clean=False):
"""
Read an entire Image object, returning a numpy, not another instance of
Image. By default, it does not read 4d images. Missing values are
filled in with 0
"""
value = self[self.grid.allslice()]
if clean:
value = N.nan_to_num(value)
return value
class ImageSequenceIterator(object):
"""
Take a sequence of images, and an optional grid (which defaults to
imgs[0].grid) and create an iterator whose next method returns array with
shapes (len(imgs),) + self.imgs[0].next().shape Very useful for voxel-based
methods, i.e. regression, one-sample t.
"""
def __init__(self, imgs, grid=None):
self.imgs = imgs
if grid is None:
self.grid = iter(self.imgs[0].grid)
else:
self.grid = iter(grid)
def __iter__(self):
""" Return self as an iterator. """
_ = [iter(img) for img in self.imgs]
return self
def next(self):
val = [img.next() for img in self.imgs]
return N.array(val, N.float64)
|
Python
| 0.000761
|
@@ -614,31 +614,16 @@
- clobber=False,
**keywo
@@ -704,25 +704,16 @@
%22%22%22%0A
- %0A
@@ -1042,33 +1042,16 @@
de=mode,
- clobber=clobber,
%0A
@@ -1141,16 +1141,20 @@
+
# pri
@@ -3108,32 +3108,97 @@
def next(self):%0A
+ %22%22%22%0A Return the next iterator value.%0A %22%22%22 %0A
value =
@@ -3550,32 +3550,43 @@
slice%5D.compress(
+N.asarray(%5B
value.where)%0A
@@ -3572,32 +3572,34 @@
ray(%5Bvalue.where
+%5D)
)%0A return
@@ -3629,32 +3629,254 @@
xt(self, data):%0A
+ %22%22%22%0A Set the next iterator value.%0A%0A%0A This method works in the same way as next(), in that it%0A requires __iter__ to have been called, and will advance%0A the iterator.%0A %22%22%22%0A %0A
value =
@@ -6496,32 +6496,80 @@
def next(self):%0A
+ %22%22%22 Return the next iterator value. %22%22%22%0A
val = %5Bi
|
0c8e92d78983635778043095740cc508259101ba
|
Fix typos in tests
|
tests/test_async.py
|
tests/test_async.py
|
# -*- coding: utf-8 -*-
from asyncio import Future, gather, new_event_loop, sleep
from mock import Mock
from twisted.internet.defer import ensureDeferred
from pyee import EventEmitter
class PyeeTestException(Exception):
pass
def test_asyncio_emit():
"""Test that event_emitters can handle wrapping coroutines as used with
asyncio.
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(0.1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
assert result
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
loop.close()
def test_asyncio_error():
"""Test that event_emitters can handle errors when wrapping coroutines as
used with asyncio.
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
raise PyeeTestException()
@ee.on('error')
def handle_error(exc):
should_call.set_result(exc)
async def create_timeout(loop=loop):
await sleep(0.1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
assert isinstance(result, PyeeTestError)
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
loop.close()
def test_twisted_emit():
"""Test that event_emitters can handle wrapping coroutines when using
twisted and ensureDeferred.
"""
ee = EventEmitter(scheduler=ensureDeferred)
should_call = Mock()
@ee.on('event')
async def event_handler():
should_call(True)
ee.emit('event')
should_call.assert_called_once()
def test_twisted_error():
"""Test that event_emitters can handle wrapping coroutines when using
twisted and ensureDeferred.
"""
ee = EventEmitter(scheduler=ensureDeferred)
should_call = Mock()
@ee.on('event')
async def event_handler():
raise PyeeTestException()
@ee.on('error')
def handle_error(e):
should_call(e)
ee.emit('event')
should_call.assert_called_once()
|
Python
| 0.034948
|
@@ -195,24 +195,20 @@
yeeTestE
-xception
+rror
(Excepti
@@ -1276,32 +1276,28 @@
se PyeeTestE
-xception
+rror
()%0A%0A @ee.
@@ -2481,24 +2481,20 @@
yeeTestE
-xception
+rror
()%0A%0A
|
cf7b7e015dd8e428317cdfd8000bb49073a5758f
|
Update tih.py
|
tih.py
|
tih.py
|
__author__ = '@abhinavbom a.k.a Darkl0rd'
#local imports
from lib.parse import *
from api.vt import *
from api.urlvoid import *
from lib.updatefeed import gather
#stdlib imports
import argparse
banner = ''' Threat Intelligence Hunter framework Begins now '''
print banner
def main():
print "Intel test"
parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('-ip', type=str, nargs='+', help="Search for a"
"single IP through OSINT threat feeds")
parser.add_argument('-list', type=str, nargs='?', help="Search for a list of IP vectors. Accepted formats is .txt")
parser.add_argument('-md5', type=str, nargs='+', help="Search for a single of space separated multiple MD5s. "
"This search is performed on Virustotal hence only 4 searches "
"per minute is allowed. Please add your public key to bin/vt.py")
parser.add_argument('-url', type=str, nargs='+', help="Search for a single of space separated multiple urls. "
"This search is performed on Virustotal hence only 4 searches "
"per minute is allowed. Please add your public key to bin/vt.py")
parser.add_argument('-repo', type=str, nargs='?', help="Search for the reputation of a list of URLs. The script"
"accepts a txt file containing list of domains and searches it"
"against popular reputation tools like URLVoid, Bluecoat etc.")
parser.add_argument('-update', action='store_true', help='Update the local storage of feeds data.')
args = parser.parse_args()
if args.ip:
if len(args.ip) > 4:
print "Too many argument values specified. Maximum arguments per minute is 4."
sys.exit(1)
parse_ip(args.ip)
vt_ip(args.ip)
if args.list:
parse_ipList(list)
if args.md5:
if len(args.md5) > 4:
print "Too many argument values specified. Maximum arguments per minute is 4."
sys.exit(1)
vt_md5(args.md5)
if args.url:
if len(args.url) > 4:
print "Too many argument values specified. Maximum arguments per minute is 4."
sys.exit(1)
vt_url(args.url)
if args.repo:
urlvoid(args.repo)
if args.update:
print "updating"
gather()
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -1021,36 +1021,32 @@
r public key to
-bin/
vt.py%22)%0A%0A par
@@ -1442,33 +1442,33 @@
ype=str, nargs='
-?
++
', help=%22Search
@@ -1589,29 +1589,23 @@
pts
-a txt file containing
+comma separated
lis
@@ -1617,16 +1617,23 @@
domains
+or IPs
and sear
|
8067af0c58ad3815fb15b530708bcb96a1874f3c
|
Add unit test for removing an element from an Ordering
|
tests/test_basic.py
|
tests/test_basic.py
|
from unittest import TestCase
from ordering import Ordering
class TestOrderingBasic(TestCase):
def test_empty_insert_start(self) -> None:
ordering = Ordering[int]()
ordering.insert_start(0)
self.assertIn(0, ordering)
self.assertNotIn(1, ordering)
def test_empty_insert_end(self) -> None:
ordering = Ordering[int]()
ordering.insert_end(0)
self.assertIn(0, ordering)
self.assertNotIn(1, ordering)
def test_basic_insert_after(self) -> None:
ordering = Ordering[int]()
ordering.insert_start(0)
ordering.insert_after(0, 1)
self.assertIn(0, ordering)
self.assertIn(1, ordering)
self.assertNotIn(2, ordering)
def test_basic_insert_before(self) -> None:
ordering = Ordering[int]()
ordering.insert_start(0)
ordering.insert_before(0, 1)
self.assertIn(0, ordering)
self.assertIn(1, ordering)
self.assertNotIn(2, ordering)
def test_basic_compare(self) -> None:
ordering = Ordering[int]()
ordering.insert_start(0)
ordering.insert_after(0, 1)
ordering.insert_before(0, 2)
self.assertTrue(ordering.compare(0, 1))
self.assertFalse(ordering.compare(1, 0))
self.assertTrue(ordering.compare(2, 0))
self.assertFalse(ordering.compare(0, 2))
|
Python
| 0
|
@@ -459,32 +459,275 @@
n(1, ordering)%0A%0A
+ def test_remove(self) -%3E None:%0A ordering = Ordering%5Bint%5D()%0A%0A self.assertNotIn(0, ordering)%0A ordering.insert_start(0)%0A self.assertIn(0, ordering)%0A ordering.remove(0)%0A self.assertNotIn(0, ordering)%0A%0A
def test_bas
|
44429e709c9ac047d7a311dac44dde57d9d2d403
|
Add test case to ensure filtering is applied on `Calls.iter` method.
|
tests/test_calls.py
|
tests/test_calls.py
|
from datetime import date
from mock import patch, Mock
from nose.tools import assert_true
from twilio.rest.resources import Calls, Call
from tools import create_mock_json
BASE_URI = "https://api.twilio.com/2010-04-01/Accounts/AC123"
ACCOUNT_SID = "AC123"
AUTH = (ACCOUNT_SID, "token")
CALL_SID = "CA47e13748ed59a5733d2c1c1c69a83a28"
list_resource = Calls(BASE_URI, AUTH)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_create_call(mock):
resp = create_mock_json("tests/resources/calls_instance.json")
resp.status_code = 201
mock.return_value = resp
uri = "%s/Calls" % (BASE_URI)
list_resource.create("TO", "FROM", "url", record=True, application_sid='APPSID')
exp_params = {
'To': "TO",
'From': "FROM",
'Url': "url",
'Record': "true",
'ApplicationSid': 'APPSID',
}
mock.assert_called_with("POST", uri, data=exp_params, auth=AUTH)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_paging(mock):
resp = create_mock_json("tests/resources/calls_list.json")
mock.return_value = resp
uri = "%s/Calls" % (BASE_URI)
list_resource.list(started_before=date(2010, 12, 5))
exp_params = {'StartTime<': '2010-12-05'}
mock.assert_called_with("GET", uri, params=exp_params, auth=AUTH)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_get(mock):
resp = create_mock_json("tests/resources/calls_instance.json")
mock.return_value = resp
uri = "%s/Calls/%s" % (BASE_URI, CALL_SID)
list_resource.get(CALL_SID)
mock.assert_called_with("GET", uri, auth=AUTH)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_hangup(mock):
resp = create_mock_json("tests/resources/calls_instance.json")
resp.status_code = 204
mock.return_value = resp
uri = "%s/Calls/%s" % (BASE_URI, CALL_SID)
r = list_resource.hangup(CALL_SID)
exp_data = {"Status": "completed"}
mock.assert_called_with("POST", uri, data=exp_data, auth=AUTH)
assert_true(r)
@patch("twilio.rest.resources.base.make_twilio_request")
def test_cancel(mock):
resp = create_mock_json("tests/resources/calls_instance.json")
resp.status_code = 204
mock.return_value = resp
uri = "%s/Calls/%s" % (BASE_URI, CALL_SID)
r = list_resource.cancel(CALL_SID)
exp_data = {"Status": "canceled"}
mock.assert_called_with("POST", uri, data=exp_data, auth=AUTH)
assert_true(r)
@patch("twilio.rest.resources.base.Resource.request")
def test_delete(req):
""" Deleting a call should work """
resp = Mock()
resp.content = ""
resp.status_code = 204
req.return_value = resp, {}
app = Call(list_resource, "CA123")
app.delete()
uri = "https://api.twilio.com/2010-04-01/Accounts/AC123/Calls/CA123"
req.assert_called_with("DELETE", uri)
|
Python
| 0
|
@@ -1300,32 +1300,427 @@
s, auth=AUTH)%0A%0A%0A
+@patch(%22twilio.rest.resources.base.make_twilio_request%22)%0Adef test_paging_iter(mock):%0A resp = create_mock_json(%22tests/resources/calls_list.json%22)%0A mock.return_value = resp%0A%0A uri = %22%25s/Calls%22 %25 (BASE_URI)%0A list_resource.iter(started_before=date(2010, 12, 5)).next()%0A exp_params = %7B'StartTime%3C': '2010-12-05'%7D%0A%0A mock.assert_called_with(%22GET%22, uri, params=exp_params, auth=AUTH)%0A%0A%0A
@patch(%22twilio.r
|
fc2bd8f2434f841f69b992614e2d34071eee71f7
|
Rename class name in test to make it clear what it is.
|
tests/test_cases.py
|
tests/test_cases.py
|
from mock import sentinel, MagicMock
from unittest2 import TestCase
import itertools
from exam.decorators import fixture, before, after, around, patcher
from exam.cases import Exam
from describe import expect
from dummy import get_thing, get_it
class FakeTest(object):
def __init__(self):
self.cleanups = []
self.setups = 0
self.teardowns = 0
def setUp(self):
self.setups += 1
def tearDown(self):
self.teardowns += 1
def run(self, *args, **kwargs):
self.state_when_run = list(self.calls)
def addCleanup(self, func):
self.cleanups.append(func)
class DummyTest(Exam, FakeTest):
@patcher('tests.dummy.thing')
def dummy_thing(self):
return sentinel.mock
dummy_it = patcher('tests.dummy.it', return_value=12)
def __init__(self):
self.calls = []
super(DummyTest, self).__init__()
@before
def append_one(self):
self.calls.append(1)
@after
def append_two(self):
self.calls.append(2)
@around
def append_5_then_6(self):
self.calls.append(5)
yield
self.calls.append(6)
class ExtendedDummy(DummyTest):
@before
def append_3(self):
self.calls.append(3)
@after
def append_4(self):
self.calls.append(4)
@around
def append_7_then_8(self):
self.calls.append(7)
yield
self.calls.append(8)
# TODO: Make the subclass checking just be a subclass of the test case
class TestExam(Exam, TestCase):
@fixture
def case(self):
return DummyTest()
@fixture
def subclass_case(self):
return ExtendedDummy()
@after
def stop_patchers(self):
cleanups = (self.case.cleanups, self.subclass_case.cleanups)
for cleanup in itertools.chain(*cleanups):
if hasattr(cleanup.im_self, 'is_local'): # Is the mock started?
cleanup()
@property
def other_thing(self):
return get_thing()
@property
def other_it(self):
return get_it()
def test_before_adds_each_method_to_set_up(self):
expect(self.case.calls).to == []
self.case.setUp()
expect(self.case.calls).to == [1]
def test_after_adds_each_method_to_tear_down(self):
expect(self.case.calls).to == []
self.case.tearDown()
expect(self.case.calls).to == [2]
def test_around_calls_methods_before_and_after_run(self):
expect(self.case.calls).to == []
self.case.run()
expect(self.case.state_when_run).to == [5]
expect(self.case.calls).to == [5, 6]
def test_before_works_on_subclasses(self):
expect(self.subclass_case.calls).to == []
self.subclass_case.setUp()
expect(self.subclass_case.calls).to == [3, 1]
def test_after_works_on_subclasses(self):
expect(self.subclass_case.calls).to == []
self.subclass_case.tearDown()
expect(self.subclass_case.calls).to == [4, 2]
def test_around_works_with_subclasses(self):
expect(self.subclass_case.calls).to == []
self.subclass_case.run()
expect(self.subclass_case.state_when_run).to == [7, 5]
expect(self.subclass_case.calls).to == [7, 5, 8, 6]
def test_patcher_start_value_is_added_to_case_dict_on_setup(self):
self.case.setUp()
expect(self.case.dummy_thing).to == sentinel.mock
def test_patcher_patches_object_on_setup_and_adds_patcher_to_cleanup(self):
expect(self.other_thing).to != sentinel.mock
self.case.setUp()
expect(self.other_thing).to == sentinel.mock
[cleanup() for cleanup in self.case.cleanups]
expect(self.other_thing).to != sentinel.mock
def test_patcher_lifecycle_works_on_subclasses(self):
expect(self.other_thing).to != sentinel.mock
self.subclass_case.setUp()
expect(self.other_thing).to == sentinel.mock
[cleanup() for cleanup in self.subclass_case.cleanups]
expect(self.other_thing).to != sentinel.mock
def test_patcher_patches_with_a_magic_mock_if_no_function_decorated(self):
expect(self.other_it()).to != 12
self.case.setUp()
expect(self.other_it()).to == 12
self.case.cleanups[0]()
expect(self.other_thing).to != 12
def test_calls_super_setup(self):
expect(self.case.setups).to == 0
self.case.setUp()
expect(self.case.setups).to == 1
def test_calls_super_teardown(self):
expect(self.case.teardowns).to == 0
self.case.tearDown()
expect(self.case.teardowns).to == 1
|
Python
| 0
|
@@ -249,24 +249,30 @@
%0A%0Aclass
-Fak
+Simpl
eTest
+Case
(object)
@@ -656,16 +656,22 @@
am,
-Fak
+Simpl
eTest
+Case
):%0A%0A
|
95f2eaa662fc5650608447cb18f8b1df4be5f7c3
|
ImportError take 2
|
landlab/components/__init__.py
|
landlab/components/__init__.py
|
from .craters import CratersComponent
from .chi_index import ChiFinder
from .diffusion import LinearDiffuser
from .fire_generator import FireGenerator
from .flexure import Flexure
from .flow_accum import AccumFlow
from .flow_routing import FlowRouter, DepressionFinderAndRouter
from .glacier_thin_ice_model import Glacier
from .nonlinear_diffusion import PerronNLDiffuse
from .overland_flow import OverlandFlowBates, OverlandFlow
from .pet import PotentialEvapotranspiration
from .potentiality_flowrouting import PotentialityFlowRouter
from .radiation import Radiation
from .sed_trp_shallow_flow import SurfaceFlowTransport
from .single_vegetation import Vegetation
from .sink_fill import SinkFiller
from .soil_moisture import SoilMoisture
from .steepness_index import SteepnessFinder
from .stream_power import StreamPowerEroder, FastscapeEroder, SedDepEroder
from .transport_limited_fluvial import TransportLimitedEroder
from .uniform_precip import PrecipitationDistribution
from .vegetation_ca import VegCA
COMPONENTS = [CratersComponent, ChiFinder, LinearDiffuser, FireGenerator,
Flexure, AccumFlow, FlowRouter, DepressionFinderAndRouter,
Glacier, PerronNLDiffuse, OverlandFlowBates,
OverlandFlow, PotentialEvapotranspiration,
PotentialityFlowRouter, Radiation, SurfaceFlowTransport,
PowerLawIncision, Vegetation, SinkFiller, SoilMoisture,
StreamPowerEroder, FastscapeEroder, SedDepEroder,
TransportLimitedEroder, SteepnessFinder,
PrecipitationDistribution, VegCA]
__all__ = [cls.__name__ for cls in COMPONENTS]
|
Python
| 0.999076
|
@@ -1355,26 +1355,8 @@
- PowerLawIncision,
Veg
|
c6afad83c3269995bd5914e2835dc10dba33ad3e
|
Fix tests after class rename
|
tests/test_clone.py
|
tests/test_clone.py
|
import os
from os.path import dirname, join, isfile
from shutil import rmtree
import unittest
from cvsgit.command.init import init
from cvsgit.command.clone import clone
from cvsgit.command.pull import pull
from cvsgit.command.verify import verify
from cvsgit.git import Git
from cvsgit.utils import Tempdir
class Test(unittest.TestCase):
def test_clone(self):
"""Clone the greek tree and verify it.
"""
with Tempdir(cwd=True) as tempdir:
source = join(dirname(__file__), 'data', 'greek', 'tree')
self.assertEquals(clone().eval('--quiet', source), 0)
os.chdir('tree')
self.assertEquals(0, verify().eval())
def test_clone_bare(self):
"""Clone the greek tree into a bare repository.
"""
with Tempdir(cwd=True) as tempdir:
source = join(dirname(__file__), 'data', 'greek', 'tree')
self.assertEquals(clone().eval('--quiet', '--bare', source), 0)
self.assertTrue(isfile(join(tempdir, 'tree', 'config')))
def test_clone_with_zombie_rcs_file(self):
"""Clone a repository that has a misplaced RCS file.
This repository has a zombie copy of a file that was actually
moved to Attic.
"""
with Tempdir(cwd=True):
source = join(dirname(__file__), 'data', 'zombie')
self.assertEquals(0, clone().eval('--quiet', source))
os.chdir('zombie')
# FIXME: zombie repository fails verification
#self.assertEquals(0, verify().eval())
def test_clone_partial_alternative(self):
"""Calling "pull --limit" several times is basically the same
as cloning everything (given that it's done enough times or
that limit is high enough.)
"""
head1 = None
with Tempdir(cwd=True) as tempdir:
source = join(dirname(__file__), 'data', 'zombie')
self.assertEquals(0, clone().eval('--quiet', source))
os.chdir('zombie')
head1 = Git().rev_parse('HEAD')
head2 = None
with Tempdir(cwd=True) as tempdir:
source = join(dirname(__file__), 'data', 'zombie')
self.assertEquals(0, init().eval('--quiet', source))
self.assertEquals(0, pull().eval('--quiet', '--limit=1'))
self.assertNotEqual(head1, Git().rev_parse('HEAD'))
self.assertEquals(0, pull().eval('--quiet', '--limit=2'))
self.assertNotEqual(head1, Git().rev_parse('HEAD'))
self.assertEquals(0, pull().eval('--quiet', '--limit=3'))
self.assertEqual(head1, Git().rev_parse('HEAD'))
def test_git_clone_from_cvs_clone(self):
"""Cloning a new Git repo from a bare CVS tracking repo.
"""
head1 = None
with Tempdir(cwd=True) as tempdir:
source = join(dirname(__file__), 'data', 'zombie')
self.assertEquals(0, clone().eval('--quiet', source, 'test.git'))
Git().check_command('clone', '--quiet', 'test.git')
self.assertEquals(Git('test.git').rev_parse('HEAD'),
Git('test').rev_parse('HEAD'))
self.assertEquals('refs/heads/master',
Git('test').symbolic_ref('HEAD'))
|
Python
| 0.000001
|
@@ -158,17 +158,17 @@
import
-c
+C
lone%0Afro
@@ -235,17 +235,17 @@
import
-v
+V
erify%0Afr
@@ -555,33 +555,33 @@
lf.assertEquals(
-c
+C
lone().eval('--q
@@ -653,33 +653,33 @@
assertEquals(0,
-v
+V
erify().eval())%0A
@@ -921,17 +921,17 @@
tEquals(
-c
+C
lone().e
@@ -1372,33 +1372,33 @@
assertEquals(0,
-c
+C
lone().eval('--q
@@ -1536,17 +1536,17 @@
uals(0,
-v
+V
erify().
@@ -1938,33 +1938,33 @@
assertEquals(0,
-c
+C
lone().eval('--q
@@ -2929,17 +2929,17 @@
uals(0,
-c
+C
lone().e
|
c5c2d462731e79d9b7d8836e47bae10116c1ee81
|
test cancelled callback
|
tests/test_defer.py
|
tests/test_defer.py
|
import sys
sys.path.insert(0, '..')
import unittest
import pyev
loop = pyev.default_loop()
from whizzer.defer import Deferred, CancelledError, AlreadyCalledError, TimeoutError
def throw_always(result):
raise Exception("success")
def one_always(result):
return 1
def add(a, b):
return a+b
class FakeLogger(object):
def __init__(self):
self.info_msg = ""
self.debug_msg = ""
self.warn_msg = ""
self.error_msg = ""
def info(self, msg):
self.info_msg = msg
def debug(self, msg):
self.debug_msg = msg
def warn(self, msg):
self.warn_msg = msg
def error(self, msg):
self.error_msg = msg
class TestDeferred(unittest.TestCase):
def setUp(self):
self.logger = FakeLogger()
self.deferred = Deferred(loop, logger=self.logger)
self.result = None
def tearDown(self):
self.deferred = None
self.result = None
def set_result(self, result):
self.result = result
def set_exception(self, exception):
self.exception = exception
def test_callback(self):
self.deferred.add_callback(self.set_result)
self.deferred.callback(5)
self.assertTrue(self.result==5)
def test_callback_chain(self):
d = self.deferred
d.add_callback(add, 1)
d.add_callback(self.set_result)
self.deferred.callback(5)
self.assertTrue(self.result==6)
def test_log_error(self):
"""Unhandled exceptions should be logged if the deferred is deleted."""
self.deferred.add_callback(throw_always)
self.deferred.callback(None)
self.deferred = None # delete it
self.assertTrue(self.logger.error_msg != "")
def test_errback(self):
self.deferred.add_errback(self.set_result)
self.deferred.errback(5)
self.assertTrue(self.result==5)
def test_callback_skips(self):
"""When a callback raises an exception
all callbacks without errbacks are skipped until the next
errback is found.
"""
self.deferred.add_callback(throw_always)
self.deferred.add_callback(one_always)
self.deferred.add_callback(add, 2)
self.deferred.add_errback(one_always)
self.deferred.add_callback(self.set_result)
self.deferred.callback(None)
self.assertTrue(self.result==1)
def test_errback_reraised(self):
"""If an errback raises, then the next errback is called."""
self.deferred.add_errback(throw_always)
self.deferred.add_errback(self.set_result)
self.deferred.errback(None)
self.assertTrue(isinstance(self.result, Exception))
def test_cancelled(self):
self.deferred.cancel()
self.assertRaises(CancelledError, self.deferred.errback, None)
self.assertRaises(CancelledError, self.deferred.callback, None)
def test_already_called(self):
self.deferred.callback(None)
self.assertRaises(AlreadyCalledError, self.deferred.errback, None)
self.assertRaises(AlreadyCalledError, self.deferred.callback, None)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000001
|
@@ -3125,16 +3125,207 @@
None)%0A%0A
+ def test_cancel_callback(self):%0A self.deferred = Deferred(loop, cancelled_cb=self.set_result)%0A self.deferred.cancel()%0A self.assertTrue(self.result == self.deferred)%0A%0A
%0Aif __na
|
3b1d82170e0e29f1096e95393b4e0cea55a2dce8
|
fix broken test
|
tests/test_hnapi.py
|
tests/test_hnapi.py
|
"""
Tests
"""
from __future__ import unicode_literals
import unittest
from hnapi.connectors.api_connector import ApiConnector
from hnapi.connectors.api_connector import NetworkError
#pylint: disable=too-many-public-methods
class HnapiTest(unittest.TestCase):
"""
Test hnapi
"""
def test_get_item_by(self):
"""
Test item retrieval and 'by' field
"""
con = ApiConnector()
item = con.get_item(8863)
byline = item.get('by')
self.assertEqual(byline, 'dhouston')
def test_get_comment(self):
"""
Test retrieval of a comment
"""
con = ApiConnector()
comment = con.get_item(15)
byline = comment.get('by')
self.assertEqual(byline, 'sama')
def test_get_max_item(self):
"""
Test retrieval of the max item without error
"""
con = ApiConnector()
max_item_id = con.get_max_item()
max_item = con.get_item(max_item_id)
self.assertTrue(max_item.get('id') > 0)
def test_get_updates_users(self):
"""
Test retrieval of new users
"""
con = ApiConnector()
updates = con.get_updates()
self.assertTrue(len(updates.get('profiles')) > 1)
user = con.get_user(updates.get('profiles')[0])
year_2001 = 1000000000
self.assertTrue(user.get('created') > year_2001)
def test_get_updates_item(self):
"""
Test retrieval of new items
"""
con = ApiConnector()
updates = con.get_updates()
self.assertTrue(len(updates.get('items')) > 1)
item = con.get_item(updates.get('items')[0])
year_2001 = 1000000000
self.assertTrue(item.get('time') > year_2001)
def test_get_top(self):
"""
Test retrieval of first and last items from /top endpoint
"""
con = ApiConnector()
top = con.get_top()
self.assertTrue(len(top) == 500)
item_0 = con.get_item(top[0])
self.assertTrue(con.is_api_item(item_0))
item_100 = con.get_item(top[-1])
self.assertTrue(con.is_api_item(item_100))
def test_bad_api_request(self):
"""
Test that api fails with appropriate error
"""
con = ApiConnector()
self.assertRaises(NetworkError, \
con.request, "http://hacker-news.firebaseio.com/v0/foobar")
def test_set_timeout_error(self):
"""
Test that set_timeout throws a RuntimeError
"""
con = ApiConnector()
self.assertRaises(RuntimeError, con.set_timeout, -1)
def test_set_timeout(self):
"""
Test set_timeout
"""
con = ApiConnector()
con.set_timeout(4)
self.assertEqual(con.timeout, 4)
def test_get_kids(self):
"""
Test retrieval of comment usernames from a story
"""
con = ApiConnector()
item = con.get_item(8863)
user_dict = con.get_kids(item)
self.assertEqual(user_dict['noisemaker'], 'noisemaker')
self.assertEqual(user_dict['jganetsk'], 'jganetsk')
self.assertEqual(user_dict['vlad'], 'vlad')
def test_get_surrogate_item(self):
"""
Test retrieval of item that isn't really an item
"""
con = ApiConnector()
item = con.get_item(8847790)
self.assertTrue(con.is_valid_item(item))
byline = item.get('by')
self.assertEqual(byline, '')
def test_get_poll_item(self):
"""
Test retrieval of 'poll'
"""
con = ApiConnector()
item = con.get_item(7059569)
self.assertTrue(con.is_valid_item(item))
self.assertEqual(item.get('type'), 'poll')
def test_is_dead_true(self):
"""
Test that a dead item is determined to be dead
"""
con = ApiConnector()
item = con.get_item(8937830)
self.assertTrue(con.is_dead_item(item))
def test_is_dead_false(self):
"""
Test that a non-dead item is determined to be not dead
"""
con = ApiConnector()
item = con.get_item(2549)
self.assertFalse(con.is_dead_item(item))
def test_make_item_endpoint_error(self):
"""
Test that make_item_endpoint throws an error when it takes a
non-integer parameter
"""
con = ApiConnector()
self.assertRaises(RuntimeError, con.make_item_endpoint, "asdf")
def test_request_retry(self):
"""Test that the retry occurs"""
con = ApiConnector()
self.assertRaises(NetworkError, \
con.request, 'https://hacker-news.firebaseio.com/v0/foobar/1.json')
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000005
|
@@ -1967,12 +1967,11 @@
op)
-== 5
+%3E 1
00)%0A
|
014184197b6eeede4a8681a446aa1a8e7bdce9fa
|
Update views.py
|
demo/views.py
|
demo/views.py
|
import flask
import sys
from flask import request
app = flask.Flask(__name__)
@app.route('/')
def hello_world():
return flask.jsonify({
'message': 'Hello Worlb!',
'python': sys.version,
'headers': str(request.headers)
})
|
Python
| 0
|
@@ -170,9 +170,9 @@
Worl
-b
+d
!',%0A
|
08d8018f613bad8e07c510fdecc7392c798de8b8
|
Make deploy search in config dir first.
|
deployment.py
|
deployment.py
|
from __future__ import with_statement
import shutil
import os
import sys
import logging
logging.basicConfig(stream=sys.stderr, format="%(levelname)s:: %(message)s", level=logging.INFO)
from string import Template
class DeploySite(object):
def __init__(self, **env):
self.env = env
for arg in ('ROOT', 'PROJECT_NAME', 'PYTHON_VERSION'):
if arg not in self.env:
raise ValueError("Argument '%s' is required." % arg)
if 'PYTHON_BASE' not in self.env:
self.env['PYTHON_BASE'] = os.path.join(self.env['ROOT'], 'pythonenv')
if 'PYTHON_BIN' not in self.env:
self.env['PYTHON_BIN'] = os.path.join(
self.env['PYTHON_BASE'], 'bin', 'python') + self.env['PYTHON_VERSION']
if 'PYTHON_SITE' not in self.env:
self.env['PYTHON_SITE'] = os.path.join(
self.env['PYTHON_BASE'], 'lib',
'python' + self.env['PYTHON_VERSION'], 'site-packages')
if 'APP_DIR' not in self.env:
self.env['APP_DIR'] = os.path.join(self.env['ROOT'], 'application')
if 'CONFIG_DIR' not in self.env:
self.env['CONFIG_DIR'] = os.path.join(self.env['ROOT'], 'etc')
if 'MEDIA_DIR' not in self.env:
self.env['MEDIA_DIR'] = os.path.join(self.env['ROOT'], 'www', 'media')
self._logger = logging.getLogger("deployment")
def info(self, *args, **kwargs):
self._logger.info(*args, **kwargs)
def render_template(self, source, dest, extra_context={}):
self.info("Rendering template: %s", source)
with open(source, 'rb') as source_file:
t = Template(source_file.read())
context = dict(self.env)
context.update(extra_context)
with open(dest, 'wb') as dest_file:
dest_file.write(t.safe_substitute(context))
self.info("Done.")
def restart_app(self):
pass
def update_app(self):
pass
def install_dependencies(self):
pass
def deploy(self):
self.update_app()
self.install_dependencies()
self.update_config()
self.restart_app()
def find_resource(self, path):
full_path = os.path.join(self.env['APP_DIR'], path)
if os.path.isfile(full_path):
return full_path
raise ValueError("Resource '%s' not found" % path)
@classmethod
def run_deploy(cls, *args, **kwargs):
site = cls(*args, **kwargs)
return site.deploy()
class WSGISite(DeploySite):
def __init__(self, **env):
super(WSGISite, self).__init__(**env)
if 'WSGI_FILE' not in self.env:
self.env['WSGI_FILE'] = os.path.join(self.env['ROOT'], 'www',
'wsgi', self.env['PROJECT_NAME']) + '.wsgi'
if 'WSGI_SOURCE_FILE' not in self.env:
self.env['WSGI_SOURCE_FILE'] = 'wsgi_app.template'
if 'WSGI_USER' not in self.env:
self.env['WSGI_USER'] = 'www-data'
def restart_app(self):
self.info("Restarting wsgi application: %s", self.env['WSGI_FILE'])
os.system("touch %s" % self.env['WSGI_FILE'])
def update_config(self):
source = self.find_resource(self.env['WSGI_SOURCE_FILE'])
self.render_template(source, self.env['WSGI_FILE'])
class PIPSite(DeploySite):
def install_dependencies(self):
self.info("Installing requirements")
os.system("pip install -r %s" % self.find_resource('requirements.txt'))
try:
self.info("Installing local requirements")
os.system("pip install -r %s" % self.find_resource('requirements_local.txt'))
except ValueError:
pass
class GitSite(DeploySite):
def update_app(self):
self.info("Updating repository.")
os.system("cd %s; git pull" % self.env['APP_DIR'])
class ApacheSite(DeploySite):
def __init__(self, **env):
super(ApacheSite, self).__init__(**env)
if 'VHOST_SOURCE_FILE' not in self.env:
self.env['VHOST_SOURCE_FILE'] = 'apache_vhost.template'
if 'VHOST_FILE' not in self.env:
self.env['VHOST_FILE'] = os.path.join(self.env['CONFIG_DIR'], self.env['PROJECT_NAME'] + '.vhost')
def update_config(self):
source = self.find_resource(self.env['VHOST_SOURCE_FILE'])
self.render_template(source, self.env['VHOST_CONFIG_FILE'])
|
Python
| 0
|
@@ -2216,24 +2216,94 @@
elf, path):%0A
+ for dir in (self.env%5B'CONFIG_DIR'%5D, self.env%5B'APP_DIR'%5D):%0A
full
@@ -2327,35 +2327,19 @@
oin(
-self.env%5B'APP_DIR'%5D
+dir
, path)%0A
@@ -2334,16 +2334,20 @@
, path)%0A
+
@@ -2384,24 +2384,28 @@
+
return full_
@@ -2409,16 +2409,17 @@
ll_path%0A
+%0A
|
b8c497ab8b4be7e637a3b5c61fa65e0cf5c21598
|
Fix to snp table
|
src/scripts/snpTable.py
|
src/scripts/snpTable.py
|
import sys
from tex import *
import xml.etree.ElementTree as ET
def fn(file):
l = {}
for line in [ line.split() for line in open(file, "r").readlines()[1:] ]:
l[line[0]] = line[1:]
referenceLine = l.pop("reference")
aggregateLine = l.pop("aggregate")
chimpLine = l.pop("panTro3")
k = l.keys()
k.sort()
samples = 0
truePositives = 3
sampleTruePositives = 10
sampleTrueNegatives = 12
for key in k:
if len(l[key]) > 8:
print "gooo", key, l[key][samples], l[key][truePositives], l[key][sampleTruePositives], l[key][sampleTrueNegatives]
yield (key, int(l[key][samples]), int(l[key][truePositives]), int(l[key][sampleTruePositives]), int(l[key][sampleTrueNegatives]))
else:
yield (key, int(l[key][samples]), int(l[key][truePositives]), None, None)
yield "panTro3", int(chimpLine[samples]), int(chimpLine[truePositives]), None, None
yield "aggregate", int(aggregateLine[samples]), int(aggregateLine[truePositives]), None, None
yield "reference", int(referenceLine[samples]), int(referenceLine[truePositives]), None, None
fileHandle = open(sys.argv[3], "w")
writeDocumentPreliminaries(fileHandle)
writePreliminaries(9, fileHandle)
#writeRow(("samples", "sequence", "\% mapped", "\% mapped and contiguous", "\% contigious that mapped"), fileHandle)
writeLine(9, 1, (("Single Nucleotide Polymorphisms", 0, 8, 0, 0),), fileHandle)
writeLine(9, 2, (("Sample", 0, 0, 0, 1),
("Unfiltered", 1, 4, 0, 0),
("T\#", 1, 1, 1, 1),
("TP", 2, 2, 1, 1),
("STP", 3, 3, 1, 1),
("SFN", 4, 4, 1, 1),
("Filtered", 5, 8, 0, 0),
("T\#", 5, 5, 1, 1),
("TP", 6, 6, 1, 1),
("STP", 7, 7, 1, 1),
("SFN", 8, 8, 1, 1)), fileHandle)
for sampleName, samples, truePositives, \
sampleTruePositives, sampleTrueNegatives, \
filteredSamples, filteredTruePositives,\
filteredSampleTruePositives, filteredSampleTrueNegatives in [ tuple(list(i) + list(j[1:])) for i, j in zip(fn(sys.argv[1]), fn(sys.argv[2])) if i[0] == j[0] ]:
def fn2(i, j=samples):
if i != None:
return "%.0f" % (100.0*float(i)/float(j))
return "NA"
def fn3(i, j=filteredSamples):
return fn2(i, j)
def fn4(i, j):
if i == None:
return "NA"
return "%.0f" % (100.0*float(j)/(float(i) + float(j)))
writeLine(9, 1, ((sampleName, 0, 0, 0, 0),
(samples, 1, 1, 0, 0),
(fn2(truePositives), 2, 2, 0, 0),
(fn2(sampleTruePositives), 3, 3, 0, 0),
(fn4(sampleTruePositives, sampleTrueNegatives), 4, 4, 0, 0),
(filteredSamples, 5, 5, 0, 0),
(fn3(filteredTruePositives), 6, 6, 0, 0),
(fn3(filteredSampleTruePositives), 7, 7, 0, 0),
(fn4(filteredSampleTruePositives, filteredSampleTrueNegatives), 8, 8, 0, 0)), fileHandle, trailingLines=0)
writeEnd(fileHandle, "snpTable", "Unfiltered: all SNPs detected in each sample with respect to HG19. \
Filtered: as unfiltered, but excluding SNPs detected within 5 bps of an indel within the MSA. \
T\#: Total number of SNPs. \
TP: Percentage true positives, as validated by a matching SNP in dbSNP. \
STP: Percentage (sample) true positives, as validated by those reported for the sample in question. \
SFN: Percentage (sample) false negatives, as validated by those reported for the sample in question. \
An NA entry denotes that the data was not available. \
Aggregate row: gives the total SNPs in human samples (excluding chimp). \
Reference row: gives SNPs in our reference with respect to HG19")
writeDocumentEnd(fileHandle)
fileHandle.close()
|
Python
| 0.999908
|
@@ -154,17 +154,17 @@
lines()%5B
-1
+2
:%5D %5D:%0A
|
308428ac6d3fc588e75dae9d2875cf8b5ddd2d98
|
remove redundant import
|
examples/example_bubble.py
|
examples/example_bubble.py
|
import matplotlib.pyplot as plt
from bubblekicker.bubblekicker import (BubbleKicker, batchbubblekicker,
bubble_properties_calculate,
_bubble_properties_filter,
bubble_properties_plot)
from bubblekicker.pipelines import CannyPipeline, AdaptiveThresholdPipeline
###############
# EXAMPLE 1: pipeline ass such
###############
# CANNY PIPELINE
bubbler = CannyPipeline('drafts/0325097m_0305.tif', channel='red')
result = bubbler.run([120, 180], 3, 3, 1, 1)
# show the resulting image of the detected bubbles
bubbler.plot()
# show the individual steps performed to get this result
bubbler.what_have_i_done()
# ADAPTIVE THRESHOLD PIPELINE
bubbler = AdaptiveThresholdPipeline('drafts/0325097m_0305.tif', channel='red')
result = bubbler.run(91, 18, 3, 1, 1)
# show the resulting image of the detected bubbles
bubbler.plot()
# show the individual steps performed to get this result
bubbler.what_have_i_done()
###############
# EXAMPLE 2: individual sequence
###############
# setup the object
bubbler = BubbleKicker('drafts/0325097m_0305.tif', channel='red')
# using functions (both opencv as skimage are available)
bubbler.edge_detect_canny_opencv([30, 80])
bubbler.dilate_opencv(3)
# show the resulting image of the detected bubbles
bubbler.plot()
# show the individual steps performed to get this result
bubbler.what_have_i_done()
# retry another sequence => reset the image
bubbler.reset_to_raw()
# some alternative settings
bubbler.edge_detect_canny_opencv([100, 150])
bubbler.dilate_opencv(3)
bubbler.clear_border_skimage(3, 1)
# show the resulting image of the detected bubbles
bubbler.plot()
# show the individual steps performed to get this result
# this is the list since the reset to raw
bubbler.what_have_i_done()
bubbler.reset_to_raw()
bubbler.adaptive_threshold_opencv()
bubbler.clear_border_skimage()
bubbler.plot()
bubbler.what_have_i_done()
# switch color channel
bubbler = BubbleKicker('drafts/0325097m_0305.tif', channel='red')
print(bubbler.what_channel())
bubbler.plot()
bubbler.switch_channel('green')
print(bubbler.what_channel())
bubbler.plot()
###############
# EXAMPLE 3: running a batch sequence
###############
res = batchbubblekicker('examples/data', 'red',
AdaptiveThresholdPipeline,
91, 18, 3, 1, 1)
print(res)
###############
# EXAMPLE 4: Property functions
###############
# derive and PLOT the bubble properties as a table with no filter
bubbler = CannyPipeline('drafts/0325097m_0305.tif', channel='red')
result = bubbler.run([120, 180], 3, 3, 1, 1)
id_image, props = bubble_properties_calculate(result, rules={})
fig, axs = bubble_properties_plot(props, "equivalent_diameter")
fig.savefig("examples/output_eq_diameter.png")
fig, axs = bubble_properties_plot(props, "area")
fig.savefig("examples/output_area.png")
# filter bubble properties based on a DEFAULT filter
bubbler = CannyPipeline('drafts/0325097m_0305.tif', channel='red')
result = bubbler.run([120, 180], 3, 3, 1, 1)
id_image, props = bubble_properties_calculate(result)
print(props)
# filter bubble properties based on CUSTOM filter ruleset
custom_filter = {'circularity_reciprocal': {'min': 0.2, 'max': 1.6},
'convexity': {'min': 1.92}}
bubbler = CannyPipeline('drafts/0325097m_0305.tif', channel='red')
result = bubbler.run([120, 180], 3, 3, 1, 1)
id_image, props = bubble_properties_calculate(result, rules=custom_filter)
print(props)
plt.show()
|
Python
| 0.998781
|
@@ -174,75 +174,8 @@
e,%0D%0A
- _bubble_properties_filter,%0D%0A
|
2e18e05659e9ba88f2fcce77259792f84b25e5fa
|
Add ability to disable frame evaluation
|
_pydevd_frame_eval/pydevd_frame_eval_main.py
|
_pydevd_frame_eval/pydevd_frame_eval_main.py
|
import os
import sys
IS_PY36_OR_OLDER = False
if (sys.version_info[0] == 3 and sys.version_info[1] >= 6) or sys.version_info[0] > 3:
IS_PY36_OR_OLDER = True
set_frame_eval = None
stop_frame_eval = None
if IS_PY36_OR_OLDER:
try:
from _pydevd_frame_eval.pydevd_frame_evaluator import frame_eval_func, stop_frame_eval
except ImportError:
from _pydev_bundle.pydev_monkey import log_error_once
dirname = os.path.dirname(__file__)
log_error_once("warning: Debugger speedups for Python 3.6 not found. Run '\"%s\" \"%s\" build_ext --inplace' to build." % (
sys.executable, os.path.join(dirname, 'setup.py')))
|
Python
| 0
|
@@ -202,17 +202,168 @@
= None%0A%0A
-%0A
+use_frame_eval = os.environ.get('PYDEVD_USE_FRAME_EVAL', None)%0A%0Aif use_frame_eval == 'NO':%0A frame_eval_func, stop_frame_eval = None, None%0A%0Aelse:%0A
if IS_PY
@@ -375,16 +375,20 @@
_OLDER:%0A
+
try:
@@ -388,32 +388,36 @@
try:%0A
+
+
from _pydevd_fra
@@ -487,16 +487,20 @@
me_eval%0A
+
exce
@@ -515,32 +515,36 @@
tError:%0A
+
from _pydev_bund
@@ -590,16 +590,20 @@
+
+
dirname
@@ -630,16 +630,20 @@
file__)%0A
+
@@ -766,16 +766,20 @@
d.%22 %25 (%0A
+
|
882c7f4ec9ead975175aabae17c11c9750b5095b
|
add basic support for solidsport
|
lib/svtplay_dl/service/solidtango.py
|
lib/svtplay_dl/service/solidtango.py
|
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
import re
import xml.etree.ElementTree as ET
from urllib.parse import urlparse
from svtplay_dl.error import ServiceError
from svtplay_dl.fetcher.hls import hlsparse
from svtplay_dl.service import Service
class Solidtango(Service):
supported_domains_re = [r"^([^.]+\.)*solidtango.com"]
supported_domains = ["mm-resource-service.herokuapp.com", "solidtango.com"]
def get(self):
data = self.get_urldata()
match = re.search('src="(http://mm-resource-service.herokuapp.com[^"]*)"', data)
if match:
data = self.http.request("get", match.group(1)).text
match = re.search('src="(https://[^"]+solidtango[^"]+)" ', data)
if match:
data = self.http.request("get", match.group(1)).text
match = re.search(r"<title>(http[^<]+)</title>", data)
if match:
data = self.http.request("get", match.group(1)).text
match = re.search("is_livestream: true", data)
if match:
self.config.set("live", True)
match = re.search("isLivestream: true", data)
if match:
self.config.set("live", True)
match = re.search('html5_source: "([^"]+)"', data)
match2 = re.search('hlsURI: "([^"]+)"', data)
if match:
streams = hlsparse(self.config, self.http.request("get", match.group(1)), match.group(1), output=self.output)
for n in list(streams.keys()):
yield streams[n]
elif match2:
streams = hlsparse(self.config, self.http.request("get", match2.group(1)), match2.group(1), output=self.output)
for n in list(streams.keys()):
yield streams[n]
else:
parse = urlparse(self.url)
url2 = "https://{}/api/v1/play/{}.xml".format(parse.netloc, parse.path[parse.path.rfind("/") + 1 :])
data = self.http.request("get", url2)
if data.status_code != 200:
yield ServiceError("Can't find video info. if there is a video on the page. its a bug.")
return
xmldoc = data.text
xml = ET.XML(xmldoc)
elements = xml.findall(".//manifest")
streams = hlsparse(self.config, self.http.request("get", elements[0].text), elements[0].text, output=self.output)
for n in list(streams.keys()):
yield streams[n]
|
Python
| 0
|
@@ -451,16 +451,35 @@
ango.com
+%22, %22solidsport.com
%22%5D%0A%0A
|
b80fee7687b848f198036eeefbe086796a22078e
|
update url
|
examples/flask-echo/app.py
|
examples/flask-echo/app.py
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
import os
import sys
import requests, json
from argparse import ArgumentParser
from flask import Flask, request, abort
from linebot import (
LineBotApi, WebhookParser
)
from linebot.exceptions import (
InvalidSignatureError
)
from linebot.models import (
MessageEvent, TextMessage, TextSendMessage, TemplateSendMessage, ImageSendMessage,
ButtonsTemplate, ConfirmTemplate, CarouselTemplate, CarouselColumn,
TemplateAction, PostbackTemplateAction, MessageTemplateAction, URITemplateAction
)
app = Flask(__name__)
# get channel_secret and channel_access_token from your environment variable
channel_secret = os.getenv('LINE_CHANNEL_SECRET', None)
channel_access_token = os.getenv('LINE_CHANNEL_ACCESS_TOKEN', None)
if channel_secret is None:
print('Specify LINE_CHANNEL_SECRET as environment variable.')
sys.exit(1)
if channel_access_token is None:
print('Specify LINE_CHANNEL_ACCESS_TOKEN as environment variable.')
sys.exit(1)
line_bot_api = LineBotApi(channel_access_token)
parser = WebhookParser(channel_secret)
@app.route("/callback", methods=['POST'])
def callback():
signature = request.headers['X-Line-Signature']
# get request body as text
body = request.get_data(as_text=True)
#app.logger.info("Request body: " + body)
#app.logger.info("Signature: " + signature)
# parse webhook body
try:
events = parser.parse(body, signature)
except InvalidSignatureError:
abort(500)
# if event is MessageEvent and message is TextMessage, then check prefix
for event in events:
text_message = event.message.text
if not isinstance(event, MessageEvent):
continue
if not isinstance(event.message, TextMessage):
continue
# if prefix is @so, check StackOverflow
if text_message.lower().startswith('@so'):
sendMessage = queryStackOverflow(text_message)
# if prefix is @go, check
elif text_message.lower().startswith('@go'):
# do nothing first
sendMessage = None
else:
continue
line_bot_api.reply_message(
event.reply_token, sendMessage
)
return 'OK'
def queryStackOverflow(query):
url = 'https://api.stackexchange.com/2.2/search/advanced'
headers = dict(
order='desc',
sort='relevance',
views='500',
site='stackoverflow',
q=query,
body=query,
answer=1
)
response = requests.get(url=url, params=headers)
data = json.loads(response.text)
app.logger.info(response.text)
index = 0
if data['items'] is not None and data['items'][index] is not None:
template = TemplateSendMessage(
alt_text='This message is only available on your smartphone',
template=ButtonsTemplate(
thumbnail_image_url=data['items'][index]['link'],
title=data['items'][index]['title'],
text='Tags:' + json.dumps(data['items'][index]['tags']),
actions=[
URITemplateAction(
label='Read Article',
uri=data['items'][index]['link']
),
URITemplateAction(
label='Share',
uri='https://lineit.line.me/share/ui?url=' + data['items'][index]['link']
)
]
)
)
else:
template = TextSendMessage(text='No article found.')
return template
def sendText(text):
text_message = TextSendMessage(text=text)
def querySearchEngine(data, type):
index = 0
app.logger.info("type:" + type)
template = ImageSendMessage(
original_content_url='https://upload.wikimedia.org/wikipedia/commons/b/b4/JPEG_example_JPG_RIP_100.jpg',
preview_image_url='https://upload.wikimedia.org/wikipedia/commons/b/b4/JPEG_example_JPG_RIP_100.jpg'
)
return template
if __name__ == "__main__":
arg_parser = ArgumentParser(
usage='Usage: python ' + __file__ + ' [--port <port>] [--help]'
)
arg_parser.add_argument('-p', '--port', default=8000, help='port')
arg_parser.add_argument('-d', '--debug', default=False, help='debug')
options = arg_parser.parse_args()
app.run(debug=options.debug, host='0.0.0.0', port=int(options.port))
|
Python
| 0.000001
|
@@ -2930,16 +2930,139 @@
advanced
+?/2.2/search/advanced?order=desc&sort=relevance&q=AttributeError&answers=1&body=AttributeError&views=200&site=stackoverflow
'%0A he
@@ -3278,24 +3278,8 @@
=url
-, params=headers
)%0A
|
1908a3f4551a0b9fef1b38d8cd4593b8c7e105d2
|
remove debug nonsense
|
src/server/instagram.py
|
src/server/instagram.py
|
# Copyright 2017 Alex K (wtwf.com)
__author__ = 'wtwf.com (Alex K)'
import cgi
import datetime
import json
import logging
import re
import PyRSS2Gen as rss
from google.appengine.api import memcache
from google.appengine.api import urlfetch
from google.appengine.ext import webapp
import gae_memcache_decorator
def find(haystack, *needles):
result = None
for needle in needles:
try:
result = haystack.get(needle)
except AttributeError as ae:
raise ValueError("error: %r no %r in payload %r" %(ae, needle, haystack))
if not result:
raise ValueError("no %r in payload %r" %(needle, haystack))
haystack = result
return result
class RssFeed(webapp.RequestHandler):
"""Make RSS Feed for a (public) instagram user."""
def __repr__(self):
return "instagram.RssFeed"
def getInstaGraphQl(self, igid, page_type):
url = "https://www.instagram.com/%s" % igid
# this is from: https://stackoverflow.com/a/49815744
result = urlfetch.fetch(url)
if result.status_code != 200:
logging.info("Error %r", result.status_code)
return self.error(result.status_code)
payload = None
insta_html = result.content
insta_html_split = insta_html.split('<script type="text/javascript">window._sharedData = ')
if len(insta_html_split) > 1:
insta_html_split_2 = insta_html_split[1].split(';</script>')
if len(insta_html_split_2) > 1:
payload = json.loads(insta_html_split_2[0])
logging.info("DONE!\n%s", json.dumps(payload, sort_keys=True, indent=2, separators=(',', ': ')))
profile_page = find(payload, "entry_data", page_type)
graphql = find(profile_page[0], "graphql")
return graphql
@gae_memcache_decorator.cached(time=60*60*12)
def get(self, user):
"""hello"""
self.response.out.write("<plaintext>%s\n" % str(self.get))
return
graphql = self.getInstaGraphQl(user, "ProfilePage")
user = find(graphql, "user")
if not user: return
edges = find(user, "edge_owner_to_timeline_media", "edges")
if not edges: return
f = None
for item in edges:
item = find(item, "node")
if not item:
continue
if f is None:
# init the feed.
title = "%s (@%s)" % (user["full_name"], user["username"])
f = rss.RSS2(
title=title,
link="https://instagram.com/%s" % user["username"],
description="",
lastBuildDate=datetime.datetime.now(),
)
link = GetLink(item)
if item["__typename"] == "GraphSidecar":
logging.info("getting sidecar for %r", item["shortcode"])
body = self.GetSidecarBody(self.getInstaGraphQl("p/%s" % item["shortcode"], "PostPage"))
else:
body = self.GetBody(item)
try:
caption = find(find(item, "edge_media_to_caption", "edges")[0], "node", "text")
except ValueError:
caption = ""
body = """%s\n<br>\n%s""" % (body, cgi.escape(caption))
rss_item = {
"title": title,
"link": link,
"description": body,
"guid": rss.Guid(item["id"], False),
"pubDate": datetime.datetime.fromtimestamp(int(item["taken_at_timestamp"])),
}
# if item.get("is_video"):
# rss_item["enclosure"] = rss.Enclosure(item["videos"]["standard_resolution"]["url"], 10, "video/mp4")
f.items.append(rss.RSSItem(**rss_item))
self.response.headers['Content-Type'] = 'text/xml'
f.write_xml(self.response.out)
def GetVideoUrl(self, item):
ans = item.get("video_url")
if ans:
return ans
shortcode = item["shortcode"]
# return "VIDEO URL p/%s" % shortcode
key = "GetVideoUrl:" + shortcode
ans = memcache.get(key)
if ans is not None:
return ans
graphql = self.getInstaGraphQl("p/%s" % shortcode, "PostPage")
ans = graphql.get("shortcode_media", {}).get("video_url")
memcache.set(key, ans)
return ans
def GetBody(self, item):
# img_src = re.sub(
# r'c[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+/',
# '',
# item["thumbnail_resources"][-1]["src"]
# )
link = GetLink(item)
img_src = item["display_url"]
width_and_height = """width="%(width)s" height="%(height)s" """ % item["dimensions"]
media = """<a href="%s"><img %s src="%s"></a>""" % (link, width_and_height, img_src)
logging.info("is_video %r %r", item.get("is_video"), bool(item.get("is_video")))
if item.get("is_video"):
media = """<video %s controls="controls">
<source src="%s" type="video/mp4" poster="%s" />
</video><br>%s<br>""" % (
width_and_height,
self.GetVideoUrl(item),
img_src,
media
)
return media
def GetSidecarBody(self, graphql):
body = []
edges = find(graphql, "shortcode_media", "edge_sidecar_to_children", "edges")
for edge in edges:
body.append(self.GetBody(find(edge, "node")))
return "\n\n".join(body)
def GetLink(item):
return "https://www.instagram.com/p/%s" % item["shortcode"]
|
Python
| 0.99893
|
@@ -1765,99 +1765,8 @@
r):%0A
- %22%22%22hello%22%22%22%0A self.response.out.write(%22%3Cplaintext%3E%25s%5Cn%22 %25 str(self.get))%0A return%0A%0A
|
7d37c2200d58faa167c9793229d7eabbc7e51494
|
fix tests for autopep8
|
tests/test_style.py
|
tests/test_style.py
|
# -*- coding: utf-8 -*-
import click
import distutils.spawn
import pytest
import zazu.cli
import zazu.style
import zazu.util
def write_c_file_with_bad_style(file):
with open(file, 'w') as f:
f.write('void main(){\n\n}\n ')
def write_py_file_with_bad_style(file):
with open(file, 'w') as f:
f.write('def main():\tpass\n\n\n ')
@pytest.fixture()
def repo_with_style_errors(repo_with_style):
dir = repo_with_style.working_tree_dir
with zazu.util.cd(dir):
write_c_file_with_bad_style('temp.c')
write_c_file_with_bad_style('temp.cc')
write_c_file_with_bad_style('temp.cpp')
write_c_file_with_bad_style('temp.hpp')
write_c_file_with_bad_style('temp.h')
write_py_file_with_bad_style('temp.py')
return repo_with_style
@pytest.mark.skipif(not distutils.spawn.find_executable('astyle'),
reason="requires astyle")
def test_astyle():
styler = zazu.plugins.astyle_styler.AstyleStyler(options=['-U'])
ret = styler.style_string('void main ( ) {}')
assert ret == 'void main() {}'
assert styler.default_extensions()
def test_autopep8():
styler = zazu.plugins.autopep8_styler.Autopep8Styler()
ret = styler.style_string('def foo ():\n pass')
print ret
assert ['*.py'] == styler.default_extensions()
@pytest.mark.skipif(not distutils.spawn.find_executable('clang-format'),
reason="requires clang-format")
def test_clang_format():
styler = zazu.plugins.clang_format_styler.ClangFormatStyler(options=['-style=google'])
ret = styler.style_string('void main ( ) { }')
assert ret == 'void main() {}'
assert styler.default_extensions()
@pytest.mark.skipif(not distutils.spawn.find_executable('clang-format'),
reason="requires clang-format")
def test_bad_style(repo_with_style_errors):
dir = repo_with_style_errors.working_tree_dir
with zazu.util.cd(dir):
runner = click.testing.CliRunner()
result = runner.invoke(zazu.cli.cli, ['style', '--check', '-v'])
assert result.exit_code
assert result.output.endswith('6 files with violations in 6 files\n')
result = runner.invoke(zazu.cli.cli, ['style', '-v'])
assert result.exit_code == 0
assert result.output.endswith('6 files fixed in 6 files\n')
result = runner.invoke(zazu.cli.cli, ['style', '--check'])
assert result.exit_code == 0
@pytest.mark.skipif(not distutils.spawn.find_executable('clang-format'),
reason="requires clang-format")
def test_dirty_style(repo_with_style_errors, monkeypatch):
dir = repo_with_style_errors.working_tree_dir
with zazu.util.cd(dir):
runner = click.testing.CliRunner()
result = runner.invoke(zazu.cli.cli, ['style', '--check', '--cached', '-v'])
assert result.exit_code == 0
assert result.output == '0 files with violations in 0 files\n'
repo_with_style_errors.git.add('temp.c')
result = runner.invoke(zazu.cli.cli, ['style', '--check', '--cached', '-v'])
assert result.exit_code
assert result.output.endswith('1 files with violations in 1 files\n')
def test_style_no_config(repo_with_missing_style):
dir = repo_with_missing_style.working_tree_dir
with zazu.util.cd(dir):
runner = click.testing.CliRunner()
result = runner.invoke(zazu.cli.cli, ['style'])
assert result.output == 'no style settings found\n'
assert result.exit_code == 0
def test_styler():
uut = zazu.styler.Styler()
with pytest.raises(NotImplementedError):
uut.style_string('')
|
Python
| 0
|
@@ -1263,17 +1263,46 @@
-print ret
+assert ret == 'def foo():%5Cn pass%5Cn'
%0A
|
56675ad39c734993561b47373fa9db39e7f36323
|
Remove spec_set from mock.patch('os.chdir').
|
tests/test_utils.py
|
tests/test_utils.py
|
#
# Unit tests for the viewer.utils module.
#
# Copyright: (c) 2014 by Petr Zemek <s3rvac@gmail.com> and contributors
# License: BSD, see LICENSE for more details
#
import os
import unittest
from unittest import mock
from viewer.utils import chdir
@mock.patch('os.chdir', spec_set=os.chdir)
class ChdirTests(unittest.TestCase):
def setUp(self):
self.orig_cwd = os.getcwd()
self.dst_dir = 'test'
def test_os_chdir_is_called_with_dst_dir_in_entry(self, mock_chdir):
with chdir(self.dst_dir):
mock_chdir.assert_called_once_with(self.dst_dir)
def test_os_chdir_is_called_with_orig_cwd_in_exit(self, mock_chdir):
with chdir(self.dst_dir):
mock_chdir.reset_mock()
mock_chdir.assert_called_once_with(self.orig_cwd)
def test_os_chdir_is_called_with_orig_cwd_in_exit_even_if_exception_occurs(
self, mock_chdir):
try:
with chdir(self.dst_dir):
mock_chdir.reset_mock()
raise RuntimeError
except RuntimeError:
mock_chdir.assert_called_once_with(self.orig_cwd)
|
Python
| 0
|
@@ -271,27 +271,8 @@
dir'
-, spec_set=os.chdir
)%0Acl
|
a72ea881dac6807fd81860029586e33452d42fd1
|
fix test case
|
tests/test_utils.py
|
tests/test_utils.py
|
"""Tests for the ``utils`` module."""
from datetime import datetime
from facepy import (
get_application_access_token,
get_extended_access_token,
GraphAPI
)
from mock import patch
from nose.tools import (
assert_equal,
assert_raises,
with_setup
)
mock_request = None
patch = patch('requests.session')
def mock():
global mock_request
mock_request = patch.start()().request
def unmock():
patch.stop()
@with_setup(mock, unmock)
def test_get_extended_access_token():
mock_request.return_value.status_code = 200
mock_request.return_value.content = 'access_token=<extended access token>&expires=5183994'
access_token, expires_at = get_extended_access_token(
'<access token>',
'<application id>',
'<application secret key>'
)
mock_request.assert_called_with(
'GET',
'https://graph.facebook.com/oauth/access_token',
allow_redirects=True,
verify=True,
timeout=None,
params={
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'fb_exchange_token',
'fb_exchange_token': '<access token>'
}
)
assert_equal(access_token, '<extended access token>')
assert isinstance(expires_at, datetime)
@with_setup(mock, unmock)
def test_get_extended_access_token_v23_plus():
mock_request.return_value.status_code = 200
mock_request.return_value.content = (
'{"access_token":"<extended access token>","token_type":"bearer"}'
)
access_token, expires_at = get_extended_access_token(
'<access token>',
'<application id>',
'<application secret key>',
api_version='2.3'
)
mock_request.assert_called_with(
'GET',
'https://graph.facebook.com/v2.3/oauth/access_token',
allow_redirects=True,
verify=True,
timeout=None,
params={
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'fb_exchange_token',
'fb_exchange_token': '<access token>'
}
)
assert_equal(access_token, '<extended access token>')
assert not expires_at
@with_setup(mock, unmock)
def test_get_extended_access_token_no_expiry():
mock_request.return_value.status_code = 200
mock_request.return_value.content = 'access_token=<extended access token>'
access_token, expires_at = get_extended_access_token(
'<access token>',
'<application id>',
'<application secret key>'
)
mock_request.assert_called_with(
'GET',
'https://graph.facebook.com/oauth/access_token',
allow_redirects=True,
verify=True,
timeout=None,
params={
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'fb_exchange_token',
'fb_exchange_token': '<access token>'
}
)
assert_equal(access_token, '<extended access token>')
assert expires_at is None
@with_setup(mock, unmock)
def test_get_application_access_token():
mock_request.return_value.status_code = 200
mock_request.return_value.content = 'access_token=<application access token>'
access_token = get_application_access_token(
'<application id>',
'<application secret key>'
)
mock_request.assert_called_with(
'GET',
'https://graph.facebook.com/oauth/access_token',
allow_redirects=True,
verify=True,
timeout=None,
params={
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert_equal(access_token, '<application access token>')
@with_setup(mock, unmock)
def test_get_application_access_token_v23_plus():
mock_request.return_value.status_code = 200
mock_request.return_value.content = (
'{"access_token":"<application access token>","token_type":"bearer"}'
)
access_token, expires_at = get_application_access_token(
'<application id>',
'<application secret key>',
api_version='2.3'
)
mock_request.assert_called_with(
'GET',
'https://graph.facebook.com/v2.3/oauth/access_token',
allow_redirects=True,
verify=True,
timeout=None,
params={
'client_id': '<application id>',
'client_secret': '<application secret key>',
'grant_type': 'client_credentials'
}
)
assert_equal(access_token, '<application access token>')
@with_setup(mock, unmock)
def test_get_application_access_token_raises_error():
mock_request.return_value.status_code = 200
mock_request.return_value.content = 'An unknown error occurred'
assert_raises(
GraphAPI.FacebookError,
get_application_access_token,
'<application id>',
'<application secret key>'
)
|
Python
| 0.000022
|
@@ -1546,32 +1546,54 @@
n_type%22:%22bearer%22
+, %22expires_in%22:5170982
%7D'%0A )%0A%0A ac
@@ -2245,24 +2245,116 @@
oken%3E')%0A
+# there is data for expiration check on key 'expires_in', %0A # so i comment this one%0A #
assert not e
@@ -2362,16 +2362,17 @@
pires_at
+
%0A%0A%0A@with
|