commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
4cbe4e3a6150af17aa66119edcac3ca35117299e
|
Add a test to make sure we can Add{Pre,Post}Action() on Dir Nodes.
|
test/pre-post-actions.py
|
test/pre-post-actions.py
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# This test exercises the AddPreAction() and AddPostAction() API
# functions, which add pre-build and post-build actions to nodes.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import os
import stat
import sys
import TestSCons
_exe = TestSCons._exe
test = TestSCons.TestSCons()
test.subdir('work1', 'work2')
test.write(['work1', 'SConstruct'], """
import os.path
import stat
env = Environment(XXX='bar%(_exe)s')
def before(env, target, source):
f=open(str(target[0]), "wb")
f.write("Foo\\n")
f.close()
f=open("before.txt", "ab")
f.write(os.path.splitext(str(target[0]))[0] + "\\n")
f.close()
def after(env, target, source):
t = str(target[0])
a = "after_" + t
fin = open(t, "rb")
fout = open(a, "wb")
fout.write(fin.read())
fout.close()
fin.close()
os.chmod(a, os.stat(a)[stat.ST_MODE] | stat.S_IXUSR)
foo = env.Program(source='foo.c', target='foo')
AddPreAction(foo, before)
AddPostAction('foo%(_exe)s', after)
bar = env.Program(source='bar.c', target='bar')
env.AddPreAction('$XXX', before)
env.AddPostAction('$XXX', after)
""" % locals())
test.write(['work1', 'foo.c'], r"""
#include <stdio.h>
int main(void)
{
printf("foo.c\n");
return 0;
}
""")
test.write(['work1', 'bar.c'], r"""
#include <stdio.h>
int main(void)
{
printf("bar.c\n");
return 0;
}
""")
test.run(chdir='work1', arguments='.')
test.run(program=test.workpath('work1', 'foo'+ _exe), stdout="foo.c\n")
test.run(program=test.workpath('work1', 'bar'+ _exe), stdout="bar.c\n")
test.must_match(['work1', 'before.txt'], "bar\nfoo\n")
after_foo_exe = test.workpath('work1', 'after_foo' + _exe)
test.run(program=after_foo_exe, stdout="foo.c\n")
after_bar_exe = test.workpath('work1', 'after_bar' + _exe)
test.run(program=after_bar_exe, stdout="bar.c\n")
test.write(['work2', 'SConstruct'], """\
def b(target, source, env):
open(str(target[0]), 'wb').write(env['X'] + '\\n')
env1 = Environment(X='111')
env2 = Environment(X='222')
B = Builder(action = b, env = env1, multi=1)
print "B =", B
print "B.env =", B.env
env1.Append(BUILDERS = {'B' : B})
env2.Append(BUILDERS = {'B' : B})
env3 = env1.Copy(X='333')
print "env1 =", env1
print "env2 =", env2
print "env3 =", env3
f1 = env1.B(File('file1.out'), [])
f2 = env2.B('file2.out', [])
f3 = env3.B('file3.out', [])
def do_nothing(env, target, source):
pass
AddPreAction(f2[0], do_nothing)
AddPostAction(f3[0], do_nothing)
print "f1[0].builder =", f1[0].builder
print "f2[0].builder =", f2[0].builder
print "f3[0].builder =", f3[0].builder
print "f1[0].env =", f1[0].env
print "f2[0].env =", f2[0].env
print "f3[0].env =", f3[0].env
""")
test.run(chdir='work2', arguments = '.')
test.must_match(['work2', 'file1.out'], "111\n")
test.must_match(['work2', 'file2.out'], "222\n")
test.must_match(['work2', 'file3.out'], "333\n")
test.pass_test()
|
Python
| 0.000035
|
@@ -1423,16 +1423,25 @@
'work2'
+, 'work3'
)%0A%0A%0A%0Ates
@@ -3961,24 +3961,543 @@
%22333%5Cn%22)%0A%0A%0A%0A
+test.write(%5B'work3', 'SConstruct'%5D, %22%22%22%5C%0Adef pre(target, source, env):%0A pass%0Adef post(target, source, env):%0A pass%0Adef build(target, source, env):%0A open(str(target%5B0%5D), 'wb').write('build()%5C%5Cn')%0Aenv = Environment()%0AAddPreAction('dir', pre)%0AAddPostAction('dir', post)%0Aenv.Command('dir/file', %5B%5D, build)%0A%22%22%22)%0A%0Atest.run(chdir = 'work3', arguments = 'dir/file', stdout=test.wrap_stdout(%22%22%22%5C%0Apre(%5B%22dir%22%5D, %5B%5D)%0Apost(%5B%22dir%22%5D, %5B%5D)%0Abuild(%5B%22dir/file%22%5D, %5B%5D)%0A%22%22%22))%0A%0Atest.must_match(%5B'work3', 'dir', 'file'%5D, %22build()%5Cn%22)%0A%0A%0A%0A
test.pass_te
|
0a70a700f450c3c22ee0e7a32ffb57c29b823fe1
|
Exclude test/assembly on Windows
|
test/assembly/gyptest-assembly.py
|
test/assembly/gyptest-assembly.py
|
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .hpp files are ignored when included in the source list on all
platforms.
"""
import sys
import TestGyp
# TODO(bradnelson): get this working for windows.
test = TestGyp.TestGyp(formats=['make', 'ninja', 'scons', 'xcode'])
test.run_gyp('assembly.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('assembly.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello from program.c
Got 42.
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.pass_test()
|
Python
| 0.001267
|
@@ -38,10 +38,10 @@
) 20
-09
+12
Goo
@@ -181,93 +181,48 @@
%22%22%22%0A
-Verifies that .hpp files are ignored when included in the source list on all%0Aplatform
+A basic test of compiling assembler file
s.%0A%22
@@ -252,16 +252,46 @@
estGyp%0A%0A
+if sys.platform != 'win32':%0A
# TODO(b
@@ -332,16 +332,18 @@
indows.%0A
+
test = T
@@ -403,16 +403,18 @@
ode'%5D)%0A%0A
+
test.run
@@ -444,24 +444,26 @@
dir='src')%0A%0A
+
test.relocat
@@ -488,16 +488,18 @@
/src')%0A%0A
+
test.bui
@@ -550,16 +550,18 @@
/src')%0A%0A
+
expect =
@@ -566,16 +566,18 @@
= %22%22%22%5C%0A
+
Hello fr
@@ -593,16 +593,18 @@
m.c%0A
+
Got 42.%0A
%22%22%22%0A
@@ -599,20 +599,24 @@
Got 42.%0A
+
%22%22%22%0A
+
test.run
@@ -683,16 +683,18 @@
pect)%0A%0A%0A
+
test.pas
|
492f769dd9b40bcf2b13379bdc5618def53832e2
|
replace namedtuples by more obvious Points as pair of x,y
|
xoi.py
|
xoi.py
|
#! /usr/bin/env python
import sys
import curses
from curses import KEY_ENTER
import time
from collections import namedtuple
KEY = "KEY"
K_A = ord("a")
K_D = ord("d")
class Point:
def __init__(self, x, y):
self._x = x
self._y = y
@property
def x(self):
return self._x
@x.setter
def x(self, val):
self._x = val
@property
def y(self):
return self._y
@y.setter
def y(self, val):
self._y = val
Event = namedtuple("Event", ["type", "val"])
class Spaceship(object):
def __init__(self, border):
self._image = "<i>"
self._dx = 1
self.border = border
self._pos = Point(self.border.x // 2, self.border.y - 1)
def events(self, event):
if event.type == KEY:
if event.val == K_A:
self._dx = -1
if event.val == K_D:
self._dx = 1
def update(self):
if self._pos.x == self.border.x - len(self._image) - 1 and self._dx > 0:
self._pos.x = 0
elif self._pos.x == 1 and self._dx < 0:
self._pos.x = self.border.x - len(self._image)
self._pos.x += self._dx
self._dx = 0
def draw(self, screen):
screen.addstr(self._pos.y, self._pos.x, self._image, curses.A_BOLD)
class App(object):
def __init__(self):
#self.screen = curses.initscr()
curses.initscr()
self.border = namedtuple("border", ["y", "x"])(24, 80)
self.field = namedtuple("field", ["y", "x"])(self.border.y-1, self.border.x)
self.screen = curses.newwin(self.border.y, self.border.x, 0, 0)
self.screen.keypad(1)
self.screen.nodelay(1)
curses.noecho()
#curses.cbreak()
curses.curs_set(0)
self.spaceship = Spaceship(self.field)
self._objects = []
self._objects.append(self.spaceship)
def deinit(self):
self.screen.nodelay(0)
self.screen.keypad(0)
curses.nocbreak()
curses.echo()
curses.curs_set(1)
curses.endwin()
def events(self):
c = self.screen.getch()
if c == 27: #Escape
self.deinit()
sys.exit(1)
else:
for o in self._objects:
o.events(Event(type="KEY", val=c))
def update(self):
for o in self._objects:
o.update()
def render(self):
self.screen.clear()
self.screen.border(0)
self.screen.addstr(0, 2, "Score: {} ".format(0))
self.screen.addstr(0, self.border.x // 2 - 4, "XOInvader", curses.A_BOLD)
for o in self._objects:
o.draw(self.screen)
self.screen.refresh()
time.sleep(0.03)
def loop(self):
while True:
self.events()
self.update()
try:
self.render()
except:
self.deinit()
sys.exit(1)
def main():
app = App()
app.loop()
if __name__ == "__main__":
main()
|
Python
| 0.000386
|
@@ -1444,103 +1444,56 @@
r =
-namedtuple(%22border%22, %5B%22y%22, %22x%22%5D)(24, 80)%0A self.field = namedtuple(%22field%22, %5B%22y%22, %22x%22%5D)(
+Point(x=80, y=24)%0A self.field = Point(x=
self
@@ -1504,13 +1504,13 @@
der.
-y-1,
+x, y=
self
@@ -1517,17 +1517,19 @@
.border.
-x
+y-1
)%0A
|
bcfa6c5ab4b9ea7cf56a3c4e680efd6433cd6a14
|
Remove documentation of unused --cache option
|
iamine/__main__.py
|
iamine/__main__.py
|
#!/usr/bin/env python3
"""Concurrently retrieve metadata from Archive.org items.
usage: ia-mine [--config-file=<FILE>] (<itemlist> | -) [--debug] [--workers WORKERS] [--cache]
[--retries RETRIES] [--secure] [--hosts HOSTS]
ia-mine [--all | --search QUERY] [[--info | --info --field FIELD...]
|--num-found | --mine-ids | --field FIELD... | --itemlist]
[--debug] [--rows ROWS] [--workers WORKERS] [--cache]
[--retries RETRIES] [--secure] [--hosts HOSTS]
ia-mine [--config-file=<FILE>] [-h | --version | --configure]
positional arguments:
itemlist A file containing Archive.org identifiers, one per
line, for which to retrieve metadata from. If no
itemlist is provided, identifiers will be read from
stdin.
optional arguments:
-h, --help Show this help message and exit.
-v, --version Show program's version number and exit.
--configure Configure ia-mine to use your Archive.org credentials.
-C, --config-file=<FILE> The config file to use.
-d, --debug Turn on verbose logging [default: False]
-a, --all Mine all indexed items.
-s, --search QUERY Mine search results. For help formatting your query,
see: https://archive.org/advancedsearch.php
-m, --mine-ids Mine items returned from search results.
[default: False]
-i, --info Print search result response header to stdout and exit.
-f, --field FIELD Fields to include in search results.
-i, --itemlist Print identifiers only to stdout. [default: False]
-n, --num-found Print the number of items found for the given search
query.
--rows ROWS The number of rows to return for each request made to
the Archive.org Advancedsearch API. On slower networks,
it may be useful to use a lower value, and on faster
networks, a higher value. [default: 50]
-w, --workers WORKERS
The maximum number of tasks to run at once.
[default: 100]
-c, --cache Cache item metadata on Archive.org. Items are not
cached are not cached by default.
-r, --retries RETRIES
The maximum number of retries for each item.
[default: 10]
--secure Use HTTPS. HTTP is used by default.
-H, --hosts HOSTS A file containing a list of hosts to shuffle through.
"""
from .utils import suppress_interrupt_messages, suppress_brokenpipe_messages, handle_cli_exceptions
suppress_interrupt_messages()
suppress_brokenpipe_messages()
handle_cli_exceptions()
import logging
import os
import sys
import json
from docopt import docopt, DocoptExit
from schema import Schema, Use, Or, SchemaError
from .api import mine_items, search, configure
from . import __version__
from .exceptions import AuthenticationError
asyncio_logger = logging.getLogger('asyncio')
asyncio_logger.setLevel(logging.CRITICAL)
def print_itemlist(resp):
j = yield from resp.json(encoding='utf-8')
for doc in j.get('response', {}).get('docs', []):
print(doc.get('identifier'))
def main(argv=None, session=None):
# If ia-wrapper calls main with argv argument, strip the
# "mine" subcommand from args.
argv = argv[1:] if argv else sys.argv[1:]
# Catch DocoptExit error and write to stderr manually.
# Otherwise error's vanish if executed from a pex binary.
try:
args = docopt(__doc__, version=__version__, argv=argv, help=True)
except DocoptExit as exc:
sys.exit(sys.stderr.write('{}\n'.format(exc.code)))
# Validate args.
open_file_or_stdin = lambda f: sys.stdin if (f == '-') or (not f) else open(f)
parse_hosts = lambda f: [x.strip() for x in open(f) if x.strip()]
schema = Schema({object: bool,
'--search': Or(None, Use(str)),
'--field': list,
'--config-file': Or(None, str),
'--rows': Use(int,
error='"{}" should be an integer'.format(args['--rows'])),
'--hosts': Or(None, Use(parse_hosts,
error='"{}" should be a readable file.'.format(args['--hosts']))),
'--retries': Use(int, '"{}" should be an integer.'.format(args['--retries'])),
'<itemlist>': Use(open_file_or_stdin,
error='"{}" should be readable'.format(args['<itemlist>'])),
'--workers': Use(int,
error='"{}" should be an integer.'.format(args['--workers'])),
})
try:
args = schema.validate(args)
except SchemaError as exc:
sys.exit(sys.stderr.write('error: {1}\n{0}'.format(__doc__, str(exc))))
# Configure.
if args['--configure']:
sys.stdout.write(
'Enter your Archive.org credentials below to configure ia-mine.\n\n')
try:
configure(overwrite=True, config_file=args['--config-file'])
except AuthenticationError as exc:
sys.stdout.write('\n')
sys.stderr.write('error: {}\n'.format(str(exc)))
sys.exit(1)
sys.exit(0)
# Search.
if args['--search'] or args['--all']:
query = 'all:1' if not args['--search'] else args['--search']
callback = print_itemlist if args['--itemlist'] else None
info_only = True if args['--info'] or args['--num-found'] else False
params = {
'rows': args['--rows']
}
for i, f in enumerate(args['--field']):
params['fl[{}]'.format(i)] = f
r = search(query,
params=params,
callback=callback,
mine_ids=args['--mine-ids'],
info_only=info_only,
max_tasks=args['--workers'],
retries=args['--retries'],
config_file=args['--config-file'],
secure=args['--secure'],
hosts=args['--hosts'],
debug=args['--debug'])
if args['--info']:
sys.stdout.write('{}\n'.format(json.dumps(r)))
elif args['--num-found']:
sys.stdout.write('{}\n'.format(r.get('numFound', 0)))
sys.exit(0)
# Mine.
else:
# Exit with 2 if stdin appears to be empty.
if args['-']:
if (not os.fstat(sys.stdin.fileno()).st_size > 0) and (sys.stdin.seekable()):
sys.exit(2)
mine_items(args['<itemlist>'],
max_tasks=args['--workers'],
retries=args['--retries'],
secure=args['--secure'],
hosts=args['--hosts'],
config_file=args['--config-file'],
debug=args['--debug'])
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -155,34 +155,24 @@
ers WORKERS%5D
- %5B--cache%5D
%0A
@@ -434,18 +434,8 @@
ERS%5D
- %5B--cache%5D
%0A
@@ -2336,150 +2336,8 @@
00%5D%0A
- -c, --cache Cache item metadata on Archive.org. Items are not%0A cached are not cached by default.%0A
-r
|
101608c8d769e383a849b063bfb002cfe1ed3261
|
Make sqlalchemy converter be compatible with latest version wtforms.
|
flask_superadmin/model/backends/sqlalchemy/orm.py
|
flask_superadmin/model/backends/sqlalchemy/orm.py
|
from sqlalchemy.orm.exc import NoResultFound
from wtforms import ValidationError, fields, validators
from wtforms.ext.sqlalchemy.orm import converts, ModelConverter
from wtforms.ext.sqlalchemy.fields import QuerySelectField, QuerySelectMultipleField
from wtforms.ext.sqlalchemy.orm import model_form
from flask.ext.superadmin import form
class Unique(object):
"""Checks field value unicity against specified table field.
:param get_session:
A function that return a SQAlchemy Session.
:param model:
The model to check unicity against.
:param column:
The unique column.
:param message:
The error message.
"""
field_flags = ('unique', )
def __init__(self, db_session, model, column, message=None):
self.db_session = db_session
self.model = model
self.column = column
self.message = message
def __call__(self, form, field):
try:
obj = (self.db_session.query(self.model)
.filter(self.column == field.data).one())
if not hasattr(form, '_obj') or not form._obj == obj:
if self.message is None:
self.message = field.gettext(u'Already exists.')
raise ValidationError(self.message)
except NoResultFound:
pass
class AdminModelConverter(ModelConverter):
"""
SQLAlchemy model to form converter
"""
def __init__(self, view):
super(AdminModelConverter, self).__init__()
self.view = view
def _get_label(self, name, field_args):
if 'label' in field_args:
return field_args['label']
# if self.view.rename_columns:
# return self.view.rename_columns.get(name)
return None
def _get_field_override(self, name):
# if self.view.form_overrides:
# return self.view.form_overrides.get(name)
return None
def convert(self, model, mapper, prop, field_args):
kwargs = {
'validators': [],
'filters': []
}
if field_args:
kwargs.update(field_args)
if hasattr(prop, 'direction'):
remote_model = prop.mapper.class_
local_column = prop.local_remote_pairs[0][0]
kwargs.update({
'allow_blank': local_column.nullable,
'label': self._get_label(prop.key, kwargs),
'query_factory': lambda: self.view.session.query(remote_model)
})
if local_column.nullable:
kwargs['validators'].append(validators.Optional())
elif prop.direction.name not in ('MANYTOMANY','ONETOMANY'):
kwargs['validators'].append(validators.Required())
# Override field type if necessary
override = self._get_field_override(prop.key)
if override:
return override(**kwargs)
if prop.direction.name == 'MANYTOONE':
return QuerySelectField(widget=form.ChosenSelectWidget(),
**kwargs)
elif prop.direction.name == 'ONETOMANY':
# Skip backrefs
if not local_column.foreign_keys and self.view.hide_backrefs:
return None
return QuerySelectMultipleField(
widget=form.ChosenSelectWidget(multiple=True),
**kwargs)
elif prop.direction.name == 'MANYTOMANY':
return QuerySelectMultipleField(
widget=form.ChosenSelectWidget(multiple=True),
**kwargs)
else:
# Ignore pk/fk
if hasattr(prop, 'columns'):
column = prop.columns[0]
# Do not display foreign keys - use relations
if column.foreign_keys:
return None
unique = False
if column.primary_key:
# By default, don't show primary keys either
if self.view.fields is None:
return None
# If PK is not explicitly allowed, ignore it
if prop.key not in self.view.fields:
return None
kwargs['validators'].append(Unique(self.view.session,
model,
column))
unique = True
# If field is unique, validate it
if column.unique and not unique:
kwargs['validators'].append(Unique(self.view.session,
model,
column))
if not column.nullable:
kwargs['validators'].append(validators.Required())
# Apply label
kwargs['label'] = self._get_label(prop.key, kwargs)
# Override field type if necessary
override = self._get_field_override(prop.key)
if override:
return override(**kwargs)
return super(AdminModelConverter, self).convert(model,
mapper,
prop,
kwargs)
@converts('Date')
def convert_date(self, field_args, **extra):
field_args['widget'] = form.DatePickerWidget()
return fields.DateField(**field_args)
@converts('DateTime')
def convert_datetime(self, field_args, **extra):
field_args['widget'] = form.DateTimePickerWidget()
return fields.DateTimeField(**field_args)
@converts('Time')
def convert_time(self, field_args, **extra):
return form.TimeField(**field_args)
|
Python
| 0
|
@@ -1979,32 +1979,39 @@
prop, field_args
+, *args
):%0A kwarg
|
86c45216633a3a273d04a64bc54ca1026b3d5069
|
Fix comment middleware
|
debreach/middleware.py
|
debreach/middleware.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import base64
import logging
import random
from Crypto.Cipher import AES
from django.core.exceptions import SuspiciousOperation
from debreach.compat import \
force_bytes, get_random_string, string_types, force_text
log = logging.getLogger(__name__)
class CSRFCryptMiddleware(object):
def process_request(self, request):
if request.POST.get('csrfmiddlewaretoken') \
and '$' in request.POST.get('csrfmiddlewaretoken'):
try:
POST = request.POST.copy()
token = POST.get('csrfmiddlewaretoken')
key, value = token.split('$')
value = base64.decodestring(force_bytes(value)).strip()
aes = AES.new(key.strip())
POST['csrfmiddlewaretoken'] = aes.decrypt(value).strip()
POST._mutable = False
request.POST = POST
except:
log.exception('Error decoding csrfmiddlewaretoken')
raise SuspiciousOperation(
'csrfmiddlewaretoken has been tampered with')
return
class RandomCommentMiddleware(object):
def process_response(self, request, response):
if not getattr(response, 'streaming', False) \
and response['Content-Type'] == 'text/html' \
and isinstance(response.content, string_types):
comment = '<!-- {0} -->'.format(
get_random_string(random.choice(range(12, 25))))
response.content = '{0}{1}'.format(
force_text(response.content), comment)
return response
|
Python
| 0.000002
|
@@ -1350,12 +1350,28 @@
pe'%5D
- ==
+.strip().startswith(
'tex
@@ -1377,16 +1377,17 @@
xt/html'
+)
%5C%0A
|
1e7bbd7b59abbe0bcb01fd98079a362f4f874d3b
|
Fix long waiting version number up
|
dedupsqlfs/__init__.py
|
dedupsqlfs/__init__.py
|
# -*- coding: utf8 -*-
# Documentation. {{{1
"""
This Python library implements a file system in user space using FUSE. It's
called DedupFS because the file system's primary feature is deduplication,
which enables it to store virtually unlimited copies of files because data
is only stored once.
In addition to deduplication the file system also supports transparent
compression using any of the compression methods zlib, bz2, lzma
and optionaly lzo, lz4, snappy, zstd.
These two properties make the file system ideal for backups: I'm currently
storing 250 GB worth of backups using only 8 GB of disk space.
The latest version is available at https://github.com/sergey-dryabzhinsky/dedupsqlfs
DedupFS is licensed under the MIT license.
Copyright 2010 Peter Odding <peter@peterodding.com>.
Copyright 2013-2020 Sergey Dryabzhinsky <sergey.dryabzhinsky@gmail.com>.
"""
__name__ = "DedupSQLfs"
# for fuse mount
__fsname__ = "dedupsqlfs"
__fsversion__ = "3.3"
# Future 1.3
__version__ = "1.2.947"
# Check the Python version, warn the user if untested.
import sys
if sys.version_info[0] < 3 or \
(sys.version_info[0] == 3 and sys.version_info[1] < 2):
msg = "Warning: %s(%s, $s) has only been tested on Python 3.2, while you're running Python %d.%d!\n"
sys.stderr.write(msg % (__name__, __fsversion__, __version__, sys.version_info[0], sys.version_info[1]))
# Do not abuse GC - we generate alot objects
import gc
if hasattr(gc, "set_threshold"):
gc.set_threshold(100000, 2000, 200)
|
Python
| 0.000075
|
@@ -992,9 +992,13 @@
2.94
-7
+9-dev
%22%0A%0A#
|
2b78c7bc3187e640ad093c964fc7a0f0006208f1
|
comment botan v2
|
2048.py
|
2048.py
|
# -*- coding: utf-8 -*-
#import botan
import constant_2048
import telebot
import urllib
import logging
from board import Board
from telebot import types
import sqlite3
# Using the ReplyKeyboardMarkup class
# It's constructor can take the following optional arguments:
# - resize_keyboard: True/False (default False)
# - one_time_keyboard: True/False (default False)
# - selective: True/False (default False)
# - row_width: integer (default 3)
# row_width is used in combination with the add() function.
# It defines how many buttons are fit on each row before continuing on the next row.
chr_UP = u'\u2191'
chr_DOWN = u'\u2193'
chr_LEFT = u'\u2190'
chr_RIGHT = u'\u2192'
API_TOKEN = constant_2048.API_TOKEN
botan_token = constant_2048.BOTAN_TOKEN # Token got from @botaniobot
tb = telebot.TeleBot(API_TOKEN)
logger = telebot.logger
telebot.logger.setLevel(logging.DEBUG)
def getCellStr(board, x, y): # TODO: refactor regarding issue #11
"""
return a string representation of the cell located at x,y.
"""
# global board
c = board.getCell(x, y)
if c == 0:
return ' '
elif c == 2:
s = ' 2 '
elif c == 4:
s = ' 4 '
elif c == 8:
s = ' 8 '
elif c == 16:
s = ' 16 '
elif c == 32:
s = ' 32 '
elif c == 64:
s = ' 64 '
elif c == 128:
s = ' 128'
elif c == 256:
s = ' 256'
elif c == 512:
s = ' 512'
elif c == 1024:
s = '1024'
elif c == 2048:
s = '2048'
elif c == 4096:
s = '4096'
elif c == 8192:
s = '8192'
else:
s = '%3d' % c
# 2 \ud83d\ude36\n
# 4 \ud83d\ude10\n
# 8 \ud83d\ude42\n
# 16 \ud83d\ude0a\n
# 32 \ud83d\ude00\n
# 64 \ud83d\ude05\n
# 128 \ud83d\ude1d\n
# 256 \ud83d\ude02\n
# 512 \ud83d\ude07\n
# 1024 \ud83d\ude2c\n
# 2048 \ud83d\ude0e\n
# 4096 \ud83d\udc7d
return s
def boardToStringBD(board):
# global board
l = []
stringCell = ''
array_x = range(board.size())
print board.size()
print array_x
for x in array_x:
for y in array_x:
j = board.getCell(x,y)
stringCell = stringCell + str(j) + ','
l.append(j)
return stringCell
def boardToString(board):
"""
return a string representation of the current board.
"""
# global board
# b = board
rg = range(board.size())
s = "ββββββ¬βββββ¬βββββ¬βββββ\n"+"ββββββ¬βββββ¬βββββ¬βββββ\n|"+"|\nββββββͺβββββͺβββββͺβββββ‘\n|".join(
['|'.join([getCellStr(board, x, y) for x in rg]) for y in rg])
s = s + "|\nββββββ΄βββββ΄βββββ΄βββββ"
return s
# Handle '/start' and '/help'
@tb.message_handler(commands=['help', 'start'])
def send_welcome(message):
msg = tb.reply_to(message, """\
Hi there, I am 2048bot. For game starting click /game
""")
tb.register_next_step_handler(msg, game_start)
@tb.message_handler(commands=['game'])
def game_start(message):
# or add strings one row at a time:
# global score
markup = types.ReplyKeyboardMarkup()
board = Board()
score = 0
chat_id = str(message.chat.id)
con = sqlite3.connect('2048.db')
with con:
cur = con.cursor()
cur.execute('INSERT or REPLACE INTO users (id, board, score) VALUES ('+chat_id+', "'+boardToStringBD(board)+'", "'+str(score)+'");')
con.close()
s = boardToString(board)
markup.row(chr_UP)
markup.row(chr_LEFT, chr_DOWN, chr_RIGHT)
tb.send_message(message.chat.id, "```" + s + "```", parse_mode = "Markdown", reply_markup = markup)
uid = message.chat.id
message_dict = message.to_dict()
event_name = message.text
# print botan.track(botan_token, uid, message_dict, event_name)
@tb.message_handler(content_types=['text'])
def game_arrow(message):
try:
con = sqlite3.connect('2048.db')
chat_id = str(message.chat.id)
with con:
cur = con.cursor()
cur.execute('SELECT board, score FROM users WHERE id='+chat_id+';')
data_from_db = cur.fetchone()
list_from_db = []
if data_from_db:
list_from_db = data_from_db[0].split(',')
else:
list_from_db = "0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0".split(',')
if data_from_db[1]:
score = int(data_from_db[1])
else:
score = 0
board = Board()
array_x = range(board.size())
for x in array_x:
print 'list from db'
print list_from_db
j = []
for y in array_x:
j.append(int(list_from_db[4*x+y]))
print j
if (j!=''):
board.setCol(x, j)
if message.text == chr_UP:
print 'UP'
score += board.move(Board.UP)
if message.text == chr_DOWN:
print 'DOWN'
score += board.move(Board.DOWN)
if message.text == chr_RIGHT:
score += board.move(Board.RIGHT)
if message.text == chr_LEFT:
score += board.move(Board.LEFT)
with con:
cur = con.cursor()
cur.execute('INSERT or REPLACE INTO users (id, board, score) VALUES ('+chat_id+', "'+boardToStringBD(board)+'", "'+str(score)+'");')
con.close()
s = boardToString(board)
tb.send_message(message.chat.id, "Score: "+ str(score) + "```" + s + "```", parse_mode = "Markdown")
except Exception:
print 'wtf!'
send_welcome(message)
tb.polling()
|
Python
| 0
|
@@ -3517,16 +3517,17 @@
markup)%0A
+#
uid
@@ -3544,16 +3544,17 @@
chat.id%0A
+#
mess
@@ -3582,16 +3582,17 @@
_dict()%0A
+#
even
|
30100751f64e20804dce332fa458a8490be62336
|
Add the "interval" option to the raw_parameter_script example
|
examples/raw_parameter_script.py
|
examples/raw_parameter_script.py
|
""" The main purpose of this file is to demonstrate running SeleniumBase
scripts without the use of Pytest by calling the script directly
with Python or from a Python interactive interpreter. Based on
whether relative imports work or don't, the script can autodetect
how this file was run. With pure Python, it will initialize
all the variables that would've been automatically initialized
by the Pytest plugin. The setUp() and tearDown() methods are also
now called from the script itself.
One big advantage to running tests with Pytest is that most of this
is done for you automatically, with the option to update any of the
parameters through command line parsing. Pytest also provides you
with other plugins, such as ones for generating test reports,
handling multithreading, and parametrized tests. Depending on your
specific needs, you may need to call SeleniumBase commands without
using Pytest, and this example shows you how. """
try:
# Running with Pytest / (Finds test methods to run using autodiscovery)
# Example run command: "pytest raw_parameter_script.py"
from .my_first_test import MyTestClass # (relative imports work: ".~")
except (ImportError, ValueError):
# Running with pure Python OR from a Python interactive interpreter
# Example run command: "python raw_parameter_script.py"
from my_first_test import MyTestClass # (relative imports DON'T work)
sb = MyTestClass("test_basics")
sb.browser = "chrome"
sb.headless = False
sb.headed = False
sb.start_page = None
sb.locale_code = None
sb.servername = "localhost"
sb.port = 4444
sb.data = None
sb.environment = "test"
sb.user_agent = None
sb.incognito = False
sb.guest_mode = False
sb.devtools = False
sb.mobile_emulator = False
sb.device_metrics = None
sb.extension_zip = None
sb.extension_dir = None
sb.database_env = "test"
sb.log_path = "latest_logs/"
sb.archive_logs = False
sb.disable_csp = False
sb.disable_ws = False
sb.enable_ws = False
sb.enable_sync = False
sb.use_auto_ext = False
sb.no_sandbox = False
sb.disable_gpu = False
sb._reuse_session = False
sb._crumbs = False
sb.visual_baseline = False
sb.maximize_option = False
sb.save_screenshot_after_test = False
sb.timeout_multiplier = None
sb.pytest_html_report = None
sb.with_db_reporting = False
sb.with_s3_logging = False
sb.js_checking_on = False
sb.report_on = False
sb.is_pytest = False
sb.slow_mode = False
sb.demo_mode = False
sb.time_limit = None
sb.demo_sleep = 1
sb.dashboard = False
sb._dash_initialized = False
sb.message_duration = 2
sb.block_images = False
sb.remote_debug = False
sb.settings_file = None
sb.user_data_dir = None
sb.proxy_string = None
sb.swiftshader = False
sb.ad_block_on = False
sb.highlights = None
sb.check_js = False
sb.cap_file = None
sb.cap_string = None
sb.setUp()
try:
sb.test_basics()
finally:
sb.tearDown()
del sb
|
Python
| 0.001246
|
@@ -2989,24 +2989,47 @@
_js = False%0A
+ sb.interval = None%0A
sb.cap_f
|
b8b630c0f1bd53960c1f6bb275f25fecbca520ba
|
tweak output format.
|
import_profiler.py
|
import_profiler.py
|
import collections
import time
__OLD_IMPORT = None
class ImportInfo(object):
def __init__(self, name, context_name, counter):
self.name = name
self.context_name = context_name
self._counter = counter
self._depth = 0
self._start = time.time()
self.elapsed = None
def done(self):
self.elapsed = time.time() - self._start
@property
def _key(self):
return self.name, self.context_name, self._counter
def __repr__(self):
return "ImportInfo({!r}, {!r}, {!r})".format(*self._key)
def __hash__(self):
return hash(self._key)
def __eq__(self, other):
if isinstance(other, ImportInfo):
return other._key == self._key
return NotImplemented
def __ne__(self):
return not self == other
class ImportStack(object):
def __init__(self):
self._current_stack = []
self._full_stack = collections.defaultdict(list)
self._counter = 0
def push(self, name, context_name):
info = ImportInfo(name, context_name, self._counter)
self._counter += 1
if len(self._current_stack) > 0:
parent = self._current_stack[-1]
self._full_stack[parent].append(info)
self._current_stack.append(info)
info._depth = len(self._current_stack) - 1
return info
def pop(self, import_info):
top = self._current_stack.pop()
assert top is import_info
top.done()
def compute_intime(parent, full_stack, ordered_visited, visited, depth=0):
if parent in visited:
return
cumtime = intime = parent.elapsed
visited[parent] = [cumtime, parent.name, parent.context_name, depth]
ordered_visited.append(parent)
for child in full_stack[parent]:
intime -= child.elapsed
compute_intime(child, full_stack, ordered_visited, visited, depth + 1)
visited[parent].append(intime)
def print_info(import_stack):
full_stack = import_stack._full_stack
keys = sorted(full_stack.keys(), key=lambda p: p._counter)
visited = {}
ordered_visited = []
for key in keys:
compute_intime(key, full_stack, ordered_visited, visited)
lines = []
for k in ordered_visited:
node = visited[k]
cumtime = node[0] * 1000
name = node[1]
context_name = node[2]
level = node[3]
intime = node[-1] * 1000
if cumtime > 1:
lines.append((
"{:.2}".format(cumtime),
"{:.2}".format(intime),
"+" * level + name,
))
import tabulate
print(
tabulate.tabulate(
lines, headers=("cumtime", "intime", "name"), tablefmt="plain")
)
_IMPORT_STACK = ImportStack()
def profiled_import(name, globals=None, locals=None, fromlist=None,
level=-1, *a, **kw):
if globals is None:
context_name = None
else:
context_name = globals.get("__name__")
if context_name is None:
context_name = globals.get("__file__")
info = _IMPORT_STACK.push(name, context_name)
try:
return __OLD_IMPORT(name, globals, locals, fromlist, level, *a, **kw)
finally:
_IMPORT_STACK.pop(info)
def enable():
global __OLD_IMPORT
__OLD_IMPORT = __builtins__["__import__"]
__builtins__["__import__"] = profiled_import
def disable():
__builtins__["__import__"] = __OLD_IMPORT
|
Python
| 0
|
@@ -2495,33 +2495,34 @@
%22%7B:.
-2
+1f
%7D%22.format(cumtim
@@ -2545,17 +2545,18 @@
%22%7B:.
-2
+1f
%7D%22.forma
@@ -2709,24 +2709,29 @@
rs=(%22cumtime
+ (ms)
%22, %22intime%22,
@@ -2728,16 +2728,21 @@
%22intime
+ (ms)
%22, %22name
|
dd7b10a89e3fd5e431b03e922fbbc0a49c3d8c5e
|
Fix failing wavelet example due to outdated code
|
examples/trafos/wavelet_trafo.py
|
examples/trafos/wavelet_trafo.py
|
# Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Simple example on the usage of the Wavelet Transform."""
import odl
# Discretized space: discretized functions on the rectangle [-1, 1] x [-1, 1]
# with 512 samples per dimension.
space = odl.uniform_discr([-1, -1], [1, 1], (256, 256))
# Make the Wavelet transform operator on this space. The range is calculated
# automatically. The default backend is PyWavelets (pywt).
wavelet_op = odl.trafos.WaveletTransform(space, nscales=2, wbasis='Haar')
# Create a phantom and its wavelet transfrom and display them.
phantom = odl.phantom.shepp_logan(space, modified=True)
phantom.show(title='Shepp-Logan phantom')
# Note that the wavelet transform is a vector in rn.
phantom_wt = wavelet_op(phantom)
phantom_wt.show(title='wavelet transform')
# It may however (for some choices of wbasis) be interpreted as a vector in the
# domain of the transformation
phantom_wt_2d = space.element(phantom_wt)
phantom_wt_2d.show('wavelet transform in 2d')
# Calculate the inverse transform.
phantom_wt_inv = wavelet_op.inverse(phantom_wt)
phantom_wt_inv.show(title='wavelet transform inverted')
|
Python
| 0
|
@@ -1111,32 +1111,33 @@
ce,
-nscales=2, wbasis='Haar'
+wavelet='Haar', nlevels=2
)%0A%0A#
|
5e86582ebde98f14df796102062659f185c2bcca
|
update docs/extensions/fancy_include.py with git_cast_file2repos.py
|
docs/extensions/fancy_include.py
|
docs/extensions/fancy_include.py
|
"""Include single scripts with doc string, code, and image
Use case
--------
There is an "examples" directory in the root of a repository,
e.g. 'include_doc_code_img_path = "../examples"' in conf.py
(default). An example is a file ("an_example.py") that consists
of a doc string at the beginning of the file, the example code,
and, optionally, an image file (png, jpg) ("an_example.png").
Configuration
-------------
In conf.py, set the parameter
fancy_include_path = "../examples"
to wherever the included files reside.
Usage
-----
The directive
.. fancy_include:: an_example.py
will display the doc string formatted with the first line as a
heading, a code block with line numbers, and the image file.
"""
import os.path as op
from docutils.statemachine import ViewList
from docutils.parsers.rst import Directive
from sphinx.util.nodes import nested_parse_with_titles
from docutils import nodes
class IncludeDirective(Directive):
required_arguments = 1
optional_arguments = 0
def run(self):
path = self.state.document.settings.env.config.fancy_include_path
full_path = op.join(path, self.arguments[0])
with open(full_path, "r") as myfile:
text = myfile.read()
source = text.split('"""')
doc = source[1].split("\n")
doc.insert(1, "~" * len(doc[0])) # make title heading
code = source[2].split("\n")
# documentation
rst = []
for line in doc:
rst.append(line)
# image
for ext in [".png", ".jpg"]:
image_path = full_path[:-3] + ext
if op.exists(image_path):
break
else:
image_path = ""
if image_path:
rst.append(".. figure:: {}".format(image_path))
# download file
rst.append(":download:`{}<{}>`".format(
op.basename(full_path), full_path))
# code
rst.append("")
rst.append(".. code-block:: python")
rst.append(" :linenos:")
rst.append("")
for line in code:
rst.append(" {}".format(line))
rst.append("")
vl = ViewList(rst, "fakefile.rst")
# Create a node.
node = nodes.section()
node.document = self.state.document
# Parse the rst.
nested_parse_with_titles(self.state, vl, node)
return node.children
def setup(app):
app.add_config_value('fancy_include_path', "../examples", 'html')
app.add_directive('fancy_include', IncludeDirective)
return {'version': '0.1'} # identifies the version of our extension
|
Python
| 0.000001
|
@@ -1773,24 +1773,51 @@
image_path))
+%0A rst.append(%22%22)
%0A%0A #
|
342ada244652bb287da86dfb8c2c38031f3be9a8
|
Update README.
|
set-ornata-chroma-rgb.py
|
set-ornata-chroma-rgb.py
|
#!/usr/bin/env python3
# file: set-ornata-chroma-rgb.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Copyright Β© 2019 R.F. Smith <rsmith@xs4all.nl>
# Created: 2019-06-16T19:09:06+0200
# Last modified: 2019-06-16T20:47:20+0200
"""Set the LEDs on a Razer Ornata Chroma keyboard to a static RGB color."""
import argparse
import logging
import sys
import usb.core
__version__ = '0.1'
def static_color_msg(red, green, blue):
"""
Create a message to set the Razer Ornata Croma lights to a static color.
All arguments should be convertable to an integer in the range 0-255.
Returns a bytearray containing the message.
"""
def _chk(name, channel):
if (
isinstance(channel, str) and len(channel) > 2 and channel[0] == '0' and
channel[1] in 'bBoOxX'
):
channel = int(channel, {'b': 2, 'o': 8, 'x': 16}[channel[1].lower()])
else:
channel = int(channel)
if channel < 0 or channel > 255:
logging.error(f'{name} value should be in the range 0 to 255')
sys.exit(2)
return channel
red = _chk('red', red)
green = _chk('green', green)
blue = _chk('blue', blue)
msg = bytearray(90)
# byte 0 is 0
msg[1] = 0x3F # transaction id
# bytes 2-4 are 0.
msg[5] = 0x09 # data size
msg[6] = 0x0F # command class
msg[7] = 0x02 # command id
# The rest of the msg bytes are variable data
msg[8] = 0x01 # VARSTORE
msg[9] = 0x05 # BACKLIGHT_LED
msg[10] = 0x01 # effect id
# bytes 11-12 are 0.
msg[13] = 0x01
msg[14] = red
msg[15] = green
msg[16] = blue
# Calculate and set the checksum.
crc = 0
for j in msg[2:88]:
crc ^= j
msg[-2] = crc
return msg
def main(argv):
"""
Entry point for set-ornata-chroma-rgb.py.
Arguments:
argv: command line arguments
"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--log',
default='warning',
choices=['debug', 'info', 'warning', 'error'],
help="logging level (defaults to 'warning')"
)
parser.add_argument('-v', '--version', action='version', version=__version__)
parser.add_argument('red', help="red value 0-255")
parser.add_argument('green', help="green value 0-255")
parser.add_argument('blue', help="blue value 0-255")
args = parser.parse_args(argv)
logging.basicConfig(
level=getattr(logging, args.log.upper(), None),
format='%(levelname)s: %(message)s'
)
logging.debug(f'command line arguments = {argv}')
logging.debug(f'parsed arguments = {args}')
dev = usb.core.find(idVendor=0x1532, idProduct=0x021e)
if dev is None:
logging.error('No Razer Ornata Chroma keyboard found')
sys.exit(1)
msg = static_color_msg(args.red, args.green, args.blue) # set color to Green.
logging.info(f'red={args.red}')
logging.info(f'green={args.green}')
logging.info(f'blue={args.blue}')
read = dev.ctrl_transfer(0x21, 0x09, 0x300, 0x01, msg)
if read != 90:
logging.error('control transfer for setting the color failed')
if __name__ == '__main__':
main(sys.argv[1:])
|
Python
| 0
|
@@ -146,16 +146,47 @@
all.nl%3E%0A
+# SPDX-License-Identifier: MIT%0A
# Create
@@ -246,15 +246,15 @@
16T2
-0:47:20
+1:52:22
+020
|
67f431c23dba00860b1e5c13bb8a131cb850c33d
|
work around some views not being editable but still requiring it to be re-usable for all
|
go/conversation/templatetags/conversation_tags.py
|
go/conversation/templatetags/conversation_tags.py
|
import re
from copy import copy
from django.conf import settings
from django import template
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from django.template.defaultfilters import stringfilter
from go.conversation.utils import PagedMessageCache
from go.conversation.forms import ReplyToMessageForm
from go.base import message_store_client as ms_client
from go.base.utils import page_range_window, get_conversation_view_definition
register = template.Library()
@register.simple_tag
def conversation_screen(conv, view_name='show'):
# FIXME: Unhack this when all apps have definition modules.
try:
view_def = get_conversation_view_definition(
conv.conversation_type, conv)
except AttributeError:
return '/conversations/%s/' % (conv.key,)
return view_def.get_view_url(view_name, conversation_key=conv.key)
@register.simple_tag
def conversation_action(conv, action_name):
return reverse('conversations:conversation_action', kwargs={
'conversation_key': conv.key, 'action_name': action_name})
@register.inclusion_tag(
'conversation/inclusion_tags/show_conversation_messages.html',
takes_context=True)
def show_conversation_messages(context, conversation, direction=None,
page=None, batch_id=None, query=None,
token=None):
"""
Render the messages sent & received for this conversation.
:param ConversationWrapper conversation:
The conversation to show messages for.
:param str direction:
Either 'inbound' or 'outbound', defaults to 'inbound'
:param int page:
The page to display for the pagination.
:param str batch_id:
The batch_id to show messages for.
:param str query:
The query string to search messages for in the batch's inbound
messages.
"""
batch_id = batch_id or conversation.get_latest_batch_key()
direction = 'outbound' if direction == 'outbound' else 'inbound'
# Paginator starts counting at 1 so 0 would also be invalid
page = page or 1
inbound_message_paginator = Paginator(
PagedMessageCache(conversation.count_replies(),
lambda start, stop: conversation.received_messages(
start, stop, batch_id)), 20)
outbound_message_paginator = Paginator(
PagedMessageCache(conversation.count_sent_messages(),
lambda start, stop: conversation.sent_messages(start, stop,
batch_id)), 20)
# We have to copy the original context here so we have full access
# to all variables that were originally made available in the Template
# with RequestContext and friends. If we do not do this then the `user_api`
# is not available for the tags inside this inclusion tag.
tag_context = copy(context)
tag_context.update({
'batch_id': batch_id,
'conversation': conversation,
'inbound_message_paginator': inbound_message_paginator,
'outbound_message_paginator': outbound_message_paginator,
'inbound_uniques_count': conversation.count_inbound_uniques(),
'outbound_uniques_count': conversation.count_outbound_uniques(),
'message_direction': direction,
})
# If we're doing a query we can shortcut the results as we don't
# need all the message paginator stuff since we're loading the results
# asynchronously with JavaScript.
client = ms_client.Client(settings.MESSAGE_STORE_API_URL)
if query and not token:
token = client.match(batch_id, direction, [{
'key': 'msg.content',
'pattern': re.escape(query),
'flags': 'i',
}])
tag_context.update({
'query': query,
'token': token,
})
return tag_context
elif query and token:
match_result = ms_client.MatchResult(client, batch_id, direction,
token, page=int(page),
page_size=20)
message_paginator = match_result.paginator
tag_context.update({
'token': token,
'query': query,
})
elif direction == 'inbound':
message_paginator = inbound_message_paginator
else:
message_paginator = outbound_message_paginator
try:
message_page = message_paginator.page(page)
except PageNotAnInteger:
message_page = message_paginator.page(1)
except EmptyPage:
message_page = message_paginator.page(message_paginator.num_pages)
tag_context.update({
'message_page': message_page,
'message_page_range': page_range_window(message_page, 5),
})
return tag_context
@register.assignment_tag
def get_contact_for_message(user_api, message, direction='inbound'):
# This is a temporary work around to deal with the hackiness that
# lives in `contact_for_addr()`. It used to expect to be passed a
# `conversation.delivery_class` and this emulates that.
# It falls back to the raw `transport_type` so that errors in
# retrieving a contact return something useful for debugging (i.e.
# the `transport_type` that failed to be looked up).
delivery_class = user_api.delivery_class_for_msg(message)
user = message.user() if direction == 'inbound' else message['to_addr']
return user_api.contact_store.contact_for_addr(
delivery_class, unicode(user), create=True)
@register.assignment_tag
def get_reply_form_for_message(message):
form = ReplyToMessageForm(initial={
'to_addr': message['from_addr'],
'in_reply_to': message['message_id'],
})
form.fields['to_addr'].widget.attrs['readonly'] = True
return form
@register.filter
@stringfilter
def scrub_tokens(value):
site = Site.objects.get_current()
pattern = r'://%s/t/(\w+)/?' % (re.escape(site.domain),)
replacement = '://%s/t/******/' % (site.domain,)
return re.sub(pattern, replacement, value)
|
Python
| 0
|
@@ -824,125 +824,268 @@
-except AttributeError:%0A return '/conversations/%25s/' %25 (conv.key,)%0A return view_def.get_view_url(view_name,
+ return view_def.get_view_url(view_name, conversation_key=conv.key)%0A except AttributeError: # If there's not view definition%0A return '/conversations/%25s/' %25 (conv.key,)%0A except KeyError: # if the given view_name doesn't exist%0A return '/
conv
@@ -1092,29 +1092,34 @@
ersation
-_key=
+s/%25s/' %25 (
conv.key
)%0A%0A%0A@reg
@@ -1110,16 +1110,17 @@
conv.key
+,
)%0A%0A%0A@reg
|
0d38b9592fbb63e25b080d2f17b690c478042455
|
Add comments to Perfect Game solution
|
google-code-jam-2012/perfect-game/perfect-game.py
|
google-code-jam-2012/perfect-game/perfect-game.py
|
#!/usr/bin/env python
import sys
if len(sys.argv) < 2:
sys.exit('Usage: %s file.in' % sys.argv[0])
file = open(sys.argv[1], 'r')
T = int(file.readline())
for i in xrange(1, T+1):
N = int(file.readline())
L = map(int, file.readline().split(' '))
P = map(int, file.readline().split(' '))
assert N == len(L)
assert N == len(P)
levels = zip(L, P, range(N))
levels.sort(lambda li, pi: li[0] * pi[1] - li[1] * pi[0])
print "Case #%d:" % i, ' '.join([str(i) for li, pi, i in levels])
file.close()
|
Python
| 0
|
@@ -16,16 +16,252 @@
python%0A%0A
+# expected time per attempt is given by equation%0A# time = L%5B0%5D + (1-P%5B0%5D)*L%5B1%5D + (1-P%5B0%5D)*(1-P%5B1%5D)*L%5B2%5D + ...%0A# where L is the expected time and P is the probability of failure, per level%0A# swap two levels if L%5Bi%5D*P%5Bi+1%5D %3E L%5Bi+1%5D*P%5Bi%5D%0A%0A
import s
|
7be6d6eba2ca0efd2e4c0a18068332e5c6f7dac4
|
Use generators for async_add_entities in Anthemav (#76587)
|
homeassistant/components/anthemav/media_player.py
|
homeassistant/components/anthemav/media_player.py
|
"""Support for Anthem Network Receivers and Processors."""
from __future__ import annotations
import logging
from anthemav.connection import Connection
from anthemav.protocol import AVR
import voluptuous as vol
from homeassistant.components.media_player import (
PLATFORM_SCHEMA,
MediaPlayerDeviceClass,
MediaPlayerEntity,
MediaPlayerEntityFeature,
)
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_HOST,
CONF_MAC,
CONF_NAME,
CONF_PORT,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import (
ANTHEMAV_UDATE_SIGNAL,
CONF_MODEL,
DEFAULT_NAME,
DEFAULT_PORT,
DOMAIN,
MANUFACTURER,
)
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up our socket to the AVR."""
async_create_issue(
hass,
DOMAIN,
"deprecated_yaml",
breaks_in_ha_version="2022.10.0",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml",
)
_LOGGER.warning(
"Configuration of the Anthem A/V Receivers integration in YAML is "
"deprecated and will be removed in Home Assistant 2022.10; Your "
"existing configuration has been imported into the UI automatically "
"and can be safely removed from your configuration.yaml file"
)
await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config,
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up entry."""
name = config_entry.data[CONF_NAME]
mac_address = config_entry.data[CONF_MAC]
model = config_entry.data[CONF_MODEL]
avr: Connection = hass.data[DOMAIN][config_entry.entry_id]
entities = []
for zone_number in avr.protocol.zones:
_LOGGER.debug("Initializing Zone %s", zone_number)
entity = AnthemAVR(
avr.protocol, name, mac_address, model, zone_number, config_entry.entry_id
)
entities.append(entity)
_LOGGER.debug("Connection data dump: %s", avr.dump_conndata)
async_add_entities(entities)
class AnthemAVR(MediaPlayerEntity):
"""Entity reading values from Anthem AVR protocol."""
_attr_has_entity_name = True
_attr_should_poll = False
_attr_device_class = MediaPlayerDeviceClass.RECEIVER
_attr_icon = "mdi:audio-video"
_attr_supported_features = (
MediaPlayerEntityFeature.VOLUME_SET
| MediaPlayerEntityFeature.VOLUME_MUTE
| MediaPlayerEntityFeature.TURN_ON
| MediaPlayerEntityFeature.TURN_OFF
| MediaPlayerEntityFeature.SELECT_SOURCE
)
def __init__(
self,
avr: AVR,
name: str,
mac_address: str,
model: str,
zone_number: int,
entry_id: str,
) -> None:
"""Initialize entity with transport."""
super().__init__()
self.avr = avr
self._entry_id = entry_id
self._zone_number = zone_number
self._zone = avr.zones[zone_number]
if zone_number > 1:
self._attr_name = f"zone {zone_number}"
self._attr_unique_id = f"{mac_address}_{zone_number}"
else:
self._attr_unique_id = mac_address
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, mac_address)},
name=name,
manufacturer=MANUFACTURER,
model=model,
)
self.set_states()
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{ANTHEMAV_UDATE_SIGNAL}_{self._entry_id}",
self.update_states,
)
)
@callback
def update_states(self) -> None:
"""Update states for the current zone."""
self.set_states()
self.async_write_ha_state()
def set_states(self) -> None:
"""Set all the states from the device to the entity."""
self._attr_state = STATE_ON if self._zone.power is True else STATE_OFF
self._attr_is_volume_muted = self._zone.mute
self._attr_volume_level = self._zone.volume_as_percentage
self._attr_media_title = self._zone.input_name
self._attr_app_name = self._zone.input_format
self._attr_source = self._zone.input_name
self._attr_source_list = self.avr.input_list
async def async_select_source(self, source: str) -> None:
"""Change AVR to the designated source (by name)."""
self._zone.input_name = source
async def async_turn_off(self) -> None:
"""Turn AVR power off."""
self._zone.power = False
async def async_turn_on(self) -> None:
"""Turn AVR power on."""
self._zone.power = True
async def async_set_volume_level(self, volume: float) -> None:
"""Set AVR volume (0 to 1)."""
self._zone.volume_as_percentage = volume
async def async_mute_volume(self, mute: bool) -> None:
"""Engage AVR mute."""
self._zone.mute = mute
|
Python
| 0
|
@@ -2708,379 +2708,260 @@
-entities = %5B%5D%0A for zone_number in avr.protocol.zones:%0A _LOGGER.debug(%22Initializing Zone %25s%22, zone_number)%0A entity = AnthemAVR(%0A avr.protocol, name, mac_address, model, zone_number, config_entry.entry_id%0A )%0A entities.append(entity)%0A%0A _LOGGER.debug(%22Connection data dump: %25s%22, avr.dump_conndata)%0A%0A async_add_entities(entities
+_LOGGER.debug(%22Connection data dump: %25s%22, avr.dump_conndata)%0A%0A async_add_entities(%0A AnthemAVR(%0A avr.protocol, name, mac_address, model, zone_number, config_entry.entry_id%0A )%0A for zone_number in avr.protocol.zones%0A
)%0A%0A%0A
|
9f9e2db5105eab1f46590a6b8d6a5b5eff4ccb51
|
Use new BinarySensorDeviceClass enum in egardia (#61378)
|
homeassistant/components/egardia/binary_sensor.py
|
homeassistant/components/egardia/binary_sensor.py
|
"""Interfaces with Egardia/Woonveilig alarm control panel."""
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOTION,
DEVICE_CLASS_OPENING,
BinarySensorEntity,
)
from homeassistant.const import STATE_OFF, STATE_ON
from . import ATTR_DISCOVER_DEVICES, EGARDIA_DEVICE
EGARDIA_TYPE_TO_DEVICE_CLASS = {
"IR Sensor": DEVICE_CLASS_MOTION,
"Door Contact": DEVICE_CLASS_OPENING,
"IR": DEVICE_CLASS_MOTION,
}
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Initialize the platform."""
if discovery_info is None or discovery_info[ATTR_DISCOVER_DEVICES] is None:
return
disc_info = discovery_info[ATTR_DISCOVER_DEVICES]
async_add_entities(
(
EgardiaBinarySensor(
sensor_id=disc_info[sensor]["id"],
name=disc_info[sensor]["name"],
egardia_system=hass.data[EGARDIA_DEVICE],
device_class=EGARDIA_TYPE_TO_DEVICE_CLASS.get(
disc_info[sensor]["type"], None
),
)
for sensor in disc_info
),
True,
)
class EgardiaBinarySensor(BinarySensorEntity):
"""Represents a sensor based on an Egardia sensor (IR, Door Contact)."""
def __init__(self, sensor_id, name, egardia_system, device_class):
"""Initialize the sensor device."""
self._id = sensor_id
self._name = name
self._state = None
self._device_class = device_class
self._egardia_system = egardia_system
def update(self):
"""Update the status."""
egardia_input = self._egardia_system.getsensorstate(self._id)
self._state = STATE_ON if egardia_input else STATE_OFF
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def is_on(self):
"""Whether the device is switched on."""
return self._state == STATE_ON
@property
def device_class(self):
"""Return the device class."""
return self._device_class
|
Python
| 0
|
@@ -116,53 +116,31 @@
-DEVICE_CLASS_MOTION,%0A DEVICE_CLASS_OPENING
+BinarySensorDeviceClass
,%0A
@@ -319,29 +319,40 @@
ensor%22:
-DEVICE_CLASS_
+BinarySensorDeviceClass.
MOTION,%0A
@@ -375,53 +375,75 @@
t%22:
-DEVICE_CLASS_OPENING,%0A %22IR%22: DEVICE_CLASS_
+BinarySensorDeviceClass.OPENING,%0A %22IR%22: BinarySensorDeviceClass.
MOTI
|
c3a1193ef9b01529e9f92495fb1c3ae58d822c89
|
Fix #863 - Added a check for good pull from OWM and return if the object is None
|
homeassistant/components/sensor/openweathermap.py
|
homeassistant/components/sensor/openweathermap.py
|
"""
homeassistant.components.sensor.openweathermap
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OpenWeatherMap (OWM) service.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.openweathermap/
"""
import logging
from datetime import timedelta
from homeassistant.util import Throttle
from homeassistant.const import (CONF_API_KEY, TEMP_CELCIUS, TEMP_FAHRENHEIT)
from homeassistant.helpers.entity import Entity
REQUIREMENTS = ['pyowm==2.3.0']
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPES = {
'weather': ['Condition', None],
'temperature': ['Temperature', None],
'wind_speed': ['Wind speed', 'm/s'],
'humidity': ['Humidity', '%'],
'pressure': ['Pressure', 'mbar'],
'clouds': ['Cloud coverage', '%'],
'rain': ['Rain', 'mm'],
'snow': ['Snow', 'mm']
}
# Return cached results if last scan was less then this time ago
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=120)
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Get the OpenWeatherMap sensor. """
if None in (hass.config.latitude, hass.config.longitude):
_LOGGER.error("Latitude or longitude not set in Home Assistant config")
return False
try:
from pyowm import OWM
except ImportError:
_LOGGER.exception(
"Unable to import pyowm. "
"Did you maybe not install the 'PyOWM' package?")
return False
SENSOR_TYPES['temperature'][1] = hass.config.temperature_unit
unit = hass.config.temperature_unit
forecast = config.get('forecast', 0)
owm = OWM(config.get(CONF_API_KEY, None))
if not owm:
_LOGGER.error(
"Connection error "
"Please check your settings for OpenWeatherMap.")
return False
data = WeatherData(owm, forecast, hass.config.latitude,
hass.config.longitude)
dev = []
try:
for variable in config['monitored_conditions']:
if variable not in SENSOR_TYPES:
_LOGGER.error('Sensor type: "%s" does not exist', variable)
else:
dev.append(OpenWeatherMapSensor(data, variable, unit))
except KeyError:
pass
if forecast == 1:
SENSOR_TYPES['forecast'] = ['Forecast', None]
dev.append(OpenWeatherMapSensor(data, 'forecast', unit))
add_devices(dev)
# pylint: disable=too-few-public-methods
class OpenWeatherMapSensor(Entity):
""" Implements an OpenWeatherMap sensor. """
def __init__(self, weather_data, sensor_type, temp_unit):
self.client_name = 'Weather'
self._name = SENSOR_TYPES[sensor_type][0]
self.owa_client = weather_data
self.temp_unit = temp_unit
self.type = sensor_type
self._state = None
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self.update()
@property
def name(self):
return '{} {}'.format(self.client_name, self._name)
@property
def state(self):
""" Returns the state of the device. """
return self._state
@property
def unit_of_measurement(self):
""" Unit of measurement of this entity, if any. """
return self._unit_of_measurement
# pylint: disable=too-many-branches
def update(self):
""" Gets the latest data from OWM and updates the states. """
self.owa_client.update()
data = self.owa_client.data
fc_data = self.owa_client.fc_data
if self.type == 'weather':
self._state = data.get_detailed_status()
elif self.type == 'temperature':
if self.temp_unit == TEMP_CELCIUS:
self._state = round(data.get_temperature('celsius')['temp'],
1)
elif self.temp_unit == TEMP_FAHRENHEIT:
self._state = round(data.get_temperature('fahrenheit')['temp'],
1)
else:
self._state = round(data.get_temperature()['temp'], 1)
elif self.type == 'wind_speed':
self._state = data.get_wind()['speed']
elif self.type == 'humidity':
self._state = data.get_humidity()
elif self.type == 'pressure':
self._state = round(data.get_pressure()['press'], 0)
elif self.type == 'clouds':
self._state = data.get_clouds()
elif self.type == 'rain':
if data.get_rain():
self._state = round(data.get_rain()['3h'], 0)
self._unit_of_measurement = 'mm'
else:
self._state = 'not raining'
self._unit_of_measurement = ''
elif self.type == 'snow':
if data.get_snow():
self._state = round(data.get_snow(), 0)
self._unit_of_measurement = 'mm'
else:
self._state = 'not snowing'
self._unit_of_measurement = ''
elif self.type == 'forecast':
self._state = fc_data.get_weathers()[0].get_status()
class WeatherData(object):
""" Gets the latest data from OpenWeatherMap. """
def __init__(self, owm, forecast, latitude, longitude):
self.owm = owm
self.forecast = forecast
self.latitude = latitude
self.longitude = longitude
self.data = None
self.fc_data = None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
""" Gets the latest data from OpenWeatherMap. """
obs = self.owm.weather_at_coords(self.latitude, self.longitude)
self.data = obs.get_weather()
if self.forecast == 1:
obs = self.owm.three_hours_forecast_at_coords(self.latitude,
self.longitude)
self.fc_data = obs.get_forecast()
|
Python
| 0.000006
|
@@ -5587,32 +5587,137 @@
self.longitude)%0A
+ if obs is None:%0A _LOGGER.warning('Failed to fetch data from OWM')%0A return%0A%0A
self.dat
|
568828287f426bef598c11267b6ee351751671fe
|
add conf parameters to XPathFetchPage module
|
feedin/modules/xpathfetchpage.py
|
feedin/modules/xpathfetchpage.py
|
from module import Module
import urllib2
import urlparse
from StringIO import StringIO
import gzip
from lxml import html
from lxml import etree
from feedin import util
from feedin.dotdict2 import DotDict2
from module import ModuleBuilder
class XPathFetchPage(Module):
EXTRACT_TYPE_DICT = 'dict'
EXTRACT_TYPE_TEXT = 'text'
EXTRACT_TYPE_HTML = 'html'
CHARSETS = ['utf8', 'gb2312', 'GB18030']
'''
classdocs
'''
def __init__(self, setting, context=None):
super(XPathFetchPage, self).__init__(setting, context)
self.URL = setting['conf']['URL']
self.ExtractXPath = setting['conf']['xpath']['value']
self.ExtractMethod = setting['conf']['ExtractMethod'] if 'ExtractMethod' in setting['conf'] else XPathFetchPage.EXTRACT_TYPE_DICT
def execute(self, context=None):
url = self.URL
if 'subkey' in self.URL: # a subkey assigned
key = self.URL['subkey'].lstrip('item.')
url = context.items[0][key]
else:
url = self.URL['value']
http_proxy = context.http_proxy
if http_proxy:
proxy_handler = urllib2.ProxyHandler({'http': http_proxy})
opener = urllib2.build_opener(proxy_handler)
else:
opener = urllib2.build_opener()
response = opener.open(url)
content = response.read()
opener.close()
if response.info().get('Content-Encoding') == 'gzip':
buf = StringIO(content)
f = gzip.GzipFile(fileobj=buf)
content = f.read()
decoding_error = None # last decoding error, will be raised if cannot decode the content
decoded_content = None # decoded
for charset in XPathFetchPage.CHARSETS:
try:
decoded_content = unicode(content, charset)
except UnicodeDecodeError as e:
decoding_error = e
if not decoded_content and decoding_error:
raise decoding_error
root = html.fromstring(decoded_content)
#root = doc.getroot()
context.last_result = []
for element in root.xpath(self.ExtractXPath):
for link_element in element.iter("a"):
link_element.set("href", urlparse.urljoin(url, link_element.get("href")))
if self.ExtractMethod == XPathFetchPage.EXTRACT_TYPE_TEXT:
new_item = etree.tostring(element, method='text', encoding=unicode)
elif self.ExtractMethod == XPathFetchPage.EXTRACT_TYPE_HTML:
new_item = etree.tostring(element, method='html', encoding=unicode)
else:
element_dic = util.etree_to_dict2(element)
new_item = DotDict2(element_dic)
context.items.append(new_item)
context.last_result.append(new_item)
class XPathFetchPageBuilder(ModuleBuilder):
def build(self, module_config, context=None):
return XPathFetchPage(module_config, context)
|
Python
| 0
|
@@ -638,32 +638,261 @@
path'%5D%5B'value'%5D%0A
+ self.html5 = setting%5B'conf'%5D%5B'html'%5D%5B'value'%5D == 'true' if 'html5' in setting%5B'conf'%5D else False%0A self.useAsString = setting%5B'conf'%5D%5B'useAsString'%5D%5B'value'%5D == 'true' if 'useAsString' in setting%5B'conf'%5D else False%0A
self.Ext
|
a4ee7fa5b77b4513ceddcfb0e9be958442d3c792
|
Use lambdas for api commands
|
IKEA.py
|
IKEA.py
|
import json
import uuid
from pytradfri import Gateway
from pytradfri.api.libcoap_api import APIFactory
from time import sleep
import numpy
CONFIG_FILE = "tradfri_psk.conf"
class RGB(numpy.ndarray):
@classmethod
def from_str(cls, hex):
return numpy.array([int(hex[i:i+2], 16) for i in (0, 2, 4)]).view(cls)
def __str__(self):
self = self.astype(numpy.uint8)
return ''.join(format(n, 'x') for n in self)
class TradfriHandler:
def __init__(self, gateway_hostname, key):
conf = self.load_psk(CONFIG_FILE)
try:
identity = conf[gateway_hostname].get("identity")
psk = conf[gateway_hostname].get("key")
api_factory = APIFactory(host=gateway_hostname, psk_id=identity, psk=psk)
except KeyError:
identity = uuid.uuid4().hex
api_factory = APIFactory(host=gateway_hostname, psk_id=identity)
psk = api_factory.generate_psk(key)
conf[gateway_hostname] = {"identity": identity, "key": psk}
self.save_psk(CONFIG_FILE, conf)
self.api = api_factory.request
self.gateway = Gateway()
@staticmethod
def load_psk(filename):
try:
with open(filename, encoding="utf-8") as fdesc:
return json.loads(fdesc.read())
except FileNotFoundError:
return {}
@staticmethod
def save_psk(filename, config):
data = json.dumps(config, sort_keys=True, indent=4)
with open(filename, "w", encoding="utf-8") as fdesc:
fdesc.write(data)
@staticmethod
def average_hex_color(colors):
if len(colors) == 1:
return colors[0]
rgb_colors = [RGB.from_str(hex) for hex in colors]
return (numpy.sum(rgb_colors, axis=0) // len(rgb_colors)).view(RGB)
def export_group(self, group):
# These properties exists on the group as well, but they are incorrect for some reason
hex_colors, states = zip(*map(lambda light: (light.light_control.lights[0].hex_color,
light.light_control.lights[0].state),
filter(lambda device: device.has_light_control,
self.api(group.members()))
))
return {
"name": group.name,
"id": group.id,
"state": any(states),
"dimmer": group.dimmer,
"color": '#' + str(self.average_hex_color(list(hex_colors)))
}
def export_groups(self):
return list(map(self.export_group, self.get_groups()))
def get_groups(self):
devices_commands = self.api(self.gateway.get_groups())
return self.api(devices_commands)
def get_group(self, group_id):
return self.api(self.gateway.get_group(group_id))
def set_state(self, group_id, new_state):
light_group = self.get_group(group_id)
if not light_group:
return False
self.api(light_group.set_state(new_state))
return True
def set_dimmer(self, group_id, value):
light_group = self.get_group(group_id)
if not light_group:
return False
self.api(light_group.set_dimmer(value, transition_time=1))
return True
def set_hex_color(self, group_id, value):
light_group = self.get_group(group_id)
if not light_group:
return False
self.api(light_group.set_hex_color(value, transition_time=1))
return True
|
Python
| 0.000001
|
@@ -2913,353 +2913,455 @@
-light_group = self.get_group(group_id)%0A if not light_group:%0A return False%0A self.api(light_group.set_state(new_state))%0A return True%0A%0A def set_dimmer(self, group_id, value):%0A light_group = self.get_group(group_id)%0A if not light_group:%0A return False%0A self.api(light_group.set_dimme
+return self.run_api_command_for_group(lambda lg: lg.set_state(new_state),%0A group_id)%0A%0A def set_dimmer(self, group_id, value):%0A return self.run_api_command_for_group(lambda lg: lg.set_dimmer(value, transition_time=1),%0A group_id)%0A%0A def set_hex_color(self, group_id, value):%0A return self.run_api_command_for_group(lambda lg: lg.set_hex_colo
r(va
@@ -3375,33 +3375,33 @@
ansition_time=1)
-)
+,
%0A return
@@ -3397,47 +3397,113 @@
-return True%0A%0A def set_hex_color(self
+ group_id)%0A%0A def run_api_command_for_group(self, command_function
, gr
@@ -3500,39 +3500,32 @@
nction, group_id
-, value
):%0A light
@@ -3632,58 +3632,36 @@
api(
-light_group.set_hex_color(value, transition_time=1
+command_function(light_group
))%0A
@@ -3678,9 +3678,8 @@
urn True
-%0A
|
ead5daf0e631a3482a8510abc36f48b227e862ee
|
Delete unused variable assignations.
|
game.py
|
game.py
|
# -*- coding: utf-8 -*-
import functions.commands as command
import functions.database as db
prompt = ">>> "
view = {
'0.0' : "Tutorial. You see a rat attacking you, fight!",
'1.0' : "You stand in a start of dungeon. You see a torch."
}
position = '1.0'
f = open('ASCII/otsikko_unicode.asc', 'r')
print(f.read())
f.close()
while True:
'''
You can end loop by selecting 5 in main context or write
"quit" in game context.
'''
context = command.doMenu()
while context == "main":
prompt = "(main) >>> "
try:
c = int(input(prompt))
context = command.doMenu(c)
except ValueError as e:
print(e)
while context == "game":
position = db.getPosition()
prompt = "(game) >>> "
print("--\n{}".format(position))
c = input(prompt).lower().split()
if (command.isValid(c)):
context = command.execute(c)
else:
print('Invalid command. '
'Write "help" to get list of available commands.'
)
|
Python
| 0
|
@@ -92,179 +92,8 @@
db%0A%0A
-prompt = %22%3E%3E%3E %22%0Aview = %7B%0A '0.0' : %22Tutorial. You see a rat attacking you, fight!%22,%0A '1.0' : %22You stand in a start of dungeon. You see a torch.%22%0A%7D%0A%0Aposition = '1.0'%0A%0A
f =
|
760bc99c22b6ac66cdd240b29720d0bbfccc4920
|
Define waf_tools for each class instance separately
|
glue.py
|
glue.py
|
"""
Glue code between nsloaders and Mybuild bindings for py/my DSL files.
"""
__author__ = "Eldar Abusalimov"
__date__ = "2013-08-07"
from _compat import *
from nsloader import myfile
from nsloader import pyfile
import mybuild
from mybuild.binding import pydsl
from util.operator import attr
from util.namespace import Namespace
class LoaderMixin(object):
dsl = None
@property
def defaults(self):
return dict(super(LoaderMixin, self).defaults,
module = self.dsl.module,
application = self.dsl.application,
library = self.dsl.library,
project = self.dsl.project,
option = self.dsl.option,
tool = tool,
MYBUILD_VERSION=mybuild.__version__)
class WafBasedTool(mybuild.core.Tool):
waf_tools = []
def options(self, module, ctx):
ctx.load(self.waf_tools)
def configure(self, module, ctx):
ctx.load(self.waf_tools)
class CcTool(WafBasedTool):
waf_tools = ['compiler_c']
def __init__(self):
super(CcTool, self).__init__()
self.build_kwargs = {}
def create_namespaces(self, module):
return dict(cc=Namespace(defines=Namespace()))
def define(self, key, val):
assert('defines' in self.build_kwargs)
format_str = '{0}=\"{1}\"' if isinstance(val, str) else '{0}={1}'
self.build_kwargs['defines'].append(format_str.format(key, val))
def build(self, module, ctx):
self.build_kwargs['use'] = [m._name for m in module.depends]
self.build_kwargs['source'] = module.files
self.build_kwargs['target'] = module._name
self.build_kwargs['defines'] = []
for k, v in iteritems(module.cc.defines.__dict__):
self.define(k, v)
class CcObjTool(CcTool):
def build(self, module, ctx):
super(CcObjTool, self).build(module, ctx)
ctx.objects(**self.build_kwargs)
class CcAppTool(CcTool):
def build(self, module, ctx):
super(CcAppTool, self).build(module, ctx)
ctx.program(**self.build_kwargs)
class CcLibTool(CcTool):
def build(self, module, ctx):
super(CcLibTool, self).build(module, ctx)
if module.isstatic:
ctx.stlib(**self.build_kwargs)
else:
ctx.shlib(**self.build_kwargs)
tool = Namespace(cc=CcObjTool(), cc_app=CcAppTool(), cc_lib=CcLibTool())
class MyDslLoader(LoaderMixin, myfile.MyFileLoader):
FILENAME = 'Mybuild'
class CcModule(mybuild.core.Module):
tools = [tool.cc]
class ApplicationCcModule(mybuild.core.Module):
tools = [tool.cc_app]
class LibCcModule(mybuild.core.Module):
tools = [tool.cc_lib]
isstatic = True
dsl = Namespace()
dsl.module = CcModule._meta_for_base(option_types=[])
dsl.application = ApplicationCcModule._meta_for_base(option_types=[])
dsl.library = LibCcModule._meta_for_base(option_types=[])
dsl.option = mybuild.core.Optype
dsl.project = None
class PyDslLoader(LoaderMixin, pyfile.PyFileLoader):
FILENAME = 'Pybuild'
dsl = pydsl
|
Python
| 0
|
@@ -869,32 +869,110 @@
core.Tool):%0A
+def __init__(self):%0A super(WafBasedTool, self).__init__()%0A self.
waf_tools = %5B%5D%0A%0A
@@ -1040,16 +1040,17 @@
_tools)%0A
+%0A
def
@@ -1146,40 +1146,8 @@
l):%0A
- waf_tools = %5B'compiler_c'%5D%0A%0A
@@ -1197,32 +1197,76 @@
elf).__init__()%0A
+ self.waf_tools.append('compiler_c')%0A
self.bui
|
4e2e4a841d4cdb3efa151601fe5200b0720ffe75
|
update set
|
inference/story.py
|
inference/story.py
|
# -*- coding: utf-8 -*-
# file: song_lyrics.py
# author: JinTian
# time: 08/03/2017 10:22 PM
# Copyright 2017 JinTian. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
import collections
import os
import sys
import numpy as np
import tensorflow as tf
from utils.model import rnn_model
from utils.process import process, generate_batch
import time
tf.app.flags.DEFINE_integer('batch_size', 6, 'batch size.')
tf.app.flags.DEFINE_float('learning_rate', 0.01, 'learning rate.')
tf.app.flags.DEFINE_string('file_path', os.path.abspath('./dataset/story.txt'), 'file path of story.')
tf.app.flags.DEFINE_string('checkpoints_dir', os.path.abspath('./checkpoints'), 'checkpoints save path.')
tf.app.flags.DEFINE_string('model_prefix', 'story', 'model save prefix.')
tf.app.flags.DEFINE_string('output_path', os.path.abspath('./output/story.txt'), 'file path of output.')
tf.app.flags.DEFINE_integer('epochs', 1000, 'train how many epochs.')
FLAGS = tf.app.flags.FLAGS
start_token = 'S'
end_token = 'E'
def train():
if not os.path.exists(os.path.dirname(FLAGS.checkpoints_dir)):
os.mkdir(os.path.dirname(FLAGS.checkpoints_dir))
if not os.path.exists(FLAGS.checkpoints_dir):
os.mkdir(FLAGS.checkpoints_dir)
story_vector, word_to_int, vocabularies = process(FLAGS.file_path)
batches_inputs, batches_outputs = generate_batch(FLAGS.batch_size, story_vector, word_to_int)
input_data = tf.placeholder(tf.int32, [FLAGS.batch_size, None])
output_targets = tf.placeholder(tf.int32, [FLAGS.batch_size, None])
end_points = rnn_model(model='lstm', input_data=input_data, output_data=output_targets, vocab_size=len(
vocabularies), rnn_size=128, num_layers=2, batch_size=FLAGS.batch_size, learning_rate=FLAGS.learning_rate)
saver = tf.train.Saver(tf.global_variables())
init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
with tf.Session() as sess:
# sess = tf_debug.LocalCLIDebugWrapperSession(sess=sess)
# sess.add_tensor_filter("has_inf_or_nan", tf_debug.has_inf_or_nan)
sess.run(init_op)
start_epoch = 0
checkpoint = tf.train.latest_checkpoint(FLAGS.checkpoints_dir)
if checkpoint:
saver.restore(sess, checkpoint)
print("[INFO] restore from the checkpoint {0}".format(checkpoint))
start_epoch += int(checkpoint.split('-')[-1])
print('[INFO] start training...')
try:
for epoch in range(start_epoch, FLAGS.epochs):
print("[INFO]--------- Epoch: %d --------" % (epoch))
n = 0
n_chunk = len(story_vector) // FLAGS.batch_size
for batch in range(n_chunk):
start_at = time.time()
loss, _, _ = sess.run([
end_points['total_loss'],
end_points['last_state'],
end_points['train_op']
], feed_dict={input_data: batches_inputs[n], output_targets: batches_outputs[n]})
n += 1
end_at = time.time()
print('[INFO] batch: %d , time: %fs, training loss: %.6f' % (batch, end_at - start_at, loss))
if epoch % 20 == 0:
saver.save(sess, os.path.join(FLAGS.checkpoints_dir, FLAGS.model_prefix), global_step=epoch)
except KeyboardInterrupt:
print('[INFO] Interrupt manually, try saving checkpoint for now...')
saver.save(sess, os.path.join(FLAGS.checkpoints_dir, FLAGS.model_prefix), global_step=epoch)
print('[INFO] Last epoch were saved, next time will start from epoch {}.'.format(epoch))
def to_word(predict, vocabs):
t = np.cumsum(predict)
s = np.sum(predict)
sample = int(np.searchsorted(t, np.random.rand(1) * s))
if sample > len(vocabs)-1:
sample = len(vocabs) - 100
return vocabs[sample]
def write():
batch_size = 1
story_vector, word_int_map, vocabularies = process(FLAGS.file_path)
input_data = tf.placeholder(tf.int32, [batch_size, None])
end_points = rnn_model(model='lstm', input_data=input_data, output_data=None, vocab_size=len(
vocabularies), rnn_size=128, num_layers=2, batch_size=64, learning_rate=FLAGS.learning_rate)
saver = tf.train.Saver(tf.global_variables())
init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
with tf.Session() as sess:
sess.run(init_op)
checkpoint = tf.train.latest_checkpoint(FLAGS.checkpoints_dir)
saver.restore(sess, checkpoint)
x = np.array([list(map(word_int_map.get, start_token))])
[predict, last_state] = sess.run([end_points['prediction'], end_points['last_state']],
feed_dict={input_data: x})
word = to_word(predict, vocabularies)
print(word)
story = ''
while word != end_token:
story += word
x = np.zeros((1, 1))
x[0, 0] = word_int_map[word]
[predict, last_state] = sess.run([end_points['prediction'], end_points['last_state']],
feed_dict={input_data: x, end_points['initial_state']: last_state})
word = to_word(predict, vocabularies)
# word = words[np.argmax(probs_)]
return story
def main(is_train):
if is_train:
print('[INFO] train story...')
train()
else:
print('[INFO] compose story...')
story = write()
with open(FLAGS.output_path, 'w') as f:
f.write(story)
print('[Info] process done.')
if __name__ == '__main__':
tf.app.run()
|
Python
| 0.000001
|
@@ -977,17 +977,18 @@
_size',
-6
+10
, 'batch
@@ -1491,18 +1491,17 @@
pochs',
-10
+3
00, 'tra
|
5830843f88fc87e8c31f2983413acc16aa0c0711
|
remove typo
|
dv_apps/dataverse_auth/models.py
|
dv_apps/dataverse_auth/models.py
|
from django.db import models
from datetime import datetime
from django.utils.encoding import python_2_unicode_compatible
class AuthenticatedUser(models.Model):
useridentifier = models.CharField(unique=True, max_length=255)
affiliation = models.CharField(max_length=255, blank=True, null=True)
email = models.CharField(unique=True, max_length=255)
firstname = models.CharField(max_length=255, blank=True, null=True)
lastname = models.CharField(max_length=255, blank=True, null=True)
createdtime = models.DateTimeField(default=datetime.now)
lastlogintime = models.DateTimeField(blank=True, null=True)
lastapiusetime = = models.DateTimeField(blank=True, null=True)
position = models.CharField(max_length=255, blank=True, null=True)
superuser = models.NullBooleanField()
def is_superuser(self):
if not self.superuser:
return False
if self.superuser is True:
return True
return False
def __str__(self):
if self.lastname and self.firstname:
return '%s (%s, %s)' % (self.useridentifier, self.lastname, self.firstname)
elif self.lastname:
return '%s (%s)' % (self.useridentifier, self.lastname)
else:
return self.useridentifier
class Meta:
ordering = ('useridentifier',)
managed = False
db_table = 'authenticateduser'
class ApiToken(models.Model):
authenticateduser = models.ForeignKey('Authenticateduser')
tokenstring = models.CharField(unique=True, max_length=255)
disabled = models.BooleanField()
expiretime = models.DateTimeField()
createtime = models.DateTimeField()
authenticateduser = models.ForeignKey('Authenticateduser')
def __str__(self):
return '%s - %s' % (self.authenticateduser, self.tokenstring)
def is_expired(self):
now = datetime.now()
if now > self.expiretime:
#self.disabled = True
#self.save()
return True
return False
class Meta:
ordering = ('-expiretime', 'authenticateduser')
managed = False
db_table = 'apitoken'
@python_2_unicode_compatible
class BuiltInUser(models.Model):
affiliation = models.CharField(max_length=255, blank=True, null=True)
email = models.CharField(unique=True, max_length=255)
encryptedpassword = models.CharField(max_length=255, blank=True, null=True)
firstname = models.CharField(max_length=255, blank=True, null=True)
lastname = models.CharField(max_length=255, blank=True, null=True)
passwordencryptionversion = models.IntegerField(blank=True, null=True)
position = models.CharField(max_length=255, blank=True, null=True)
username = models.CharField(unique=True, max_length=255)
def __str__(self):
return '%s' % self.username
class Meta:
managed = False
db_table = 'builtinuser'
|
Python
| 0.999999
|
@@ -642,18 +642,16 @@
setime =
- =
models.
|
0dead7fdb0d28ee29c18884ec4c68a197eca6461
|
Update tests.py
|
inspect_model/tests.py
|
inspect_model/tests.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.test import TestCase
from django.contrib.contenttypes.models import ContentType
from inspect_model import InspectModel
class OtherModel(models.Model):
name = models.CharField(max_length=10, blank=True)
class LinkedModel(models.Model):
name = models.CharField(max_length=10, blank=True)
toinspect = models.OneToOneField('ModelToInspect', blank=True, null=True)
class ModelToInspect(models.Model):
# "standard" fields
#id = models.AutoField(primary_key=True)
bigint = models.BigIntegerField(blank=True, null=True)
boolean = models.BooleanField(default=True)
char = models.CharField(max_length=10, blank=True)
comma = models.CommaSeparatedIntegerField(max_length=10, blank=True)
date = models.DateField(blank=True, null=True)
datetime = models.DateTimeField(blank=True, null=True)
decimal = models.DecimalField(max_digits=10, decimal_places=2, null=True)
email = models.EmailField(max_length=75, blank=True)
filefield = models.FileField(upload_to='media', max_length=100, blank=True)
filepath = models.FilePathField(path="/tmp", blank=True)
floatfield = models.FloatField(blank=True, null=True)
# do not test image, as it needs the PIL or Pillow dependency
#image = models.ImageField(upload_to='media', blank=True)
intfield = models.IntegerField(blank=True, null=True)
ipaddress = models.IPAddressField(blank=True, null=True)
nullboolean = models.NullBooleanField(blank=True, null=True)
positiveint = models.PositiveIntegerField(blank=True, null=True)
positivesmallint = models.PositiveSmallIntegerField(blank=True, null=True)
slug = models.SlugField(max_length=50, blank=True)
smallint = models.SmallIntegerField(blank=True, null=True)
text = models.TextField(blank=True)
time = models.TimeField(blank=True, null=True)
url = models.URLField(blank=True)
# relationship fields
foreign = models.ForeignKey(OtherModel, blank=True, null=True)
content_type = models.ForeignKey(ContentType)
genericforeign = generic.GenericForeignKey('content_type', 'positiveint')
many = models.ManyToManyField(OtherModel, related_name='many')
one = models.OneToOneField(
OtherModel,
related_name='one',
blank=True,
null=True)
# class attributes
attribute = 'foo'
_hidden = 'bar'
# class methods that can be called "as is"
def __unicode__(self): # implicit calling by printing
return 'model to inspect'
def method_one_arg(self):
return 'bar'
def method_args_with_defaults(self, foo='bar'):
return foo
# class methods that can't be called "as is"
def method_args(self, foo):
return 'bar'
def method_args_mixed(self, foo, bar='baz'):
return 'bar'
def _hidden_method(self):
return 'bar'
@property
def a_property(self):
return 'bar'
class ManyRelatedModel(models.Model):
name = models.CharField(max_length=10, blank=True)
many = models.ManyToManyField(ModelToInspect)
class ModelInspectTest(TestCase):
def setUp(self):
self.om = OtherModel.objects.create()
ctype = ContentType.objects.get_for_model(OtherModel)
self.mti = ModelToInspect.objects.create(foreign=self.om, one=self.om, content_type=ctype)
self.mti.many.add(self.om)
self.lm = LinkedModel.objects.create(toinspect=self.mti)
self.im = InspectModel(self.mti)
def test_fields(self):
# 21 fields + the automatically generated id field
self.assertEqual(len(self.im.fields), 22)
self.assertFalse('attribute' in self.im.fields)
self.assertFalse('_hidden' in self.im.fields)
def test_relation_fields(self):
# 2 'local' fields + a OneToOneField on LinkedModel
self.assertEqual(len(self.im.relation_fields), 5)
self.assertTrue('foreign' in self.im.relation_fields)
self.assertTrue('content_type' in self.im.relation_fields)
self.assertTrue('genericforeign' in self.im.relation_fields)
self.assertTrue('linkedmodel' in self.im.relation_fields)
self.assertTrue('one' in self.im.relation_fields)
self.assertFalse('many' in self.im.relation_fields)
def test_many_fields(self):
# 1 local + 1 on the ManyRelatedModel
self.assertEqual(len(self.im.many_fields), 2)
self.assertTrue('manyrelatedmodel_set' in self.im.many_fields)
self.assertTrue('many' in self.im.many_fields)
self.assertFalse('one' in self.im.many_fields)
def test_attributes(self):
self.assertEqual(len(self.im.attributes), 1)
def test_properties(self):
self.assertEqual(len(self.im.properties), 2)
def test_methods(self):
self.assertEqual(len(self.im.methods), 2)
self.assertFalse('method_args' in self.im.methods)
self.assertFalse('_hidden_method' in self.im.methods)
def test_items(self):
# make sure all the items are indeed part of a ModelToInspect instance
items = [getattr(self.mti, f) for f in self.im.items]
self.assertEqual(len(items), 32)
def test_multiple_calls(self):
"""Multiple calls to get_FOO"""
self.im.update_fields()
self.assertEqual(len(self.im.fields), 22)
self.assertEqual(len(self.im.relation_fields), 3)
self.assertEqual(len(self.im.many_fields), 2)
self.im.update_attributes()
self.assertEqual(len(self.im.attributes), 1)
self.im.update_methods()
self.assertEqual(len(self.im.methods), 2)
self.assertEqual(len(self.im.items), 32)
|
Python
| 0
|
@@ -160,16 +160,64 @@
tentType
+%0Afrom django.contrib.contenttypes import generic
%0A%0Afrom i
|
f38ecca955782374d2e2d5d740b394a150de8bf2
|
Remove old comment.
|
inspectors/commerce.py
|
inspectors/commerce.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import logging
import os
from urllib.parse import urljoin, urlparse, urlunparse
from bs4 import BeautifulSoup
from utils import utils, inspector
# http://www.oig.doc.gov/Pages/Audits-Evaluations.aspx?YearStart=01/01/1996&YearEnd=12/31/2014
# Oldest report: 1996
# options:
# standard since/year options for a year range to fetch from.
#
# topics - limit reports fetched to one or more topics, comma-separated, which
# correspond to the topics defined on the site. For example:
# 'A,I'
# Defaults to all topics.
#
# A - Audits and Evaluations
# I - Investigations
# C - Correspondence
# AI - Audits Initiated
# T - Testimony
# Notes for IG's web team:
# - The report 'OAE-19846_Announcement'
TOPIC_TO_URL_SLUG = {
"A": 'Audits-Evaluations',
"I": 'Investigations',
"C": 'Correspondence',
"AI": 'Audits-Initiated',
"T": 'Testimony',
}
TOPIC_NAMES = {
"A": "Audits and Evaluations",
"I": "Investigations",
"C": "Correspondence",
"AI": "Audits Initiated",
"T": "Testimony",
}
BASE_TOPIC_URL = "http://www.oig.doc.gov/Pages/{}.aspx?YearStart=01/01/1996&YearEnd=12/31/2014"
BASE_REPORT_URL = "http://www.oig.doc.gov/"
LANDING_URLS_WITHOUT_REPORTS = [
"http://www.oig.doc.gov/Pages/Multimillion-Dollar-Judgment-in-NOAA-and-NIST-Fraud-Case.aspx",
"http://www.oig.doc.gov/Pages/NIST-Grantee-Pleads-Guilty-to-Misuse-of-Federal-Funds.aspx",
"http://www.oig.doc.gov/Pages/Former-Census-Contractor-Sentenced-for-Money-Laundering.aspx",
"http://www.oig.doc.gov/Pages/Commerce-Employee-Entered-into-Pretrial-Diversion-Program-for-Metrocheck-Fraud,-Removed-From-Federal-Service.aspx",
"http://www.oig.doc.gov/Pages/NOAA-Employee-Fired-for-Misuse-of-Purchase-Card.aspx",
"http://www.oig.doc.gov/Pages/NOAA-Grantee-Sentenced-for-Misusing-Funds.aspx",
"http://www.oig.doc.gov/Pages/Former-NIST-Employee-Sentenced-in-Steel-Theft-Scheme.aspx",
"http://www.oig.doc.gov/Pages/NIST-Grant-Recipient-Sentenced-for-Grant-Fraud;-Civil-Suit-Filed.aspx",
]
def run(options):
year_range = inspector.year_range(options)
topics = options.get('topics')
if topics:
topics = topics.split(",")
else:
topics = TOPIC_TO_URL_SLUG.keys()
for topic in topics:
extract_reports_for_topic(topic, year_range)
def extract_reports_for_topic(topic, year_range):
topic_url = BASE_TOPIC_URL.format(TOPIC_TO_URL_SLUG[topic])
topic_page = beautifulsoup_from_url(topic_url)
results = topic_page.select("div.row")
for result in results:
report = report_from(result, topic, topic_url, year_range)
if report:
inspector.save_report(report)
def report_from(result, topic, topic_url, year_range):
published_on_text = result.select("div.row-date")[0].text
published_on = datetime.datetime.strptime(published_on_text, '%m.%d.%Y')
topic_name = TOPIC_NAMES[topic]
title = result.select("div.row-title")[0].text
unreleased = False
if "not publically released" in title:
unreleased = True
if unreleased:
report_url = None
try:
report_id = title.split(":")[1].split("(")[0]
except IndexError:
# Some reports don't have report ids listed. Make a slug from the title and date
report_id = "{}-{}".format(published_on_text, "-".join(title.split()))[:50]
landing_url = topic_url # There are not dedicated landing pages for unreleased reports :(
else:
link = result.select("a")[0]
landing_url = link.get('href')
if landing_url in LANDING_URLS_WITHOUT_REPORTS:
report_url = landing_url
else:
landing_page = beautifulsoup_from_url(landing_url)
report_url_relative = landing_page.select("div.oig_Publications a")[-1].get('href')
report_url = urljoin(BASE_REPORT_URL, report_url_relative)
report_filename = report_url.split("/")[-1]
report_id, extension = os.path.splitext(report_filename)
if published_on.year not in year_range:
logging.debug("[%s] Skipping, not in requested range." % report_url)
return
file_type = None
# urllib.parse has trouble parsing the extension for some urls.
# Ex: http://www.oig.doc.gov/Pages/NIST-Grant-Recipient-Sentenced-for-Grant-Fraud;-Civil-Suit-Filed.aspx
if report_url.endswith(".aspx"):
file_type = "aspx"
result = {
'inspector': 'commerce',
'inspector_url': 'http://www.oig.doc.gov',
'agency': 'commerce',
'agency_name': 'Department of Commerce',
'report_id': report_id,
'topic': topic_name,
'url': report_url,
'title': title,
'published_on': datetime.datetime.strftime(published_on, "%Y-%m-%d"),
}
if landing_url:
result['landing_url'] = landing_url
if unreleased:
result['unreleased'] = unreleased
if file_type:
result['file_type'] = file_type
return result
def beautifulsoup_from_url(url):
body = utils.download(url)
return BeautifulSoup(body)
utils.run(run) if (__name__ == "__main__") else None
|
Python
| 0
|
@@ -822,46 +822,8 @@
m:%0A#
- - The report 'OAE-19846_Announcement'
%0A%0ATO
|
0fd3199f02f305fe0802305d8f4be0f0bed8b5cb
|
Remove unnecessary base class.
|
interface/interface.py
|
interface/interface.py
|
"""
interface
---------
"""
from functools import wraps
import inspect
from operator import itemgetter
from textwrap import dedent
from weakref import WeakKeyDictionary
first = itemgetter(0)
def compatible(meth_sig, iface_sig):
"""
Check if ``method``'s signature is compatible with ``signature``.
"""
# TODO: Allow method to provide defaults and optional extensions to
# ``signature``.
return meth_sig == iface_sig
def strict_issubclass(t, parent):
return issubclass(t, parent) and t is not parent
class InterfaceMeta(type):
"""
Metaclass for interfaces.
Supplies a ``_signatures`` attribute and a ``check_implementation`` method.
"""
def __new__(mcls, name, bases, clsdict):
signatures = {}
for k, v in clsdict.items():
try:
signatures[k] = inspect.signature(v)
except TypeError:
pass
clsdict['_signatures'] = signatures
return super().__new__(mcls, name, bases, clsdict)
def _diff_signatures(self, type_):
"""
Diff our method signatures against the methods provided by type_.
Parameters
----------
type_ : type
The type to check.
Returns
-------
missing, mismatched : list[str], dict[str -> signature]
``missing`` is a list of missing method names.
``mismatched`` is a dict mapping method names to incorrect
signatures.
"""
missing = []
mismatched = {}
for name, iface_sig in self._signatures.items():
try:
f = getattr(type_, name)
except AttributeError:
missing.append(name)
continue
f_sig = inspect.signature(f)
if not compatible(f_sig, iface_sig):
mismatched[name] = f_sig
return missing, mismatched
def check_conforms(self, type_):
"""
Check whether a type implements our interface.
Parameters
----------
type_ : type
The type to check.
Raises
------
TypeError
If ``type_`` doesn't conform to our interface.
Returns
-------
None
"""
missing, mismatched = self._diff_signatures(type_)
if not missing and not mismatched:
return
raise self._invalid_implementation(type_, missing, mismatched)
def _invalid_implementation(self, t, missing, mismatched):
"""
Make a TypeError explaining why ``t`` doesn't implement our interface.
"""
assert missing or mismatched, "Implementation wasn't invalid."
message = "\nclass {C} failed to implement interface {I}:".format(
C=t.__name__,
I=self.__name__,
)
if missing:
message += dedent(
"""
The following methods were not implemented:
{missing_methods}"""
).format(missing_methods=self._format_missing_methods(missing))
if mismatched:
message += (
"\n\nThe following methods were implemented but had invalid"
" signatures:\n"
"{mismatched_methods}"
).format(
mismatched_methods=self._format_mismatched_methods(mismatched),
)
return TypeError(message)
def _format_missing_methods(self, missing):
return "\n".join(sorted([
" - {name}{sig}".format(name=name, sig=self._signatures[name])
for name in missing
]))
def _format_mismatched_methods(self, mismatched):
return "\n".join(sorted([
" - {name}{actual} != {name}{expected}".format(
name=name,
actual=bad_sig,
expected=self._signatures[name],
)
for name, bad_sig in mismatched.items()
]))
class Interface(metaclass=InterfaceMeta):
"""
Base class for interface definitions.
"""
class Implements:
"""
Base class for an implementation of an interface.
"""
class ImplementsMeta(type):
"""
Metaclass for implementations of particular interfaces.
"""
def __new__(mcls, name, bases, clsdict, base=False):
newtype = super().__new__(mcls, name, bases, clsdict)
if base:
# Don't do checks on the types returned by ``implements``.
return newtype
for iface in newtype.interfaces():
iface.check_conforms(newtype)
return newtype
def __init__(mcls, name, bases, clsdict, base=False):
super().__init__(name, bases, clsdict)
def interfaces(self):
"""
Return a generator of interfaces implemented by this type.
Yields
------
iface : Interface
"""
for base in self.mro():
if strict_issubclass(base, Implements):
yield base.interface
def weakmemoize_implements(f):
"One-off weakmemoize implementation for ``implements``."
_memo = WeakKeyDictionary()
@wraps(f)
def _f(I):
try:
return _memo[I]
except KeyError:
pass
ret = f(I)
_memo[I] = ret
return ret
return _f
@weakmemoize_implements
def implements(I):
    """
    Build (and weakly cache) a base class enforcing implementation of ``I``.

    Parameters
    ----------
    I : Interface

    Returns
    -------
    base : type
        Subclassing the returned type triggers a conformance check against
        every interface method of ``I``.
    """
    if not issubclass(I, Interface):
        raise TypeError(
            "implements() expected an Interface, but got %s." % I
        )

    klass_name = "Implements{I}".format(I=I.__name__)
    signature_lines = "\n".join(
        "{name}{sig}".format(name=meth, sig=sig)
        for meth, sig in sorted(list(I._signatures.items()), key=first)
    )
    doc = dedent(
        """\
        Implementation of {I}.
        Methods
        -------
        {methods}"""
    ).format(I=I.__name__, methods=signature_lines)

    # base=True exempts this synthetic base itself from conformance checks.
    return ImplementsMeta(
        klass_name,
        (Implements,),
        {'__doc__': doc, 'interface': I},
        base=True,
    )
|
Python
| 0
|
@@ -442,97 +442,8 @@
g%0A%0A%0A
-def strict_issubclass(t, parent):%0A return issubclass(t, parent) and t is not parent%0A%0A%0A
clas
@@ -3998,98 +3998,8 @@
%22%0A%0A%0A
-class Implements:%0A %22%22%22%0A Base class for an implementation of an interface.%0A %22%22%22%0A%0A%0A
clas
@@ -4775,25 +4775,18 @@
if
-strict_issubclass
+isinstance
(bas
@@ -4798,16 +4798,20 @@
plements
+Meta
):%0A
@@ -6011,26 +6011,22 @@
(
-Implements
+object
,),%0A
|
2d26d92956282be3f08cc3dcdb5fa16433822a1b
|
Change retry_if_fails to _retry_on_fail
|
Nyaa.py
|
Nyaa.py
|
from bs4 import BeautifulSoup
import re
import requests
import sys
def retry_if_fails(req, *args, **kwargs):
    """Invoke ``req(*args, **kwargs)`` until it yields a non-error response.

    Retries on connection errors and on HTTP status codes outside 100-399,
    logging each retry to stderr.  Implemented as a loop instead of the
    previous recursion so that persistent failures cannot overflow the
    call stack (RecursionError after ~1000 retries).

    Returns the first response whose status code is in 100-399.
    """
    while True:
        try:
            r = req(*args, **kwargs)
        except requests.exceptions.ConnectionError as e:
            print('Connection error, retrying... ({})'.format(e.args[0].args[1]), file=sys.stderr)
            continue
        if r.status_code not in range(100, 399):
            print('Connection error, retrying... (HTTP {})'.format(r.status_code), file=sys.stderr)
            continue
        return r
class Nyaa(object):
    """Thin client describing a Nyaa tracker's URL endpoints."""

    def __init__(self, url):
        self.url = url
        self.info_url = url + '?page=view&tid='
        self.dl_url = url + '?page=download&tid='

    @property
    def last_entry(self):
        """Most recent torrent id listed on the tracker front page."""
        response = requests.get(self.url)
        if response.status_code not in range(100, 399):
            print('Connection error. Nyaa might be down (HTTP {}).'.format(response.status_code), file=sys.stderr)
            sys.exit(1)
        page = BeautifulSoup(response.text)
        row = page.find('tr', class_='tlistrow')
        href = row.find('td', class_='tlistname').a['href']
        return int(re.search('tid=([0-9]*)', href).group(1))
class NyaaEntry(object):
    """A single torrent entry fetched from a Nyaa tracker.

    Downloads the torrent's info page on construction; the metadata
    properties then parse the cached page on demand.
    """

    def __init__(self, nyaa, nyaa_id):
        self.info_url = '{}{}'.format(nyaa.info_url, nyaa_id)
        self.download_url = '{}{}&magnet=1'.format(nyaa.dl_url, nyaa_id)
        response = retry_if_fails(requests.get, self.info_url)
        setattr(response, 'encoding', 'utf-8')
        self.page = BeautifulSoup(response.text)
        missing = '\xa0The torrent you are looking for does not appear to be in the database.'
        self.exists = self.page.find('div', class_='content').text != missing

    @property
    def category(self):
        """Top-level category label."""
        return self.page.find('td', class_='viewcategory').find_all('a')[0].text

    @property
    def sub_category(self):
        """Second-level category label."""
        return self.page.find('td', class_='viewcategory').find_all('a')[1].text

    @property
    def name(self):
        """Torrent title as shown on the page."""
        return self.page.find('td', class_='viewtorrentname').text

    @property
    def time(self):
        """Upload timestamp parts, split on ', '."""
        return self.page.find('td', class_='vtop').text.split(', ')

    @property
    def status(self):
        """One of 'trusted', 'remake', 'a+' or 'normal', from the CSS classes."""
        css_classes = self.page.find('div', class_=re.compile('content'))['class']
        for marker, label in (('trusted', 'trusted'),
                              ('remake', 'remake'),
                              ('aplus', 'a+')):
            if marker in css_classes:
                return label
        return 'normal'

    @property
    def hash(self):
        """Info-hash extracted from the magnet redirect header, or None."""
        response = retry_if_fails(requests.head, self.download_url)
        if 'Location' not in response.headers:
            return None
        return re.search(r'magnet:\?xt=urn:btih:(.*)&tr=', response.headers['Location']).group(1).upper()
|
Python
| 0.00177
|
@@ -65,30 +65,30 @@
ys%0A%0Adef
+_
retry_
-if
+on
_fail
-s
(req, *a
@@ -276,38 +276,38 @@
)%0A%09%09%09return
+_
retry_
-if
+on
_fail
-s
(req, *args,
@@ -485,30 +485,30 @@
%09return
+_
retry_
-if
+on
_fail
-s
(req, *a
@@ -1253,38 +1253,38 @@
_id)%0A%0A%09%09r =
+_
retry_
-if
+on
_fail
-s
(requests.ge
@@ -2262,22 +2262,22 @@
r =
+_
retry_
-if
+on
_fail
-s
(req
|
74454b032be96cc34d96fe4eefc2a89c88637532
|
Version 1.5.2 -> 1.6.0
|
iprestrict/__init__.py
|
iprestrict/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .restrictor import IPRestrictor
__all__ = ["IPRestrictor"]
__version__ = "1.5.2"
|
Python
| 0.000001
|
@@ -145,9 +145,9 @@
%221.
-5.2
+6.0
%22%0A
|
10ceb00e249635868fb55c1ae1668ddb35b03bc3
|
Update demo
|
demo.py
|
demo.py
|
# -*- coding: utf-8 -*-
# Demo walkthrough of the python-taiga client: authenticate against a local
# Taiga instance, then create and manipulate projects, milestones, user
# stories, tasks, issues, roles and memberships.
from taiga import TaigaAPI

api = TaigaAPI(
    host='http://127.0.0.1:8000'
)
api.auth(
    username='admin',
    password='123123'
)
print (api.me())
# Create a project, rename it, and persist the change.
new_project = api.projects.create('TEST PROJECT', 'TESTING API')
new_project.name = 'TEST PROJECT 3'
new_project.update()
jan_feb_milestone = new_project.add_milestone(
    'New milestone jan feb', '2015-01-26', '2015-02-26'
)
# User story with an attachment, plus a task that also gets an attachment.
userstory = new_project.add_user_story(
    'New Story', description='Blablablabla',
    milestone=jan_feb_milestone.id
)
userstory.attach('README.md')
userstory.add_task('New Task 2',
    new_project.task_statuses[0].id
).attach('README.md')
print (userstory.list_tasks())
# Issue creation: look up priority/status/type/severity ids by name.
newissue = new_project.add_issue(
    'New Issue',
    new_project.priorities.get(name='High').id,
    new_project.issue_statuses.get(name='New').id,
    new_project.issue_types.get(name='Bug').id,
    new_project.severities.get(name='Minor').id,
    description='Bug #5'
).attach('README.md')
projects = api.projects.list()
print (projects)
for user in new_project.users:
    print (user)
stories = api.user_stories.list()
print (stories)
print (api.history.user_story.get(stories[0].id))
projects[0].star()
api.milestones.list()
projects = api.projects.list()
print (projects)
another_new_project = projects.get(name='TEST PROJECT 3')
print (another_new_project)
users = api.users.list()
print (users)
print (api.search(projects.get(name='TEST PROJECT 3').id, 'New').user_stories[0].subject)
# NOTE(review): the statement below uses Python 2 print-statement syntax while
# the rest of the file parenthesizes -- the script appears to target Python 2.
print new_project.add_issue_attribute(
    'Device', description='(iPad, iPod, iPhone, Desktop, etc.)'
)
print(new_project.roles)
memberships = new_project.list_memberships()
new_project.add_role('New role', permissions=["add_issue", "modify_issue"])
new_project.add_membership('stagi.andrea@gmail.com', new_project.roles[0].id)
for membership in memberships:
    print (membership.role_name)
|
Python
| 0.000001
|
@@ -44,16 +44,60 @@
TaigaAPI
+%0Afrom taiga.exceptions import TaigaException
%0A%0Aapi =
@@ -1073,57 +1073,8 @@
s)%0A%0A
-for user in new_project.users:%0A print (user)%0A%0A
stor
@@ -1172,16 +1172,25 @@
%5D.id))%0A%0A
+try:%0A
projects
@@ -1199,16 +1199,62 @@
%5D.star()
+%0Aexcept TaigaException:%0A projects%5B0%5D.like()
%0A%0Aapi.mi
|
fc187d9b8822fe446561dc4724c7f123da3d550f
|
Fix query for auto-adding to board by domain
|
hasjob/tagging.py
|
hasjob/tagging.py
|
# -*- coding: utf-8 -*-
from collections import defaultdict
from urlparse import urljoin
import requests
from flask.ext.rq import job
from coaster.utils import text_blocks
from coaster.nlp import extract_named_entities
from . import app
from .models import (db, JobPost, JobLocation, Board, BoardDomain, BoardLocation,
Tag, JobPostTag, TAG_TYPE)
@job('hasjob')
def tag_locations(jobpost_id):
    """Background job: geocode a post's freeform location string via the
    HasCore geo service and sync its JobLocation rows accordingly.

    Does nothing unless HASCORE_SERVER is configured.
    """
    if app.config.get('HASCORE_SERVER'):
        with app.test_request_context():
            post = JobPost.query.get(jobpost_id)
            url = urljoin(app.config['HASCORE_SERVER'], '/1/geo/parse_locations')
            response = requests.get(url, params={'q': post.location, 'bias': ['IN', 'US'], 'special': ['Anywhere', 'Remote', 'Home']}).json()
            if response.get('status') == 'ok':
                remote_location = False
                results = response.get('result', [])
                geonames = defaultdict(dict)
                tokens = []
                for item in results:
                    if item.get('special'):
                        # 'special' tokens (Anywhere/Remote/Home) mark the
                        # post as remote-friendly instead of geocoding it.
                        remote_location = True
                    geoname = item.get('geoname', {})
                    if geoname:
                        geonames[geoname['geonameid']]['geonameid'] = geoname['geonameid']
                        # First sighting of a geoname is primary; containing
                        # regions recorded below are always non-primary.
                        geonames[geoname['geonameid']]['primary'] = geonames[geoname['geonameid']].get('primary', True)
                        for type, related in geoname.get('related', {}).items():
                            if type in ['admin2', 'admin1', 'country', 'continent']:
                                geonames[related['geonameid']]['geonameid'] = related['geonameid']
                                geonames[related['geonameid']]['primary'] = False
                        tokens.append({'token': item.get('token', ''), 'geoname': {
                            'name': geoname['name'],
                            'geonameid': geoname['geonameid'],
                        }})
                    else:
                        tokens.append({'token': item.get('token', '')})
                    if item.get('special'):
                        tokens[-1]['remote'] = True
                post.remote_location = remote_location
                post.parsed_location = {'tokens': tokens}
                # Upsert a JobLocation row for every geoname seen...
                for locdata in geonames.values():
                    loc = JobLocation.query.get((jobpost_id, locdata['geonameid']))
                    if loc is None:
                        loc = JobLocation(jobpost=post, geonameid=locdata['geonameid'])
                        db.session.add(loc)
                        db.session.flush()
                    loc.primary = locdata['primary']
                # ...and drop rows no longer present in the parse result.
                for location in post.locations:
                    if location.geonameid not in geonames:
                        db.session.delete(location)
                db.session.commit()
@job('hasjob')
def add_to_boards(jobpost_id):
    """Background job: auto-add a post to every board matching either its
    employer email domain or one of its geocoded locations.

    Bug fix: the previous single query inner-joined both BoardDomain and
    BoardLocation, so a board with rows in only one of those tables could
    never match on the other criterion.  The union of two independently
    filtered queries matches boards on either criterion alone.
    """
    with app.test_request_context():
        post = JobPost.query.get(jobpost_id)
        domain_boards = Board.query.join(BoardDomain).filter(
            BoardDomain.domain == post.email_domain)
        location_boards = Board.query.join(BoardLocation).filter(
            BoardLocation.geonameid.in_([l.geonameid for l in post.locations]))
        for board in domain_boards.union(location_boards):
            board.add(post)
        db.session.commit()
def tag_named_entities(post):
    """Sync a post's automatic tags with the named entities found in it.

    Creates AUTO tag links for newly seen entities and downgrades AUTO
    links that were not re-confirmed this pass to REMOVED.
    """
    found = extract_named_entities(text_blocks(post.tag_content()))
    confirmed = set()
    for entity in found:
        tag = Tag.get(entity, create=True)
        link = JobPostTag.get(post, tag)
        if not link:
            link = JobPostTag(jobpost=post, tag=tag, status=TAG_TYPE.AUTO)
            post.taglinks.append(link)
        confirmed.add(link)
    # Any AUTO link not re-confirmed above is considered stale.
    stale = [
        link for link in post.taglinks
        if link.status == TAG_TYPE.AUTO and link not in confirmed
    ]
    for link in stale:
        link.status = TAG_TYPE.REMOVED
@job('hasjob')
def tag_jobpost(jobpost_id):
    """Background job: run named-entity tagging for a single post and commit."""
    with app.test_request_context():
        post = JobPost.query.get(jobpost_id)
        tag_named_entities(post)
        db.session.commit()
|
Python
| 0.000031
|
@@ -2974,32 +2974,98 @@
= JobPost.query.
+options(db.load_only('email_domain'), db.joinedload('locations')).
get(jobpost_id)%0A
@@ -3119,41 +3119,14 @@
in).
-join(BoardLocation).filter(db.or_
+filter
(%0A
@@ -3178,17 +3178,24 @@
l_domain
-,
+).union(
%0A
@@ -3212,86 +3212,86 @@
oard
-Location.geonameid.in_(%5Bl.geonameid for l in post.locations%5D)%0A
+.query.join(BoardLocation).filter(BoardLocation.geonameid.in_(post.geonameids)
)):%0A
|
2288fb1f849c6b9948a3ac792daea3f9044d6f37
|
Fix create index
|
haystack_redis.py
|
haystack_redis.py
|
import os
from cStringIO import StringIO
from threading import Lock
from redis import from_url as redis
from whoosh.index import _DEF_INDEX_NAME, EmptyIndexError
from whoosh.qparser import QueryParser
from whoosh.filedb.structfile import StructFile
from whoosh.filedb.filestore import Storage, create_index, open_index
from haystack.backends.whoosh_backend import WhooshSearchBackend, WhooshEngine
redis_url = os.environ.get('REDISTOGO_URL', 'redis://localhost:6379')
class RedisSearchBackend(WhooshSearchBackend):
    """Haystack backend storing the Whoosh index in Redis instead of files."""

    def setup(self):
        """
        Defers loading until needed.
        """
        from haystack import connections
        # Swap Whoosh's file storage for RedisStorage namespaced by self.path.
        self.storage = RedisStorage(self.path)
        self.content_field_name, self.schema = self.build_schema(connections[self.connection_alias].get_unified_index().all_searchfields())
        self.parser = QueryParser(self.content_field_name, schema=self.schema)
        try:
            self.index = self.storage.open_index(schema=self.schema)
        except EmptyIndexError:
            # First run: nothing stored in Redis yet, create a fresh index.
            self.index = self.storage.create_index(self.schema)
        self.setup_complete = True
class RedisEngine(WhooshEngine):
    # Whoosh engine wired to the Redis-backed search backend above.
    backend = RedisSearchBackend
class RedisStorage(Storage):
    """Storage object that keeps the index in redis.

    Every index "file" is a field of a single Redis hash keyed
    "RedisStore:<namespace>": field name = file name, value = raw bytes.
    """
    # Redis values cannot be memory-mapped.
    supports_mmap = False

    def __file(self, name):
        # Raw bytes of the named file (None if the hash field is absent).
        return self.redis.hget("RedisStore:%s" % self.folder, name)

    def __init__(self, namespace='whoosh'):
        # NOTE(review): ``redis_url`` is read from module scope at
        # construction time -- confirm it is defined before instantiation.
        self.folder = namespace
        self.redis = redis(redis_url)
        self.locks = {}

    def create_index(self, schema, indexname=_DEF_INDEX_NAME):
        return create_index(self, schema, indexname)

    def file_modified(self, name):
        # Modification times are not tracked; -1 means "unknown".
        return -1

    def open_index(self, indexname=_DEF_INDEX_NAME, schema=None):
        return open_index(self, schema, indexname)

    def list(self):
        # All stored file names.
        return self.redis.hkeys("RedisStore:%s" % self.folder)

    def clean(self):
        # Drop the whole hash, i.e. every stored index file.
        self.redis.delete("RedisStore:%s" % self.folder)

    def total_size(self):
        return sum(self.file_length(f) for f in self.list())

    def file_exists(self, name):
        return self.redis.hexists("RedisStore:%s" % self.folder, name)

    def file_length(self, name):
        if not self.file_exists(name):
            raise NameError
        return len(self.__file(name))

    def delete_file(self, name):
        if not self.file_exists(name):
            raise NameError
        self.redis.hdel("RedisStore:%s" % self.folder, name)

    def rename_file(self, name, newname, safe=False):
        if not self.file_exists(name):
            raise NameError("File %r does not exist" % name)
        if safe and self.file_exists(newname):
            raise NameError("File %r exists" % newname)
        # Copy-then-delete batched into a single pipeline round trip; not
        # atomic against concurrent writers of the same hash.
        content = self.__file(name)
        pl = self.redis.pipeline()
        pl.hdel("RedisStore:%s" % self.folder, name)
        pl.hset("RedisStore:%s" % self.folder, newname, content)
        pl.execute()

    def create_file(self, name, **kwargs):
        # Buffer writes in memory; flush to Redis when Whoosh closes the file.
        def onclose_fn(sfile):
            self.redis.hset("RedisStore:%s" % self.folder, name, sfile.file.getvalue())
        f = StructFile(StringIO(), name=name, onclose=onclose_fn)
        return f

    def open_file(self, name, *args, **kwargs):
        if not self.file_exists(name):
            raise NameError("No such file %r" % name)
        # Re-persist the (possibly modified) in-memory copy on close.
        def onclose_fn(sfile):
            self.redis.hset("RedisStore:%s" % self.folder, name, sfile.file.getvalue())
        #print "Opened file %s %s " % (name, self.__file(name))
        return StructFile(StringIO(self.__file(name)), name=name, onclose=onclose_fn, *args, **kwargs)

    def lock(self, name):
        # One in-process Lock per name, created lazily on first request.
        if name not in self.locks:
            self.locks[name] = Lock()
        return self.locks[name]
|
Python
| 0
|
@@ -60,16 +60,32 @@
ort Lock
+%0Aimport tempfile
%0A%0Afrom r
@@ -309,32 +309,55 @@
ge,
-create_index, open_index
+FileStorage%0Afrom whoosh.util import random_name
%0A%0Afr
@@ -438,79 +438,8 @@
ne%0A%0A
-redis_url = os.environ.get('REDISTOGO_URL', 'redis://localhost:6379')%0A%0A
%0Acla
@@ -1391,24 +1391,35 @@
init__(self,
+ redis_url,
namespace='
@@ -1535,287 +1535,52 @@
def
-create_index(self, schema, indexname=_DEF_INDEX_NAME):%0A return create_index(self, schema, indexname)%0A%0A def file_modified(self, name):%0A return -1%0A%0A def open_index(self, indexname=_DEF_INDEX_NAME, schema=None):%0A return open_index(self, schema, indexname)
+file_modified(self, name):%0A return -1
%0A%0A
@@ -3509,8 +3509,245 @@
%5Bname%5D%0A%0A
+ def temp_storage(self, name=None):%0A tdir = tempfile.gettempdir()%0A name = name or %22%25s.tmp%22 %25 random_name()%0A path = os.path.join(tdir, name)%0A tempstore = FileStorage(path)%0A return tempstore.create()%0A%0A
|
abffab17e166f39111f58e37e27af3c963e2fe6a
|
Add docs for membership operators
|
sqlalchemy_utils/types/range.py
|
sqlalchemy_utils/types/range.py
|
"""
SQLAlchemy-Utils provides a wide variety of range data types. All range data types return
Interval objects of the intervals_ package. In order to use range data types you need to install intervals_ with:
::
pip install intervals
The intervals package provides a good number of additional interval operators that, for example, psycopg2 range objects do not support.
Some good reading for practical interval implementations:
http://wiki.postgresql.org/images/f/f0/Range-types.pdf
RangeType operators
-------------------
Comparison operators
^^^^^^^^^^^^^^^^^^^^
::
Car.price_range < [12, 300]
Car.price_range == [12, 300]
Car.price_range < 300
Car.price_range > (300, 500)
Car.price_range.in_([[300, 500]])
.. _intervals: https://github.com/kvesteri/intervals
"""
from collections import Iterable
intervals = None
try:
import intervals
except ImportError:
pass
import six
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql.base import ischema_names
from sqlalchemy import types
from ..exceptions import ImproperlyConfigured
from .scalar_coercible import ScalarCoercible
class INT4RANGE(types.UserDefinedType):
    """
    Raw number range type, only supports PostgreSQL for now.
    """

    def get_col_spec(self):
        # Emitted verbatim in DDL as the column type name.
        return 'int4range'
class INT8RANGE(types.UserDefinedType):
    """Raw bigint range type (PostgreSQL int8range)."""

    def get_col_spec(self):
        return 'int8range'
class NUMRANGE(types.UserDefinedType):
    """Raw numeric (decimal) range type (PostgreSQL numrange)."""

    def get_col_spec(self):
        return 'numrange'
class DATERANGE(types.UserDefinedType):
    """Raw date range type (PostgreSQL daterange)."""

    def get_col_spec(self):
        return 'daterange'
class TSRANGE(types.UserDefinedType):
    """Raw timestamp (without time zone) range type (PostgreSQL tsrange)."""

    def get_col_spec(self):
        return 'tsrange'
class TSTZRANGE(types.UserDefinedType):
    """Raw timestamp-with-time-zone range type (PostgreSQL tstzrange)."""

    def get_col_spec(self):
        return 'tstzrange'
# Make SQLAlchemy's PostgreSQL reflection aware of the raw range types so
# introspected columns map back to the classes defined above.
ischema_names['int4range'] = INT4RANGE
ischema_names['int8range'] = INT8RANGE
ischema_names['numrange'] = NUMRANGE
ischema_names['daterange'] = DATERANGE
ischema_names['tsrange'] = TSRANGE
ischema_names['tstzrange'] = TSTZRANGE
class RangeComparator(types.TypeEngine.Comparator):
    """Comparator that coerces plain Python values (scalars, 2-tuples,
    lists, strings) into the column's interval class before comparing."""

    @classmethod
    def coerced_func(cls, func):
        # Wrap a base Comparator operation so its right-hand operand is
        # coerced first; used below to patch __eq__, __lt__, etc.
        def operation(self, other, **kwargs):
            other = self.coerce_arg(other)
            return getattr(types.TypeEngine.Comparator, func)(
                self, other, **kwargs
            )
        return operation

    def coerce_arg(self, other):
        # Values of these types can be understood by the interval class
        # constructor; anything else is passed through untouched.
        coerced_types = (
            self.type.interval_class.type,
            tuple,
            list,
        ) + six.string_types

        if isinstance(other, coerced_types):
            return self.type.interval_class(other)
        return other

    def in_(self, other):
        # Coerce each candidate element (but never iterate a bare string).
        if (
            isinstance(other, Iterable) and
            not isinstance(other, six.string_types)
        ):
            other = map(self.coerce_arg, other)
        return super(RangeComparator, self).in_(other)

    def notin_(self, other):
        if (
            isinstance(other, Iterable) and
            not isinstance(other, six.string_types)
        ):
            other = map(self.coerce_arg, other)
        return super(RangeComparator, self).notin_(other)
# Attach the coercing wrapper to every rich-comparison operator so that
# e.g. ``Car.price_range == [12, 300]`` coerces the list to an interval.
funcs = [
    '__eq__',
    '__ne__',
    '__lt__',
    '__le__',
    '__gt__',
    '__ge__',
]

for func in funcs:
    setattr(
        RangeComparator,
        func,
        RangeComparator.coerced_func(func)
    )
class RangeType(types.TypeDecorator, ScalarCoercible):
    """Base class for interval-backed column types.

    Uses the native PostgreSQL range type where available and falls back
    to a VARCHAR(255) holding the interval's string form elsewhere.
    """
    comparator_factory = RangeComparator

    def __init__(self, *args, **kwargs):
        # intervals is an optional dependency; fail loudly at
        # column-definition time rather than at query time.
        if intervals is None:
            raise ImproperlyConfigured(
                'RangeType needs intervals package installed.'
            )
        super(RangeType, self).__init__(*args, **kwargs)

    def load_dialect_impl(self, dialect):
        if dialect.name == 'postgresql':
            # Use the native range type.
            return dialect.type_descriptor(self.impl)
        else:
            # Other databases store the interval's string representation.
            return dialect.type_descriptor(sa.String(255))

    def process_bind_param(self, value, dialect):
        # Intervals serialize to their canonical string form on the way in.
        if value is not None:
            return str(value)
        return value

    def process_result_value(self, value, dialect):
        # Parse database values back into interval objects on the way out.
        if value:
            return self.canonicalize_result_value(
                self.interval_class(value)
            )
        return value

    def canonicalize_result_value(self, value):
        # Normalize to a closed-closed interval representation.
        return intervals.canonicalize(value, True, True)

    def _coerce(self, value):
        # ScalarCoercible hook: wrap raw Python values in the interval class.
        if value is not None:
            value = self.interval_class(value)
        return value
class IntRangeType(RangeType):
    """
    IntRangeType provides way for saving ranges of integers into database. On
    PostgreSQL this type maps to native INT4RANGE type while on other drivers
    this maps to simple string column.

    Example::

        from sqlalchemy_utils import IntRangeType

        class Event(Base):
            __tablename__ = 'user'
            id = sa.Column(sa.Integer, autoincrement=True)
            name = sa.Column(sa.Unicode(255))
            estimated_number_of_persons = sa.Column(IntRangeType)

        party = Event(name=u'party')
        # we estimate the party to contain minium of 10 persons and at max
        # 100 persons
        party.estimated_number_of_persons = [10, 100]
        print party.estimated_number_of_persons
        # '10-100'

    IntRangeType returns the values as IntInterval objects. These objects
    support many arithmetic operators::

        meeting = Event(name=u'meeting')
        meeting.estimated_number_of_persons = [20, 40]
        total = (
            meeting.estimated_number_of_persons +
            party.estimated_number_of_persons
        )
        print total
        # '30-140'
    """
    impl = INT4RANGE

    def __init__(self, *args, **kwargs):
        super(IntRangeType, self).__init__(*args, **kwargs)
        # Values round-trip through intervals.IntInterval.
        self.interval_class = intervals.IntInterval
class DateRangeType(RangeType):
    """
    DateRangeType provides way for saving ranges of dates into database. On
    PostgreSQL this type maps to native DATERANGE type while on other drivers
    this maps to simple string column.

    Example::

        from sqlalchemy_utils import DateRangeType

        class Reservation(Base):
            __tablename__ = 'user'
            id = sa.Column(sa.Integer, autoincrement=True)
            room_id = sa.Column(sa.Integer)
            during = sa.Column(DateRangeType)
    """
    impl = DATERANGE

    def __init__(self, *args, **kwargs):
        super(DateRangeType, self).__init__(*args, **kwargs)
        # Values round-trip through intervals.DateInterval.
        self.interval_class = intervals.DateInterval
class NumericRangeType(RangeType):
    """Decimal range type: native NUMRANGE on PostgreSQL, a string column
    on other drivers.  Values round-trip through intervals.DecimalInterval.
    """
    impl = NUMRANGE

    def __init__(self, *args, **kwargs):
        # Bug fix: super() was previously called with DateRangeType, which
        # is the wrong class for MRO lookup and breaks subclassing.
        super(NumericRangeType, self).__init__(*args, **kwargs)
        self.interval_class = intervals.DecimalInterval
class DateTimeRangeType(RangeType):
    """Timestamp range type: native TSRANGE on PostgreSQL, a string column
    on other drivers.  Values round-trip through intervals.DateTimeInterval.
    """
    impl = TSRANGE

    def __init__(self, *args, **kwargs):
        # Bug fix: super() was previously called with DateRangeType, which
        # is the wrong class for MRO lookup and breaks subclassing.
        super(DateTimeRangeType, self).__init__(*args, **kwargs)
        self.interval_class = intervals.DateTimeInterval
|
Python
| 0
|
@@ -694,40 +694,141 @@
0)%0A%0A
- Car.price_range.in_(%5B%5B300, 5
+%0AMembership operators%0A%5E%5E%5E%5E%5E%5E%5E%5E%5E%5E%5E%5E%5E%5E%5E%5E%5E%5E%5E%5E%0A%0A::%0A%0A Car.price_range.in_(%5B%5B300, 500%5D%5D)%0A%0A ~ Car.price_range.in_(%5B%5B300, 400%5D, %5B700, 8
00%5D%5D
|
7692c4210289af68ad7952ddca89f70d250a26ed
|
Change base_directory location
|
great_expectations/data_context/datasource/pandas_source.py
|
great_expectations/data_context/datasource/pandas_source.py
|
import pandas as pd
import os
from .datasource import Datasource
from .filesystem_path_generator import FilesystemPathGenerator
from ...dataset.pandas_dataset import PandasDataset
class PandasCSVDatasource(Datasource):
    """
    A PandasDataSource makes it easy to create, manage and validate
    expectations on Pandas dataframes.

    Use with the FilesystemPathGenerator for simple cases.
    """

    def __init__(self, name, type_, data_context=None, generators=None, base_directory="/data", read_csv_kwargs=None):
        self._base_directory = base_directory
        if generators is None:
            # Bug fix: the default generator previously hard-coded "/data",
            # silently ignoring a caller-supplied base_directory.
            generators = {
                "default": {"type": "filesystem", "base_directory": base_directory}
            }
        super(PandasCSVDatasource, self).__init__(name, type_, data_context, generators)
        self._datasource_config.update(
            {
                "base_directory": base_directory,
                "read_csv_kwargs": read_csv_kwargs or {}
            }
        )
        self._build_generators()

    def _get_generator_class(self, type_):
        """Map a generator type name to its implementing class."""
        if type_ == "filesystem":
            return FilesystemPathGenerator
        raise ValueError("Unrecognized BatchGenerator type %s" % type_)

    def _get_data_asset(self, data_asset_name, batch_kwargs, expectations_config):
        """Read the CSV at batch_kwargs["path"] and wrap it as a PandasDataset."""
        full_path = os.path.join(batch_kwargs["path"])
        df = pd.read_csv(full_path, **self._datasource_config["read_csv_kwargs"])
        return PandasDataset(df,
            expectations_config=expectations_config,
            data_context=self._data_context,
            data_asset_name=data_asset_name,
            batch_kwargs=batch_kwargs)
|
Python
| 0.000002
|
@@ -474,100 +474,30 @@
ne,
-base_directory=%22/data%22, read_csv_kwargs=None):%0A self._base_directory = base_directory
+read_csv_kwargs=None):
%0A
@@ -785,58 +785,8 @@
%7B%0A
- %22base_directory%22: base_directory,%0A
|
b69289c62a5be3a523b4d32aec2b6d790dc95f0d
|
Add compare functions
|
amit.py
|
amit.py
|
import hashlib, ssdeep
def hash_ssdeep(inbytes):
    # Fuzzy (context-triggered piecewise) hash via the third-party ssdeep lib.
    return ssdeep.hash(inbytes)
def hash_md5(inbytes):
    """Return the hexadecimal MD5 digest of ``inbytes``."""
    return hashlib.md5(inbytes).hexdigest()
def hash_sha1(inbytes):
    """Return the hexadecimal SHA-1 digest of ``inbytes``."""
    return hashlib.sha1(inbytes).hexdigest()
def hash_sha256(inbytes):
    """Return the hexadecimal SHA-256 digest of ``inbytes``."""
    return hashlib.sha256(inbytes).hexdigest()
def hash_print_all(inbytes):
    # Print every supported digest of the input, one per line
    # (ssdeep, md5, sha1, sha256).  Python 2 print statements.
    print hash_ssdeep(inbytes)
    print hash_md5(inbytes)
    print hash_sha1(inbytes)
    print hash_sha256(inbytes)
testdata = '\x90'*512*2
testdata2 = 'mod\x90'*512*2
hash_print_all(testdata)
hash_print_all(testdata2)
|
Python
| 0.000001
|
@@ -338,30 +338,24 @@
)%0A%0Adef hash_
-print_
all(inbytes)
@@ -357,22 +357,33 @@
ytes):%0A%09
-print
+a = %5B%5D%0A%09a.append(
hash_ssd
@@ -390,32 +390,36 @@
eep(inbytes)
+)
%0A%09
-print
+a.append(
hash_md5(inb
@@ -423,32 +423,66 @@
inbytes)
+)
%0A%09
-print
+a.append(hash_sha1(inbytes))%0A%09a.append(
hash_sha
1(inbyte
@@ -465,33 +465,35 @@
.append(hash_sha
-1
+256
(inbytes)%0A%09print
@@ -489,36 +489,589 @@
tes)
+)
%0A%09
-print hash_sha256(inbytes)
+return a%0A%0Adef compare_ssdeep(hash1, hash2):%0A%09return ssdeep.compare(hash1, hash2)%0A%0Adef compare_md5(hash1, hash2):%0A%09return hash1 == hash2%0A%0Adef compare_sha1(hash2, hash1):%0A%09return hash1 == hash2%0A%0Adef compare_sha256(hash1, hash2):%0A%09return hash1 == hash2%0A%0Adef compare_all(hasharray1, hasharray2):%0A%09if len(hasharray1)!=len(hasharray2): return None%0A%09a = %5B%5D%0A%09a.append(compare_ssdeep(hasharray1%5B0%5D, hasharray2%5B0%5D))%0A%09a.append(compare_md5(hasharray1%5B1%5D, hasharray2%5B1%5D))%0A%09a.append(compare_sha1(hasharray1%5B2%5D, hasharray2%5B2%5D))%0A%09a.append(compare_sha256(hasharray1%5B3%5D, hasharray2%5B3%5D))%0A%09return a
%0A%0Ate
@@ -1108,16 +1108,19 @@
2 = 'mod
+'+'
%5Cx90'*51
@@ -1123,26 +1123,26 @@
'*512*2%0A
-hash_print
+%0Aa1 = hash
_all(tes
@@ -1152,30 +1152,97 @@
ta)%0A
-hash_print_all(testdat
+a2 = hash_all(testdata2)%0Afor i in a1: print i%0Afor i in a2: print i%0Aprint compare_all(a1,
a2)%0A
|
d5e41dfaff393a0649336ef92d7b7917a7e0122d
|
fix allowed_hosts settings bug
|
hours/settings.py
|
hours/settings.py
|
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECRET_KEY will be automatically generated and saved into local_settings.py on first import, if not already
# present
SECRET_KEY = ''
DEBUG = False
# Hosts/domain names this Django site may serve.  Bug fix: a later duplicate
# ``ALLOWED_HOSTS = []`` assignment used to clobber this list, dropping
# 'localhost'.
ALLOWED_HOSTS = [
    'localhost'
]
TEMPLATE_DEBUG = True
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'sources',
'core',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'hours.urls'
WSGI_APPLICATION = 'hours.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
from hours_settings import *
try:
from local_settings import *
except ImportError:
pass
if SECRET_KEY == '':
print 'Creating SECRET_KEY..'
from django.utils.crypto import get_random_string
settings_dir = os.path.dirname(__file__)
with open(os.path.join(settings_dir, 'local_settings.py'), 'a') as local_settings_fd:
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
SECRET_KEY = get_random_string(50, chars)
local_settings_fd.write('\n%s\n' % "SECRET_KEY = '%s'" % SECRET_KEY)
|
Python
| 0.000001
|
@@ -479,27 +479,8 @@
True
-%0AALLOWED_HOSTS = %5B%5D
%0A%0A#
|
91f207c8fc419c773555bec355d0b0da35061044
|
Version 0.9.82
|
hsync/_version.py
|
hsync/_version.py
|
__version__ = '0.9.81'
|
Python
| 0
|
@@ -18,7 +18,7 @@
.9.8
-1
+2
'%0A
|
b9bd48ff4d69bde723d30c658f46d5216f01f82f
|
Update process.py
|
src/rotest/core/runners/multiprocess/worker/process.py
|
src/rotest/core/runners/multiprocess/worker/process.py
|
"""Multiprocess worker process."""
# pylint: disable=invalid-name,too-many-arguments,wrong-import-position
# pylint: disable=too-many-locals,too-many-instance-attributes
from __future__ import absolute_import
from multiprocessing import Process
import django
import psutil
from six.moves import queue
if not hasattr(django, 'apps'): # noqa
django.setup()
from rotest.common import core_log
from rotest.core.runners.multiprocess.worker.runner import WorkerRunner
from rotest.core.runners.multiprocess.common import (get_item_by_id,
kill_process_tree)
class WorkerProcess(Process):
    """Process that runs tests.

    The process is built with all the manager's test runner properties,
    including the root test item. Once the process is started, the worker
    creates its own test runner instance. Then, it pulls job requests from
    queue one by one, executes them and notifies the manager via queue.

    Attributes:
        save_state (bool): determine if storing resources state is required.
            The behavior can be overridden using resource's save_state flag.
        config (object): config object, will be transfered to each test.
        run_delta (bool): determine whether to run only tests that failed the
            last run (according to the results DB).
        run_name (str): name of the current run.
        requests_queue (multiprocessing.Queue): queue object used to transfer
            jobs to all workers processes from the main runner process.
        reply_queue (multiprocessing.Queue): queue object used to transfer
            data from the main runner to this specific worker.
        results_queue (multiprocessing.Queue): queue object used to transfer
            jobs results from all workers processes to the main runner process.
        root_test (object): test object of the main test.
        failfast (bool): whether to stop the run on the first failure.
        parent_id (number): the id of the parent process.
        test (object): test instance which is ran by the worker.
        timeout (number): timeout which will cause the current test to stop
            if it passes it.
        start_time (datetime.datetime): the start time of the current test.
        skip_init (bool): True to skip resources initialization and validation.
        output_handlers (list): output handlers for the worker's runner.
    """
    def __init__(self, save_state, config, run_delta, run_name, requests_queue,
                 reply_queue, results_queue, root_test, failfast, parent_id,
                 skip_init, output_handlers, *args, **kwargs):
        core_log.debug('Initializing test worker')
        # NOTE(review): *args/**kwargs are accepted but not forwarded to
        # Process.__init__ -- confirm this is intentional.
        super(WorkerProcess, self).__init__()

        # Current test instance, timeout and starting time
        # They will be managed outside of the process
        self.test = None
        self.timeout = None
        self.start_time = None

        self.resource_manager = None

        self.root_test = root_test
        self.reply_queue = reply_queue
        self.results_queue = results_queue
        self.requests_queue = requests_queue
        self.output_handlers = output_handlers

        self.config = config
        self.run_name = run_name
        self.failfast = failfast
        self.parent_id = parent_id
        self.run_delta = run_delta
        self.skip_init = skip_init
        self.save_state = save_state

    def terminate(self):
        """Terminate the worker process and all of its subprocesses."""
        core_log.debug("Ending process %r", self.pid)
        try:
            process = psutil.Process(self.pid)
            kill_process_tree(process)

        except psutil.NoSuchProcess:
            # Process already exited; nothing left to kill.
            core_log.debug("Process %r not found", self.pid)

    def assert_runner_is_alive(self):
        """Validate that the runner process is alive. If not - kill the worker.

        If the worker's parent process id changes it means that the manager
        process died. In that case the worker should die.
        """
        if self.parent_id != psutil.Process(self.pid).ppid():
            core_log.warning('Worker %r parent changed, terminating', self.pid)
            self.terminate()

    def _get_tests(self):
        """Try to get a new test from the pending tests queue.

        Returns:
            object. a pending test, or None if queue is empty.
        """
        try:
            return self.requests_queue.get(block=False)

        except queue.Empty:
            return None

    def run(self):
        """Initialize runner and run tests from queue.

        Creates a test runner then pulls requests from queue,
        executes them and notifies to the runner using results queue.

        Once done it notifies about its termination to the manager process.
        """
        core_log.debug('Worker %r started working', self.pid)

        runner = WorkerRunner(config=self.config,
                              enable_debug=False,
                              failfast=self.failfast,
                              run_name=self.run_name,
                              run_delta=self.run_delta,
                              skip_init=self.skip_init,
                              save_state=self.save_state,
                              outputs=self.output_handlers,
                              reply_queue=self.reply_queue,
                              results_queue=self.results_queue)

        runner.resource_manager = self.resource_manager

        try:
            # The None sentinel from _get_tests (empty queue) ends the loop.
            for test_id in iter(self._get_tests, None):
                self.assert_runner_is_alive()
                test = get_item_by_id(self.root_test, test_id)

                core_log.debug('Worker %r is running %r',
                               self.pid, test.data.name)

                runner.execute(test)

                core_log.debug('Worker %r done with %r',
                               self.pid, test.data.name)

        finally:
            # Always release the resource-manager connection and tell the
            # manager this worker's run is over, even on error.
            if (self.resource_manager is not None and
                    self.resource_manager.is_connected()):
                runner.resource_manager.disconnect()

            core_log.debug('Worker %r finished working', self.pid)
            runner.queue_handler.finish_run()
Python
| 0.000001
|
@@ -202,16 +202,90 @@
_import%0A
+import django%0Aif not hasattr(django, 'apps'): # noqa%0A django.setup()%0A%0A
from mul
@@ -313,30 +313,16 @@
rocess%0A%0A
-import django%0A
import p
@@ -360,68 +360,8 @@
ue%0A%0A
-if not hasattr(django, 'apps'): # noqa%0A django.setup()%0A%0A
from
|
5a38e5924409e887430ee4366c81ebc2b94152a5
|
add trackers registration
|
plenum/server/database_manager.py
|
plenum/server/database_manager.py
|
from typing import Dict, Optional
from common.exceptions import LogicError
from plenum.common.constants import BLS_LABEL, TS_LABEL, IDR_CACHE_LABEL, ATTRIB_LABEL
from plenum.common.ledger import Ledger
from state.state import State
class DatabaseManager():
    """Registry mapping ledger ids to Database pairs plus labeled KV stores."""

    def __init__(self):
        self.databases = {}  # type: Dict[int, Database]
        self.stores = {}
        self._init_db_list()

    def _init_db_list(self):
        # Rebuild the cached ledger/state views whenever registrations change.
        ledgers = {}
        states = {}
        for lid, db in self.databases.items():
            ledgers[lid] = db.ledger
            if db.state:
                states[lid] = db.state
        self._ledgers = ledgers
        self._states = states

    def register_new_database(self, lid, ledger: Ledger, state: Optional[State] = None):
        """Register a ledger (and optional state) under `lid`; duplicates are a LogicError."""
        if lid in self.databases:
            raise LogicError('Trying to add already existing database')
        self.databases[lid] = Database(ledger, state)
        self._init_db_list()

    def get_database(self, lid):
        """Return the Database for `lid`, or None when unknown."""
        return self.databases.get(lid)

    def get_ledger(self, lid):
        """Return the ledger for `lid`, or None when unknown."""
        db = self.databases.get(lid)
        return db.ledger if db is not None else None

    def get_state(self, lid):
        """Return the state for `lid`, or None when unknown."""
        db = self.databases.get(lid)
        return db.state if db is not None else None

    def register_new_store(self, label, store):
        """Register a labeled KV store; duplicates are a LogicError."""
        if label in self.stores:
            raise LogicError('Trying to add already existing store')
        self.stores[label] = store

    def get_store(self, label):
        """Return the store registered under `label`, or None when unknown."""
        return self.stores.get(label)

    @property
    def states(self):
        return self._states

    @property
    def ledgers(self):
        return self._ledgers

    @property
    def bls_store(self):
        return self.get_store(BLS_LABEL)

    @property
    def ts_store(self):
        return self.get_store(TS_LABEL)

    @property
    def idr_cache(self):
        return self.get_store(IDR_CACHE_LABEL)

    @property
    def attribute_store(self):
        return self.get_store(ATTRIB_LABEL)

    # ToDo: implement it and use on close all KV stores
    def close(self):
        # Close all states
        for state in self.states.values():
            state.close()
        # Close all stores
        for store in self.stores.values():
            store.close()
class Database:
    """Pairs a ledger with its (optional) state trie."""

    def __init__(self, ledger, state):
        self.ledger = ledger
        # state may be None for ledgers without an associated state trie.
        self.state = state

    def reset(self):
        """Drop uncommitted ledger txns and revert state to its committed head."""
        self.ledger.reset_uncommitted()
        if self.state:
            self.state.revertToHead(self.state.committedHeadHash)
|
Python
| 0
|
@@ -359,16 +359,43 @@
es = %7B%7D%0A
+ self.trackers = %7B%7D%0A
@@ -1290,24 +1290,152 @@
lid%5D.state%0A%0A
+ def get_tracker(self, lid):%0A if lid not in self.trackers:%0A return None%0A return self.trackers%5Blid%5D%0A%0A
def regi
@@ -1608,16 +1608,208 @@
store%0A%0A
+ def register_new_tracker(self, lid, tracker):%0A if lid in self.trackers:%0A raise LogicError(%22Trying to add already existing tracker%22)%0A self.trackers%5Blid%5D = tracker%0A%0A
def
|
8c4590e19c7b39fe6562671f7d63651e736ffa49
|
debug print
|
controllers/admin/admin_migration_controller.py
|
controllers/admin/admin_migration_controller.py
|
import os
from google.appengine.ext import ndb
from google.appengine.ext import deferred
from google.appengine.ext.webapp import template
from controllers.base_controller import LoggedInHandler
from models.event import Event
from helpers.match_manipulator import MatchManipulator
def add_year(event_key):
    """Backfill Match.year for every match of the given event.

    Deferred-task target: derives the year from the event key's 4-digit
    season prefix (e.g. '2014casj' -> 2014) and persists via
    MatchManipulator.
    """
    matches = event_key.get().matches
    if matches:
        for match in matches:
            # Event keys start with the 4-digit season year.
            match.year = int(match.event.id()[:4])
            # presumably `dirty` forces createOrUpdate to write - confirm
            match.dirty = True
            MatchManipulator.createOrUpdate(match)
class AdminMigration(LoggedInHandler):
    """Renders the admin data-migration landing page."""

    def get(self):
        self._require_admin()
        path = os.path.join(os.path.dirname(__file__), '../../templates/admin/migration.html')
        self.response.out.write(template.render(path, self.template_values))
class AdminMigrationAddMatchYear(LoggedInHandler):
    """Kicks off deferred tasks that backfill Match.year for seasons 1992-2015."""

    def get(self):
        self._require_admin()
        # range() upper bound is exclusive: covers seasons 1992 through 2015.
        for year in range(1992, 2016):
            event_keys = Event.query(Event.year == year).fetch(keys_only=True)
            for event_key in event_keys:
                # One deferred task per event, on the 'admin' queue.
                deferred.defer(add_year, event_key, _queue="admin")
        # NOTE(review): this writes only the LAST year's event_keys (loop
        # variable leaks out of the for) - confirm that is intended.
        self.response.out.write(event_keys)
|
Python
| 0.000003
|
@@ -300,16 +300,42 @@
t_key):%0A
+ logging.info(event_key)%0A
matche
|
895dfda101665e0f70e96d549443f9fe777de1e7
|
Add support for multiple urls per method, auto create method routers
|
hug/decorators.py
|
hug/decorators.py
|
from functools import wraps
from collections import OrderedDict
import sys
from hug.run import server
import hug.output_format
from falcon import HTTP_METHODS, HTTP_BAD_REQUEST
def call(url, accept=HTTP_METHODS, output=hug.output_format.json):
    """Decorator factory exposing a function as an HTTP endpoint at `url`.

    Registers a falcon-style responder for every method in `accept`
    (defaults to all HTTP methods) in the defining module's HUG_API_CALLS
    table, and installs a lazy WSGI entry point on first use.
    """
    def decorator(api_function):
        module = sys.modules[api_function.__module__]

        def interface(request, response):
            # Coerce each annotated parameter with its annotation callable,
            # collecting per-field error messages rather than failing on
            # the first bad one.
            input_parameters = request.params
            errors = {}
            for key, type_handler in api_function.__annotations__.items():
                try:
                    input_parameters[key] = type_handler(input_parameters[key])
                except Exception as error:
                    errors[key] = str(error)
            if errors:
                response.data = output({"errors": errors})
                response.status = HTTP_BAD_REQUEST
                return
            input_parameters['request'], input_parameters['response'] = (request, response)
            response.data = output(api_function(**input_parameters))

        if not 'HUG' in module.__dict__:
            # First endpoint in this module: install a WSGI callable that
            # instantiates the server lazily on first request.
            def api_auto_instantiate(*kargs, **kwargs):
                module.HUG = server(module)
                return module.HUG(*kargs, **kwargs)
            module.HUG = api_auto_instantiate
            module.HUG_API_CALLS = OrderedDict()
        for method in accept:
            # Falcon dispatches via on_get/on_post/... responder names.
            module.HUG_API_CALLS.setdefault(url, {})["on_{0}".format(method.lower())] = interface
        api_function.interface = interface
        interface.api_function = api_function
        return api_function
    return decorator
def get(url):
    """Expose the decorated function on GET requests to `url`."""
    return call(url=url, accept=('GET', ))
def post(url):
    """Expose the decorated function on POST requests to `url`."""
    return call(url=url, accept=('POST', ))
def put(url):
    """Expose the decorated function on PUT requests to `url`.

    Bug fix: the keyword was misspelled `acccept`, which made `call`
    raise TypeError (unexpected keyword argument) the moment @put was
    applied to any function.
    """
    return call(url=url, accept=('PUT', ))
def delete(url):
    """Expose the decorated function on DELETE requests to `url`."""
    return call(url=url, accept=('DELETE', ))
|
Python
| 0
|
@@ -20,16 +20,25 @@
rt wraps
+, partial
%0Afrom co
@@ -194,16 +194,17 @@
call(url
+s
, accept
@@ -247,18 +247,87 @@
mat.json
-):
+, example=None):%0A if isinstance(urls, str):%0A urls = (urls, )%0A
%0A def
@@ -1407,24 +1407,19 @@
for
-method in accept
+url in urls
:%0A
@@ -1419,32 +1419,43 @@
rls:%0A
+ handlers =
module.HUG_API_
@@ -1479,16 +1479,75 @@
url, %7B%7D)
+%0A for method in accept:%0A handlers
%5B%22on_%7B0%7D
@@ -1674,25 +1674,24 @@
unction%0A
-%0A
return a
@@ -1686,270 +1686,200 @@
-return api_function%0A return decorator%0A%0A%0Adef get(url):%0A return call(url=url, accept=('GET', ))%0A%0A%0Adef post(url):%0A return call(url=url, accept=('POST', ))%0A%0A%0Adef put(url):%0A return call(url=url, acccept=('PUT', ))%0A%0A%0Adef delete(url):%0A return call(url=ur
+interface.output_format = output%0A interface.example = example%0A%0A return api_function%0A return decorator%0A%0A%0Afor method in HTTP_METHODS:%0A globals()%5Bmethod.lower()%5D = partial(cal
l, a
@@ -1889,16 +1889,14 @@
pt=(
-'DELETE'
+method
, ))
|
66dd418d481bfc5d3d910823856bdcea8d304a87
|
allow to pass a different root-path
|
hwaf-cmtcompat.py
|
hwaf-cmtcompat.py
|
# -*- python -*-
# stdlib imports
import os
import os.path as osp
import sys
# waf imports ---
import waflib.Options
import waflib.Utils
import waflib.Logs as msg
from waflib.Configure import conf
_heptooldir = osp.dirname(osp.abspath(__file__))
# add this directory to sys.path to ease the loading of other hepwaf tools
if not _heptooldir in sys.path: sys.path.append(_heptooldir)
### ---------------------------------------------------------------------------
@conf
def _cmt_get_srcs_lst(self, source):
    '''hack to support implicit src/*cxx in CMT req-files'''
    if isinstance(source, (list, tuple)):
        # Flatten nested source lists recursively.
        src = []
        for s in source:
            src.extend(self._cmt_get_srcs_lst(s))
        return src
    elif not isinstance(source, type('')):
        ## a waflib.Node ?
        return [source]
    else:
        # `source` is a glob-pattern string. Resolution order:
        #   1) current dir, 2) src/ (the CMT default), then the same two
        #   under the build node, finally the raw string (it may be the
        #   output of a task that has not executed yet).
        src_node = self.path.find_dir('src')
        srcs = self.path.ant_glob(source)
        if srcs:
            # OK. finders, keepers.
            pass
        elif src_node:
            # hack to mimick CMT's default (to take sources from src)
            srcs = src_node.ant_glob(source)
            pass
        if not srcs:
            # ok, try again from bldnode
            src_node = self.path.find_dir('src')
            srcs = self.path.get_bld().ant_glob(source)
            if srcs:
                # OK. finders, keepers.
                pass
            elif src_node:
                # hack to mimick CMT's default (to take sources from src)
                srcs = src_node.get_bld().ant_glob(source)
                pass
            if not srcs:
                # ok, maybe the output of a not-yet executed task
                srcs = source
                pass
            pass
        return waflib.Utils.to_list(srcs)
    # NOTE(review): the two lines below are unreachable - every branch of
    # the if/elif/else above returns.
    self.fatal("unreachable")
    return []
|
Python
| 0.000002
|
@@ -499,19 +499,30 @@
, source
+, root=None
):%0A
-
'''h
@@ -574,16 +574,62 @@
iles'''%0A
+ if root is None:%0A root = self.root%0A
if i
@@ -751,16 +751,22 @@
cs_lst(s
+, root
))%0A
@@ -898,33 +898,28 @@
src_node =
-self.path
+root
.find_dir('s
@@ -938,25 +938,20 @@
srcs =
-self.path
+root
.ant_glo
@@ -1270,25 +1270,20 @@
_node =
-self.path
+root
.find_di
@@ -1314,17 +1314,12 @@
s =
-self.path
+root
.get
|
18fc60eb591a77180686e905591df8d1ce96d75c
|
Remove "import pyganim" from `render.py`
|
hypatia/render.py
|
hypatia/render.py
|
# This module is part of Hypatia and is released under the
# MIT License: http://opensource.org/licenses/MIT
"""How stuff is drawn. Very specific rendering stuff. Includes
screen and viewport.
Mostly a lot of scaffolding.
See Also:
:mod:`animations`
"""
import sys
import time
import itertools
import pygame
import pyganim
from pygame.locals import *
from hypatia import constants
class Screen(object):
    """Everything blits to screen!

    Notes:
      --

    CONSTANTS:
        FPS (int): frames per second limit

    Attributes:
        clock (pygame.time.Clock):
        time_elapsed_milliseconds (int): the time difference between
            the two most recent frames/updates in milliseconds.
        screen_size (tuple):
        screen (pygame.display surface): --
    """
    FPS = 60

    def __init__(self, filters=None):
        """Will init pygame.

        Args:
            filters (list): list of functions which takes and
                returns a surface.
        """
        pygame.init()
        pygame.mouse.set_visible(False)
        self.clock = pygame.time.Clock()
        self.time_elapsed_milliseconds = 0
        display_info = pygame.display.Info()
        # Fullscreen at the desktop's current resolution.
        self.screen_size = (display_info.current_w, display_info.current_h)
        self.screen = pygame.display.set_mode(
            self.screen_size,
            FULLSCREEN | DOUBLEBUF
        )
        self.filters = filters

    def update(self, surface):
        """Update the screen; apply surface to screen, automatically
        rescaling for fullscreen.
        """
        scaled_surface = pygame.transform.scale(surface, self.screen_size)
        if self.filters:
            # Filters run after scaling, in registration order.
            for filter_function in self.filters:
                scaled_surface = filter_function(scaled_surface)
        self.screen.blit(scaled_surface, (0, 0))
        pygame.display.flip()
        # tick() both caps the frame rate and returns the frame delta (ms).
        self.time_elapsed_milliseconds = self.clock.tick(Screen.FPS)
# how much of this is redundant due to pygame Surface.scroll?
class Viewport(object):
    """Display only a fixed area of a surface.

    Attributes:
        surface (pygame.Surface): viewport surface
        rect (pygame.Rect): viewable coordinates
    """

    def __init__(self, size):
        """
        Args:
            size (tuple): (int x, int y) pixel dimensions of viewport.

        Example:
            >>> viewport = Viewport((320, 240))
        """
        self.surface = pygame.Surface(size)
        self.rect = pygame.Rect((0, 0), size)

    def center_on(self, entity, master_rect):
        """Center the viewport rectangle on an object.

        Note:
            entity must have entity.rect (pygame.Rect)

            Does not center if centering would render off-surface;
            finds nearest.

        Args:
            entity: something with an attribute "rect" which value is
                a pygame.Rect.
            master_rect (pygame.Rect): bounds the viewport must stay inside.

        Returns:
            bool: --
        """
        entity_position_x, entity_position_y = entity.rect.center
        difference_x = entity_position_x - self.rect.centerx
        difference_y = entity_position_y - self.rect.centery
        potential_rect = self.rect.move(*(difference_x, difference_y))
        # Clamp so the viewport never leaves master_rect.
        # NOTE(review): the left/top checks compare against 0, which
        # assumes master_rect's origin is (0, 0) - confirm.
        if potential_rect.left < 0:
            difference_x = 0
        if potential_rect.top < 0:
            difference_y = 0
        if potential_rect.right > master_rect.right:
            difference_x = (difference_x -
                            (potential_rect.right - master_rect.right))
        if potential_rect.bottom > master_rect.bottom:
            difference_y = (difference_y -
                            (potential_rect.bottom - master_rect.bottom))
        self.rect.move_ip(*(difference_x, difference_y))

    def relative_position(self, position):
        """Translate an absolute (world) position into viewport coordinates."""
        x, y = position
        offset = self.rect.topleft
        x -= offset[0]
        y -= offset[1]
        position_on_screen = (x, y)
        return position_on_screen

    def blit(self, surface):
        """Draw the correct portion of supplied surface onto viewport.

        Args:
            surface (pygame.Surface): will only draw the area described
                by viewport coordinates.

        Example:
            >>> viewport = Viewport((100, 100))
            >>> surface = pygame.Surface((800, 600))
            >>> viewport.blit(surface)
        """
        self.surface.blit(
            surface,
            (0, 0),
            self.rect
        )
if __name__ == "__main__":
import doctest
doctest.testmod()
|
Python
| 0
|
@@ -333,24 +333,8 @@
me%0D%0A
-import pyganim%0D%0A
from
|
70ea214d8e258e4e7c95b9ba7948dde13e28a878
|
Make screengrab_torture_test test more functions
|
desktopmagic/scripts/screengrab_torture_test.py
|
desktopmagic/scripts/screengrab_torture_test.py
|
from desktopmagic.screengrab_win32 import GrabFailed, getScreenAsImage
def main():
    # NOTE: Python 2 syntax throughout (print statement, `except X, e`).
    print """\
This program helps you test whether screengrab_win32 has memory leaks
and other problems. It takes a screenshot repeatedly and discards it.
Open Task Manager and make sure Physical Memory % is not ballooning.
Memory leaks might not be blamed on the python process itself (which
will show low memory usage).
Lock the workstation for a few minutes; make sure there are no leaks
and that there are no uncaught exceptions here.
Repeat above after RDPing into the workstation and minimizing RDP;
this is like disconnecting the monitor.
Change your color depth settings. Add and remove monitors. RDP
into at 256 colors.
"""
    # Grab forever; GrabFailed is expected (locked workstation, detached
    # monitor via minimized RDP) and must be reported, not crash the loop.
    while True:
        try:
            getScreenAsImage()
            print ".",
        except GrabFailed, e:
            print e
if __name__ == '__main__':
main()
|
Python
| 0.000007
|
@@ -63,16 +63,53 @@
nAsImage
+, getDisplaysAsImages, getRectAsImage
%0A%0Adef ma
@@ -803,17 +803,188 @@
%09print %22
-.
+S%22,%0A%09%09except GrabFailed, e:%0A%09%09%09print e%0A%0A%09%09try:%0A%09%09%09getDisplaysAsImages()%0A%09%09%09print %22D%22,%0A%09%09except GrabFailed, e:%0A%09%09%09print e%0A%0A%09%09try:%0A%09%09%09getRectAsImage((0, 0, 1, 1))%0A%09%09%09print %22R
%22,%0A%09%09exc
|
b825fd03475855bd3fa89cdd2acf6603032a4497
|
add request args only if we have data
|
src/RequestsLibrary/keywords.py
|
src/RequestsLibrary/keywords.py
|
import requests
import sys
import json
from urllib import urlencode
import robot
from robot.libraries.BuiltIn import BuiltIn
class RequestsKeywords(object):
    """Robot Framework keywords wrapping the `requests` HTTP library.

    Sessions are created once with an alias, cached globally, and then
    addressed by that alias from the request keywords.
    """
    ROBOT_LIBRARY_SCOPE = 'Global'

    def __init__(self):
        '''
        TODO: probably can set global proxy here
        '''
        self._cache = robot.utils.ConnectionCache('No sessions created')
        self.builtin = BuiltIn()

    def create_session(self, alias, url, headers=None, cookies=None,
                       auth=None, timeout=None, proxies=None):
        """ Create Session: create a HTTP session to a server

        `url` Base url of the server

        `alias` Robot Framework alias to identify the session

        `headers` Dictionary of default headers

        `auth` Dictionary of username & password for HTTP Basic Auth

        `timeout` connection timeout

        `proxies` proxy server url
        """
        def baseurlhook(args):
            # url is the base url. Request url is uri
            args['url'] = '%s%s' % (url, args['url'])

        self.builtin.log('Creating session: %s' % alias, 'DEBUG')
        auth = requests.auth.HTTPBasicAuth(*auth) if auth else None
        session = requests.session(hooks=dict(args=baseurlhook), auth=auth,
                                   headers=headers, cookies=cookies,
                                   timeout=timeout, proxies=proxies)
        self._cache.register(session, alias=alias)
        return session

    def delete_all_sessions(self):
        """ Removes all the session objects """
        self._cache.empty_cache()

    def to_json(self, content):
        """ Convert a string to a JSON object

        `content` String content to convert into JSON
        """
        return json.loads(content)

    def get(self, alias, uri, headers=None):
        """ Send a GET request on the session object found using the given `alias`

        `alias` that will be used to identify the Session object in the cache

        `uri` to send the GET request to

        `headers` a dictionary of headers to use with the request
        """
        session = self._cache.switch(alias)
        resp = session.get(uri, headers=headers)
        # store the last response object
        session.last_resp = resp
        return resp

    def post(self, alias, uri, data={}, headers=None):
        """ Send a POST request on the session object found using the given `alias`

        `alias` that will be used to identify the Session object in the cache

        `uri` to send the POST request to

        `data` a dictionary of key-value pairs that will be urlencoded and
               sent as POST data, or binary data sent as the raw body content

        `headers` a dictionary of headers to use with the request
        """
        session = self._cache.switch(alias)
        # Dicts are form-encoded; anything else is sent as the raw body.
        # (`data={}` default is never mutated, so sharing it is safe.)
        if type(data) is dict:
            resp = session.post(uri, data=urlencode(data), headers=headers)
        else:
            resp = session.post(uri, data=data, headers=headers)
        # store the last response object
        session.last_resp = resp
        self.builtin.log("Post response: " + resp.content, 'DEBUG')
        return resp

    def put(self, alias, uri, data=None, headers=None):
        """ Send a PUT request on the session object found using the given `alias`

        `alias` that will be used to identify the Session object in the cache

        `uri` to send the PUT request to

        `headers` a dictionary of headers to use with the request
        """
        session = self._cache.switch(alias)
        if type(data) is dict:
            resp = session.put(uri, data=urlencode(data), headers=headers)
        else:
            resp = session.put(uri, data=data, headers=headers)
        # Fix: log at DEBUG (consistent with `post`) instead of the stray
        # `print resp.content` that polluted stdout.
        self.builtin.log("Put response: " + resp.content, 'DEBUG')
        # store the last response object
        session.last_resp = resp
        return resp

    def delete(self, alias, uri, data=(), headers=None):
        """ Send a DELETE request on the session object found using the given `alias`

        `alias` that will be used to identify the Session object in the cache

        `uri` to send the DELETE request to

        `headers` a dictionary of headers to use with the request
        """
        session = self._cache.switch(alias)
        # Fix: only append a query string when there is data to send; the
        # old code always added '?', producing malformed URIs like '/x?'.
        args = "?%s" % urlencode(data) if data else ''
        resp = session.delete("%s%s" % (uri, args), headers=headers)
        # store the last response object
        session.last_resp = resp
        return resp

    def head(self, alias, uri, headers=None):
        """ Send a HEAD request on the session object found using the given `alias`

        `alias` that will be used to identify the Session object in the cache

        `uri` to send the HEAD request to

        `headers` a dictionary of headers to use with the request
        """
        session = self._cache.switch(alias)
        resp = session.head(uri, headers=headers)
        # store the last response object
        session.last_resp = resp
        return resp
if __name__ == '__main__':
rk = RequestsKeywords()
rk.create_session('github','http://github.com/api/v2/json')
resp = rk.get('github', 'http://github.com/api/v2/json/user/search/bulkan')
import pdb; pdb.set_trace()
|
Python
| 0
|
@@ -4301,32 +4301,87 @@
e.switch(alias)%0A
+ args = %22?%25s%22 %25 urlencode(data) if data else ''%0A
resp = s
@@ -4397,17 +4397,16 @@
lete(%22%25s
-?
%25s%22 %25(ur
@@ -4408,31 +4408,20 @@
%25(uri,
-urlencode(data)
+args
), heade
|
9d1dc9c2c649bd117c2cd38cf664e34820f387ea
|
update docstring in fwhm.py
|
fwhm.py
|
fwhm.py
|
import numpy as np
def fwhm(x, y, silence=False):
    """Return the full width at half maximum of the sampled curve y(x).

    Locates where y crosses half of its maximum by finding sign changes
    of (y > max/2), then refines each crossing with a local quadratic fit
    of x as a function of y evaluated at the half-maximum level.

    Args:
        x, y: array-like curve samples of equal length.
        silence (bool): suppress all diagnostic printing when True.
            (Fix: several prints previously ignored this flag.)

    Returns:
        float width when exactly two crossings are found, else None.
    """
    maxVal = np.max(y)
    maxVal50 = 0.5 * maxVal
    if not silence:
        print("Max: " + str(maxVal))
    # Indices where y crosses the half-maximum threshold (sign changes of
    # the boolean "above half max" sequence).
    biggerCondition = [a > maxVal50 for a in y]
    changePoints = []
    freqPoints = []
    for k in range(len(biggerCondition) - 1):
        if biggerCondition[k + 1] != biggerCondition[k]:
            changePoints.append(k)
    if len(changePoints) > 2 and not silence:
        print("WARNING: THE FWHM IS LIKELY TO GIVE INCORRECT VALUES")
    if not silence:
        print("ChangePoints: " + str(changePoints))
    for k in changePoints:
        # Fit x as a quadratic in y over the 3 samples around the crossing,
        # then evaluate at the half-maximum level to interpolate the
        # crossing position.
        yPolyFit = x[k - 1:k + 2]
        xPolyFit = y[k - 1:k + 2]
        z = np.polyfit(xPolyFit, yPolyFit, 2)
        p = np.poly1d(z)
        if not silence:
            print(p)
        freq = p(maxVal50)
        freqPoints.append(freq)
    if len(freqPoints) == 2:
        value = freqPoints[1] - freqPoints[0]
    else:
        value = None
    if not silence:
        print(sorted(freqPoints))
    return value
def main():
    # Smoke test: a Gaussian with sigma=2 whose analytic FWHM is
    # 2*sqrt(2*ln 2)*sigma, printed alongside the computed value.
    x = np.linspace(-10, 10, 100)
    sigma = 2
    y = 3.1 * np.exp(-x**2 / (2 * sigma**2))
    print "OK"
    fwhmVal = fwhm(x, y)
    print "FWHM: " + str(fwhmVal)
    # Analytic reference value for comparison.
    print str(2 * np.sqrt(2 * np.log(2)) * 2)
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -823,25 +823,24 @@
ing the
-frequency
+x values
as a fu
@@ -857,18 +857,15 @@
the
-return los
+y value
s.%0A
@@ -920,43 +920,53 @@
the
-frequency at returnloss = threshold
+value of x at the threshold, i.e. at maxVal50
.%0A%0A
|
161a1cdddd79df7126d6adf1117d51e679d1746c
|
Change --command option in "docker" to a positional argument
|
dodo_commands/extra/standard_commands/docker.py
|
dodo_commands/extra/standard_commands/docker.py
|
"""This command opens a bash shell in the docker container."""
from . import DodoCommand
class Command(DodoCommand):  # noqa
    """Opens a bash shell (optionally running --command) in the docker container."""
    decorators = ["docker", ]

    def add_arguments_imp(self, parser):  # noqa
        parser.add_argument('--command', default="")

    def handle_imp(self, command, **kwargs):  # noqa
        # Empty command -> interactive shell; otherwise `bash -c <command>`.
        self.runcmd(
            ["/bin/bash"] + (["-c", command] if command else []),
            cwd=self.get_config("/DOCKER/default_cwd", None))
|
Python
| 0.000001
|
@@ -234,10 +234,8 @@
nt('
---
comm
@@ -244,18 +244,17 @@
d',
-default=%22%22
+nargs='?'
)%0A%0A
|
9fb9d55ec55e4f5105de0cd6f19b530786ec91a2
|
Change personality inject path to /
|
tempest/api/compute/servers/test_server_personality.py
|
tempest/api/compute/servers/test_server_personality.py
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import base64
from tempest.api.compute import base
from tempest.common.utils.linux import remote_client
from tempest.common import waiters
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import exceptions as lib_exc
from tempest import test
CONF = config.CONF
class ServerPersonalityTestJSON(base.BaseV2ComputeTest):
    """Tests for the Nova "personality" feature (file injection at boot)."""

    @classmethod
    def setup_credentials(cls):
        # SSH validation needs network access to the booted instance.
        cls.prepare_instance_network()
        super(ServerPersonalityTestJSON, cls).setup_credentials()

    @classmethod
    def resource_setup(cls):
        cls.set_validation_resources()
        super(ServerPersonalityTestJSON, cls).resource_setup()

    @classmethod
    def skip_checks(cls):
        super(ServerPersonalityTestJSON, cls).skip_checks()
        if not CONF.compute_feature_enabled.personality:
            raise cls.skipException("Nova personality feature disabled")

    @classmethod
    def setup_clients(cls):
        super(ServerPersonalityTestJSON, cls).setup_clients()
        cls.client = cls.servers_client
        cls.user_client = cls.limits_client

    @test.idempotent_id('3cfe87fd-115b-4a02-b942-7dc36a337fdf')
    def test_create_server_with_personality(self):
        """Boot a server with one injected file and verify its contents."""
        file_contents = 'This is a test file.'
        file_path = '/test.txt'
        personality = [{'path': file_path,
                        'contents': base64.encode_as_text(file_contents)}]
        password = data_utils.rand_password()
        created_server = self.create_test_server(personality=personality,
                                                 adminPass=password,
                                                 wait_until='ACTIVE',
                                                 validatable=True)
        server = self.client.show_server(created_server['id'])['server']
        if CONF.validation.run_validation:
            # SSH in and confirm the injected file round-tripped.
            linux_client = remote_client.RemoteClient(
                self.get_server_ip(server),
                self.ssh_user, password,
                self.validation_resources['keypair']['private_key'],
                server=server,
                servers_client=self.client)
            self.assertEqual(file_contents,
                             linux_client.exec_command(
                                 'sudo cat %s' % file_path))

    @test.idempotent_id('128966d8-71fc-443c-8cab-08e24114ecc9')
    def test_rebuild_server_with_personality(self):
        """Rebuild a server with a personality file onto the alternate image."""
        server = self.create_test_server(wait_until='ACTIVE', validatable=True)
        server_id = server['id']
        file_contents = 'Test server rebuild.'
        personality = [{'path': 'rebuild.txt',
                        'contents': base64.encode_as_text(file_contents)}]
        rebuilt_server = self.client.rebuild_server(server_id,
                                                    self.image_ref_alt,
                                                    personality=personality)
        waiters.wait_for_server_status(self.client, server_id, 'ACTIVE')
        self.assertEqual(self.image_ref_alt,
                         rebuilt_server['server']['image']['id'])

    @test.idempotent_id('176cd8c9-b9e8-48ee-a480-180beab292bf')
    def test_personality_files_exceed_limit(self):
        # Server creation should fail if greater than the maximum allowed
        # number of files are injected into the server.
        file_contents = 'This is a test file.'
        personality = []
        limits = self.user_client.show_limits()['limits']
        max_file_limit = limits['absolute']['maxPersonality']
        # -1 means "unlimited", so the over-limit case cannot be exercised.
        if max_file_limit == -1:
            raise self.skipException("No limit for personality files")
        # Build one more file than the quota allows.
        for i in range(0, int(max_file_limit) + 1):
            path = 'etc/test' + str(i) + '.txt'
            personality.append({'path': path,
                                'contents': base64.encode_as_text(
                                    file_contents)})
        # A 403 Forbidden or 413 Overlimit (old behaviour) exception
        # will be raised when out of quota
        self.assertRaises((lib_exc.Forbidden, lib_exc.OverLimit),
                          self.create_test_server, personality=personality)

    @test.idempotent_id('52f12ee8-5180-40cc-b417-31572ea3d555')
    def test_can_create_server_with_max_number_personality_files(self):
        # Server should be created successfully if maximum allowed number of
        # files is injected into the server during creation.
        file_contents = 'This is a test file.'
        limits = self.user_client.show_limits()['limits']
        max_file_limit = limits['absolute']['maxPersonality']
        if max_file_limit == -1:
            raise self.skipException("No limit for personality files")
        person = []
        for i in range(0, int(max_file_limit)):
            path = '/etc/test' + str(i) + '.txt'
            person.append({
                'path': path,
                'contents': base64.encode_as_text(file_contents),
            })
        password = data_utils.rand_password()
        created_server = self.create_test_server(personality=person,
                                                 adminPass=password,
                                                 wait_until='ACTIVE',
                                                 validatable=True)
        server = self.client.show_server(created_server['id'])['server']
        if CONF.validation.run_validation:
            linux_client = remote_client.RemoteClient(
                self.get_server_ip(server),
                self.ssh_user, password,
                self.validation_resources['keypair']['private_key'],
                server=server,
                servers_client=self.client)
            # Verify every injected file round-trips through the guest.
            for i in person:
                self.assertEqual(base64.decode_as_text(i['contents']),
                                 linux_client.exec_command(
                                     'sudo cat %s' % i['path']))
|
Python
| 0.000002
|
@@ -5419,16 +5419,135 @@
imit)):%0A
+ # NOTE(andreaf) The cirros disk image is blank before boot%0A # so we can only inject safely to /%0A
@@ -5559,20 +5559,16 @@
ath = '/
-etc/
test' +
|
36063d227f7cd3ededdc99b23b0c7911f2233df2
|
Add available params in metering labels client's comment
|
tempest/lib/services/network/metering_labels_client.py
|
tempest/lib/services/network/metering_labels_client.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.services.network import base
class MeteringLabelsClient(base.BaseNetworkClient):
    """Client for the Neutron L3 metering-labels extension API."""

    def create_metering_label(self, **kwargs):
        """Creates an L3 metering label.

        kwargs are passed through as the `metering_label` request body.
        """
        uri = '/metering/metering-labels'
        post_data = {'metering_label': kwargs}
        return self.create_resource(uri, post_data)

    def show_metering_label(self, metering_label_id, **fields):
        """Shows details for a metering label."""
        uri = '/metering/metering-labels/%s' % metering_label_id
        return self.show_resource(uri, **fields)

    def delete_metering_label(self, metering_label_id):
        """Deletes an L3 metering label."""
        uri = '/metering/metering-labels/%s' % metering_label_id
        return self.delete_resource(uri)

    def list_metering_labels(self, **filters):
        """Lists all L3 metering labels that belong to the tenant."""
        uri = '/metering/metering-labels'
        return self.list_resources(uri, **filters)
|
Python
| 0.000016
|
@@ -726,530 +726,1440 @@
-uri = '/metering/metering-labels'%0A post_data = %7B'metering_label': kwargs%7D%0A return self.create_resource(uri, post_data)%0A%0A def show_metering_label(self, metering_label_id, **fields):%0A uri = '/metering/metering-labels/%25s' %25 metering_label_id%0A return self.show_resource(uri, **fields)%0A%0A def delete_metering_label(self, metering_label_id):%0A uri = '/metering/metering-labels/%25s' %25 metering_label_id%0A return self.delete_resource(uri)%0A%0A def list_metering_labels(self, **filters):
+%22%22%22Creates an L3 metering label.%0A%0A Available params: see http://developer.openstack.org/%0A api-ref-networking-v2-ext.html#%0A createMeteringLabel%0A %22%22%22%0A uri = '/metering/metering-labels'%0A post_data = %7B'metering_label': kwargs%7D%0A return self.create_resource(uri, post_data)%0A%0A def show_metering_label(self, metering_label_id, **fields):%0A %22%22%22Shows details for a metering label.%0A%0A Available params: see http://developer.openstack.org/%0A api-ref-networking-v2-ext.html#showMeteringLabel%0A %22%22%22%0A uri = '/metering/metering-labels/%25s' %25 metering_label_id%0A return self.show_resource(uri, **fields)%0A%0A def delete_metering_label(self, metering_label_id):%0A %22%22%22Deletes an L3 metering label.%0A%0A Available params: see http://developer.openstack.org/%0A api-ref-networking-v2-ext.html#%0A deleteMeteringLabel%0A %22%22%22%0A uri = '/metering/metering-labels/%25s' %25 metering_label_id%0A return self.delete_resource(uri)%0A%0A def list_metering_labels(self, **filters):%0A %22%22%22Lists all L3 metering labels that belong to the tenant.%0A%0A Available params: see http://developer.openstack.org/%0A api-ref-networking-v2-ext.html#%0A listMeteringLabels%0A %22%22%22
%0A
|
1e5644acc64f99e8b4bb88a0428cf75d09143f3c
|
Fix experiment jobs log handling
|
polyaxon/events_handlers/tasks.py
|
polyaxon/events_handlers/tasks.py
|
import logging
from django.conf import settings
from django.db import IntegrityError
from db.models.build_jobs import BuildJob
from db.models.experiment_jobs import ExperimentJob
from db.models.experiments import Experiment
from db.models.jobs import Job
from db.models.nodes import ClusterEvent
from db.models.notebooks import NotebookJob
from db.models.tensorboards import TensorboardJob
from events_handlers.utils import safe_log_experiment_job, safe_log_job
from polyaxon.celery_api import app as celery_app
from polyaxon.settings import EventsCeleryTasks
_logger = logging.getLogger(__name__)
@celery_app.task(name=EventsCeleryTasks.EVENTS_HANDLE_NAMESPACE)
def handle_events_namespace(cluster_id, payload):
    """Persist a namespace event payload as a ClusterEvent row."""
    _logger.debug('handling events namespace for cluster: %s', cluster_id)
    ClusterEvent.objects.create(cluster_id=cluster_id, **payload)
@celery_app.task(name=EventsCeleryTasks.EVENTS_HANDLE_RESOURCES)
def handle_events_resources(payload, persist):
    """Handle a resources event payload.

    NOTE(review): persistence is not implemented - the payload is only
    logged regardless of `persist`.
    """
    # here we must persist resources if requested
    _logger.info('handling events resources with persist:%s', persist)
    _logger.info(payload)
@celery_app.task(name=EventsCeleryTasks.EVENTS_HANDLE_EXPERIMENT_JOB_STATUSES)
def events_handle_experiment_job_statuses(payload):
    """Experiment jobs statuses.

    Looks up the ExperimentJob by the `job_uuid` label in the payload and
    applies the new status; missing jobs are logged and skipped.
    """
    details = payload['details']
    job_uuid = details['labels']['job_uuid']
    _logger.debug('handling events status for job_uuid: %s', job_uuid)
    try:
        job = ExperimentJob.objects.get(uuid=job_uuid)
    except ExperimentJob.DoesNotExist:
        _logger.info('Job uuid`%s` does not exist', job_uuid)
        return
    # Set the new status
    try:
        job.set_status(status=payload['status'], message=payload['message'], details=details)
    except IntegrityError:
        # Due to concurrency this could happen, we just ignore it
        pass
@celery_app.task(name=EventsCeleryTasks.EVENTS_HANDLE_JOB_STATUSES)
def events_handle_job_statuses(payload):
    """Project jobs statuses.

    Looks up the Job by the `job_uuid` label and applies the new status;
    missing jobs are logged and skipped.
    """
    details = payload['details']
    job_uuid = details['labels']['job_uuid']
    job_name = details['labels']['job_name']
    _logger.debug('handling events status for job %s', job_name)
    try:
        job = Job.objects.get(uuid=job_uuid)
    except Job.DoesNotExist:
        _logger.info('Job `%s` does not exist', job_name)
        return
    # Set the new status
    try:
        job.set_status(status=payload['status'], message=payload['message'], details=details)
    except IntegrityError:
        # Due to concurrency this could happen, we just ignore it
        pass
@celery_app.task(name=EventsCeleryTasks.EVENTS_HANDLE_PLUGIN_JOB_STATUSES)
def events_handle_plugin_job_statuses(payload):
"""Project Plugin jobs statuses"""
details = payload['details']
app = details['labels']['app']
job_uuid = details['labels']['job_uuid']
job_name = details['labels']['job_name']
_logger.debug('handling events status for job %s %s', job_name, app)
try:
if app == settings.APP_LABELS_TENSORBOARD:
job = TensorboardJob.objects.get(uuid=job_uuid)
elif app == settings.APP_LABELS_NOTEBOOK:
job = NotebookJob.objects.get(uuid=job_uuid)
else:
_logger.info('Plugin job `%s` does not exist', app)
return
except (NotebookJob.DoesNotExist, TensorboardJob.DoesNotExist):
_logger.info('`%s - %s` does not exist', app, job_name)
return
# Set the new status
try:
job.set_status(status=payload['status'], message=payload['message'], details=details)
except IntegrityError:
# Due to concurrency this could happen, we just ignore it
pass
@celery_app.task(name=EventsCeleryTasks.EVENTS_HANDLE_BUILD_JOB_STATUSES)
def events_handle_build_job_statuses(payload):
"""Project Plugin jobs statuses"""
details = payload['details']
app = details['labels']['app']
job_uuid = details['labels']['job_uuid']
job_name = details['labels']['job_name']
_logger.debug('handling events status for build jon %s %s', job_name, app)
try:
build_job = BuildJob.objects.get(uuid=job_uuid)
except BuildJob.DoesNotExist:
_logger.info('Build job `%s` does not exist', job_name)
return
# Set the new status
try:
build_job.set_status(status=payload['status'], message=payload['message'], details=details)
except IntegrityError:
# Due to concurrency this could happen, we just ignore it
pass
@celery_app.task(name=EventsCeleryTasks.EVENTS_HANDLE_LOGS_EXPERIMENT_JOB)
def events_handle_logs_experiment_job(experiment_name,
experiment_uuid,
job_uuid,
log_lines,
task_type=None,
task_idx=None):
if not Experiment.objects.filter(uuid=experiment_uuid).exists():
return
_logger.debug('handling log event for %s %s', experiment_uuid, job_uuid)
if task_type and task_idx:
log_lines = '{}.{} -- {}'.format(task_type, int(task_idx) + 1, log_lines)
safe_log_experiment_job(experiment_name=experiment_name, log_lines=log_lines)
@celery_app.task(name=EventsCeleryTasks.EVENTS_HANDLE_LOGS_JOB)
def events_handle_logs_job(job_uuid, job_name, log_lines):
if not Job.objects.filter(uuid=job_uuid).exists():
return
_logger.debug('handling log event for %s', job_name)
safe_log_job(job_name=job_name, log_lines=log_lines)
@celery_app.task(name=EventsCeleryTasks.EVENTS_HANDLE_LOGS_BUILD_JOB)
def events_handle_logs_build_job(job_uuid, job_name, log_lines):
if not BuildJob.objects.filter(uuid=job_uuid).exists():
return
_logger.debug('handling log event for %s', job_name)
safe_log_job(job_name=job_name, log_lines=log_lines)
|
Python
| 0.000003
|
@@ -5095,16 +5095,17 @@
lines =
+%5B
'%7B%7D.%7B%7D -
@@ -5155,18 +5155,65 @@
log_line
-s
)
+%0A for log_line in log_lines%5D
%0A%0A sa
|
3d6c293e60dd8cb69331b8c6337e12e4919a7a55
|
Fix clang-tidy in vara builder
|
polyjit/buildbot/builders/vara.py
|
polyjit/buildbot/builders/vara.py
|
import sys
from collections import OrderedDict
from polyjit.buildbot.builders import register
from polyjit.buildbot import slaves
from polyjit.buildbot.utils import (builder, define, git, ucmd, ucompile, cmd,
upload_file, ip, s_sbranch, s_abranch,
s_nightly, s_force, s_trigger,
hash_upload_to_master)
from polyjit.buildbot.repos import make_cb, make_new_cb, make_git_cb, make_force_cb, codebases
from polyjit.buildbot.master import URL
from buildbot.plugins import util
from buildbot.changes import filter
################################################################################
project_name = 'vara'
trigger_branches = 'vara-dev|vara-llvm-50-dev|vara-clang-50-dev'
uchroot_src_root = '/mnt/vara-llvm'
checkout_base_dir = '%(prop:builddir)s/vara-llvm'
repos = OrderedDict()
repos['vara-llvm'] = {
'default_branch': 'vara-llvm-50-dev',
'checkout_dir': checkout_base_dir,
}
repos['vara-clang'] = {
'default_branch': 'vara-clang-50-dev',
'checkout_dir': checkout_base_dir + '/tools/clang',
}
repos['vara'] = {
'default_branch': 'vara-dev',
'checkout_dir': checkout_base_dir + '/tools/VaRA',
}
repos['compiler-rt'] = {
'default_branch': 'release_50',
'checkout_dir': checkout_base_dir + '/projects/compiler-rt',
}
repos['clang-tools-extra'] = {
'default_branch': 'release_50',
'checkout_dir': checkout_base_dir + '/tools/clang/tools/extra',
}
################################################################################
codebase = make_git_cb(repos)
force_codebase = make_force_cb(repos)
P = util.Property
def can_build_llvm_debug(host):
if 'can_build_llvm_debug' in host['properties']:
return host['properties']['can_build_llvm_debug']
return False
accepted_builders = slaves.get_hostlist(slaves.infosun, predicate=can_build_llvm_debug)
# yapf: disable
def configure(c):
steps = []
for repo in repos:
steps.append(define(str(repo).upper() +'_ROOT', ip(repos[repo]['checkout_dir'])))
for repo in repos:
steps.append(git(repo, repos[repo]['default_branch'], codebases, workdir=P(str(repo).upper()+'_ROOT')))
steps += [
define('UCHROOT_SRC_ROOT', uchroot_src_root),
ucmd('cmake', P('UCHROOT_SRC_ROOT'),
'-DCMAKE_BUILD_TYPE=Debug',
'-DCMAKE_C_FLAGS=-g -fno-omit-frame-pointer',
'-DCMAKE_CXX_FLAGS=-g -fno-omit-frame-pointer',
'-DBUILD_SHARED_LIBS=On',
'-DLLVM_TARGETS_TO_BUILD=X86',
'-DLLVM_BINUTILS_INCDIR=/usr/include',
'-DLLVM_ENABLE_PIC=On',
'-DLLVM_ENABLE_ASSERTIONS=On',
'-DLLVM_ENABLE_TERMINFO=Off',
'-G', 'Ninja',
env={
'PATH': '/opt/cmake/bin:/usr/local/bin:/usr/bin:/bin'
},
name='cmake',
description='cmake O3, Assertions, PIC, Shared'),
ucompile('ninja', haltOnFailure=True, name='build VaRA'),
ucompile('ninja', 'check-vara', haltOnFailure=False, name='run VaRA regression tests'),
ucmd('python3', 'tidy-vara.py', haltOnFailure=False, workdir='vara-llvm/tools/VaRA/test/', name='run Clang-Tidy'),
]
c['builders'].append(builder('build-' + project_name, None, accepted_builders,
tags=['vara'], factory=util.BuildFactory(steps)))
def schedule(c):
c['schedulers'].extend([
s_abranch('build-' + project_name + '-sched', codebase, ['build-' + project_name],
change_filter=filter.ChangeFilter(branch_re=trigger_branches),
treeStableTimer=5 * 60),
s_force('force-build-' + project_name, force_codebase, ['build-' + project_name]),
s_trigger('trigger-build-' + project_name, codebase, ['build-' + project_name]),
s_nightly('nightly-sched-build-' + project_name, codebase,
['build-vara'],
hour=22, minute=0)
])
# yapf: enable
register(sys.modules[__name__])
|
Python
| 0.000002
|
@@ -3153,16 +3153,46 @@
ucmd(
+'PATH=/mnt/build/bin/:$PATH',
'python3
|
93870690f17a4baddeb33549a1f6c67eeee1abe0
|
Increase cache tile duration from 6 hours to 1 week
|
src/adhocracy/lib/tiles/util.py
|
src/adhocracy/lib/tiles/util.py
|
import logging
from time import time
from pylons import tmpl_context as c
from adhocracy import config
from adhocracy.lib.cache import memoize
log = logging.getLogger(__name__)
class BaseTile(object):
'''
Base class for tiles
'''
def render_tile(template_name, def_name, tile, cached=False, **kwargs):
from adhocracy.lib import templating
begin_time = time()
def render():
return templating.render_def(template_name, def_name,
tile=tile, **kwargs)
rendered = ""
if cached and config.get_bool('adhocracy.cache_tiles'):
@memoize('tile_cache' + template_name + def_name, 86400 / 4)
def _cached(**kwargs):
return render()
rendered = _cached(locale=c.locale, **kwargs)
else:
rendered = render()
if False:
log.debug("Rendering tile %s:%s took %sms" % (
template_name, def_name, (time() - begin_time) * 1000))
return rendered
|
Python
| 0
|
@@ -657,18 +657,18 @@
ame,
+ 7 *
86400
- / 4
)%0A
|
d5691c8031a32e0cfadc74e9fffad8a9e04bc63c
|
enable search navbar entry in production
|
portal/base/context_processors.py
|
portal/base/context_processors.py
|
from django.conf import settings
def search_disabled(request):
"""Facility for disabling search functionality.
This may be used in the future to automatically disable search if the search
backend goes down.
"""
return dict(SEARCH_DISABLED=not settings.DEBUG)
|
Python
| 0
|
@@ -222,20 +222,60 @@
%22%22%22%0A
+ return dict(SEARCH_DISABLED=False)
%0A
+ #
return
|
87b9910a30cb915f5b99a17b0b49570b0027e665
|
load help module
|
gbot.py
|
gbot.py
|
#!/usr/bin/env python
# =============================================================================
# file = gbot.py
# description = IRC bot
# author = GR <https://github.com/shortdudey123>
# create_date = 2014-07-09
# mod_date = 2014-07-09
# version = 0.1
# usage = called as a class
# notes =
# python_ver = 2.7.6
# =============================================================================
import src.bot as bot
__IDENTIFY__ = ''
if __name__ == "__main__":
gbot = bot.IRCBot(server="chat.freenode.com", nick="grbot", port=6667, realName='gbot', identify=__IDENTIFY__, debug=True, connectDelay=4, identVerifyCall='ACC')
gbot.setDefaultChannels({'##gbot': ''})
gbot.addAdmin("shortdudey123")
gbot.loadModules(['opme', 'coreVersion', 'moduleInfo'])
gbot.run()
|
Python
| 0.000001
|
@@ -763,16 +763,24 @@
uleInfo'
+, 'help'
%5D)%0A g
|
9f5afd72bf6dbb44ba764f6731c6313f0cb94bce
|
Use default outputs in shortcuts/utils.py
|
prompt_toolkit/shortcuts/utils.py
|
prompt_toolkit/shortcuts/utils.py
|
from __future__ import unicode_literals
from prompt_toolkit.output.defaults import create_output
from prompt_toolkit.renderer import print_formatted_text as renderer_print_formatted_text
from prompt_toolkit.styles import default_style, BaseStyle
import six
__all__ = (
'print_formatted_text',
'clear',
'set_title',
'clear_title',
)
def print_formatted_text(formatted_text, style=None, true_color=False, file=None):
"""
Print a list of (style_str, text) tuples in the given style to the output.
E.g.::
style = Style.from_dict({
'hello': '#ff0066',
'world': '#884444 italic',
})
fragments = [
('class:hello', 'Hello'),
('class:world', 'World'),
]
print_formatted_text(fragments, style=style)
If you want to print a list of Pygments tokens, use
``prompt_toolkit.style.token_list_to_formatted_text`` to do the conversion.
:param text_fragments: List of ``(style_str, text)`` tuples.
:param style: :class:`.Style` instance for the color scheme.
:param true_color: When True, use 24bit colors instead of 256 colors.
:param file: The output file. This can be `sys.stdout` or `sys.stderr`.
"""
if style is None:
style = default_style()
assert isinstance(style, BaseStyle)
output = create_output(true_color=true_color, stdout=file)
renderer_print_formatted_text(output, formatted_text, style)
def clear():
"""
Clear the screen.
"""
out = create_output()
out.erase_screen()
out.cursor_goto(0, 0)
out.flush()
def set_title(text):
"""
Set the terminal title.
"""
assert isinstance(text, six.text_type)
output = create_output()
output.set_title(text)
def clear_title():
"""
Erase the current title.
"""
set_title('')
|
Python
| 0.000001
|
@@ -76,22 +76,27 @@
import
-create
+get_default
_output%0A
@@ -406,30 +406,14 @@
ne,
-true_color=False, file
+output
=Non
@@ -1064,158 +1064,8 @@
me.%0A
- :param true_color: When True, use 24bit colors instead of 256 colors.%0A :param file: The output file. This can be %60sys.stdout%60 or %60sys.stderr%60.%0A
@@ -1180,56 +1180,37 @@
t =
-create_output(true_color=true_color, stdout=file
+output or get_default_output(
)%0A
@@ -1331,30 +1331,35 @@
%22%0A out =
-create
+get_default
_output()%0A
@@ -1549,14 +1549,19 @@
t =
-create
+get_default
_out
|
2c350cbbd90afaab38223fdfe40737f72bf7974a
|
Set --device-type as required arg for harvest_tracking_email command.
|
tracking/management/commands/harvest_tracking_email.py
|
tracking/management/commands/harvest_tracking_email.py
|
from django.core.management.base import BaseCommand
from tracking.harvest import harvest_tracking_email
class Command(BaseCommand):
help = "Runs harvest_tracking_email to harvest points from emails"
def add_arguments(self, parser):
parser.add_argument(
'--device-type', action='store', dest='device_type', default=None,
help='Tracking device type, one of: iriditrak, dplus, spot, mp70')
def handle(self, *args, **options):
# Specify the device type to harvest from the mailbox.
device_type = None
if options['device_type'] and options['device_type'] in ('iriditrak', 'dplus', 'spot', 'mp70'):
device_type = options['device_type']
harvest_tracking_email(device_type)
|
Python
| 0
|
@@ -329,16 +329,31 @@
e_type',
+ required=True,
default
|
4b148d5b6fda0a8b44109e2024f61df30b981938
|
Add docstrings.
|
HARK/datasets/cpi/us/CPITools.py
|
HARK/datasets/cpi/us/CPITools.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 20 18:07:41 2021
@author: Mateo
"""
import urllib.request
import pandas as pd
import warnings
import numpy as np
def download_cpi_series():
urllib.request.urlretrieve("https://www.bls.gov/cpi/research-series/r-cpi-u-rs-allitems.xlsx",
"r-cpi-u-rs-allitems.xlsx")
def get_cpi_series():
cpi = pd.read_excel("r-cpi-u-rs-allitems.xlsx", skiprows = 5,
usecols = "A:N", index_col=0)
return cpi
def cpi_deflator(from_year, to_year, base_month = None):
# Check month is conforming
if base_month is not None:
months = ['JAN','FEB','MAR','APR','MAY','JUNE',
'JULY','AUG','SEP','OCT','NOV','DEC']
assert base_month in months, ('If a month is provided, it must be ' +
'one of ' + ','.join(months) + '.')
column = base_month
else:
warnings.warn('No base month was provided. Using annual CPI averages.')
column = 'AVG'
# Get cpi and subset the columns we need.
cpi = get_cpi_series()
cpi_series = cpi[[column]].dropna()
try:
deflator = np.divide(cpi_series.loc[from_year].to_numpy(),
cpi_series.loc[to_year].to_numpy())
except KeyError as e:
message = ("Could not find a CPI value for the requested " +
"year-month combinations.")
raise Exception(message).with_traceback(e.__traceback__)
return deflator
#cpi_deflator(1989,2007, 'OCT')
#cpi_deflator(1980,2010)
|
Python
| 0
|
@@ -190,396 +190,2017 @@
-%0A urllib.request.urlretrieve(%22https://www.bls.gov/cpi/research-series/r-cpi-u-rs-allitems.xlsx%22,%0A %22r-cpi-u-rs-allitems.xlsx%22)%0A%0Adef get_cpi_series():%0A %0A cpi = pd.read_excel(%22r-cpi-u-rs-allitems.xlsx%22, skiprows = 5,%0A usecols = %22A:N%22, index_col=0)%0A %0A return cpi%0A %0Adef cpi_deflator(from_year, to_year, base_month = None):
+%22%22%22%0A A method that downloads the cpi research series file directly from the%0A bls site onto the working directory. This is the file that the rest of%0A the functions in this script use and is placed in HARK/datasets/cpi/us.%0A This function is not for users but for whenever mantainers want to update%0A the cpi series as new data comes out.%0A%0A Returns%0A -------%0A None.%0A%0A %22%22%22%0A urllib.request.urlretrieve(%22https://www.bls.gov/cpi/research-series/r-cpi-u-rs-allitems.xlsx%22,%0A %22r-cpi-u-rs-allitems.xlsx%22)%0A%0Adef get_cpi_series():%0A %22%22%22%0A This function reads the cpi series currently in the toolbox and returns it%0A as a pandas dataframe.%0A%0A Returns%0A -------%0A cpi : Pandas DataFrame%0A DataFrame representation of the CPI research series file from the%0A Bureau of Labor Statistics.%0A%0A %22%22%22%0A cpi = pd.read_excel(%22r-cpi-u-rs-allitems.xlsx%22, skiprows = 5,%0A usecols = %22A:N%22, index_col=0)%0A %0A return cpi%0A %0Adef cpi_deflator(from_year, to_year, base_month = None):%0A %22%22%22%0A Finds cpi deflator to transform quantities measured in %22from_year%22 U.S.%0A dollars to %22to_year%22 U.S. dollars.%0A The deflators are computed using the %22r-cpi-u-rs%22 series from the BLS.%0A%0A Parameters%0A ----------%0A from_year : int%0A Base year in which the nominal quantities are currently expressed.%0A to_year : int%0A Target year in which you wish to express the quantities.%0A base_month : str, optional%0A Month at which to take the CPI measurements to calculate the deflator.%0A The default is None, and in this case annual averages of the CPI are%0A used.%0A%0A Returns%0A -------%0A deflator : numpy array%0A A length-1 numpy array with the deflator that, when multiplied by the%0A original nominal quantities, rebases them to %22to_year%22 U.S. dollars.%0A%0A %22%22%22%0A %0A # Check years are conforming%0A assert type(from_year) is int and type(to_year) is int, %22Years must be integers.%22
%0A
@@ -3264,62 +3264,4 @@
ator
-%0A%0A#cpi_deflator(1989,2007, 'OCT')%0A#cpi_deflator(1980,2010)
|
4d247da1ecd39bcd699a55b5387412a1ac9e1582
|
Split Energy and Environment, change Civil Liberties to Social Justice
|
txlege84/topics/management/commands/bootstraptopics.py
|
txlege84/topics/management/commands/bootstraptopics.py
|
from django.core.management.base import BaseCommand
from topics.models import Topic
class Command(BaseCommand):
help = u'Bootstrap the topic lists in the database.'
def handle(self, *args, **kwargs):
self.load_topics()
def load_topics(self):
self.stdout.write(u'Loading hot list topics...')
topics = [
u'Budget & Taxes',
u'Business & Technology',
u'Civil Liberties',
u'Criminal Justice',
u'Energy & Environment',
u'Ethics',
u'Health & Human Services',
u'Higher Education',
u'Immigration & Border Security',
u'Public Education',
u'Transportation',
]
for topic in topics:
Topic.objects.get_or_create(name=topic)
|
Python
| 0
|
@@ -410,40 +410,8 @@
y',%0A
- u'Civil Liberties',%0A
@@ -459,19 +459,33 @@
u'Energy
- &
+',%0A u'
Environm
@@ -657,32 +657,63 @@
lic Education',%0A
+ u'Social Justice',%0A
u'Tr
|
2bf756404700f4c38e2f3895dfa8aba2d8dc13be
|
Refactor and remove char2int
|
hash.py
|
hash.py
|
class HashTable(object):
"""docstring for HashTable"""
table_size = 0
entries_count = 0
alphabet_size = 52
def __init__(self, size=1024):
self.table_size = size
self.hashtable = [[] for i in range(size)]
def __repr__(self):
return "<HashTable: {}>".format(self.hashtable)
def __len__(self):
count = 0
for item in self.hashtable:
if len(item) != 0:
count += 1
return count
def char2int(self, char):
"""Convert a alpha character to an int."""
# offset for ASCII table
# if char >= 'A' and char <= 'Z':
# return ord(char) - 65
# elif char >= 'a' and char <= 'z':
# return ord(char) - 65 - 7
return ord(char)
def hashing(self, key):
"""pass"""
hash_ = 0
for i, c in enumerate(key):
hash_ += pow(
self.alphabet_size, len(key) - i - 1) * self.char2int(c)
return hash_ % self.table_size
def set(self, key, value):
if not isinstance(key, str):
raise TypeError('Only strings may be used as keys.')
hash_ = self.hashing(key)
for i, item in enumerate(self.hashtable[hash_]):
if item[0] == key:
del self.hashtable[hash_][i]
self.entries_count -= 1
self.hashtable[hash_].append((key, value))
self.entries_count += 1
def get(self, key):
hash_ = self.hashing(key)
for i, item in enumerate(self.hashtable[hash_]):
if item[0] == key:
return self.hashtable[hash_]
raise KeyError('Key not in hash table.')
if __name__ == '__main__':
pass
|
Python
| 0.000002
|
@@ -479,310 +479,8 @@
nt%0A%0A
- def char2int(self, char):%0A %22%22%22Convert a alpha character to an int.%22%22%22%0A # offset for ASCII table%0A # if char %3E= 'A' and char %3C= 'Z':%0A # return ord(char) - 65%0A # elif char %3E= 'a' and char %3C= 'z':%0A # return ord(char) - 65 - 7%0A return ord(char)%0A%0A
@@ -495,32 +495,32 @@
ing(self, key):%0A
+
%22%22%22pass%22
@@ -662,21 +662,11 @@
) *
-self.char2int
+ord
(c)%0A
|
3af8cfa40a6770e6940ec7140c92ad51532a4e73
|
improve re usage
|
IPython/core/magics/packaging.py
|
IPython/core/magics/packaging.py
|
"""Implementation of packaging-related magic functions.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2018 The IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import re
import shlex
import sys
from pathlib import Path
from IPython.core.magic import Magics, magics_class, line_magic
def _is_conda_environment():
"""Return True if the current Python executable is in a conda env"""
# TODO: does this need to change on windows?
return Path(sys.prefix, "conda-meta", "history").exists()
def _get_conda_executable():
"""Find the path to the conda executable"""
# Check if there is a conda executable in the same directory as the Python executable.
# This is the case within conda's root environment.
conda = Path(sys.executable).parent / "conda"
if conda.isfile():
return str(conda)
# Otherwise, attempt to extract the executable from conda history.
# This applies in any conda environment.
R = re.compile(r"^#\s*cmd:\s*(?P<command>.*conda)\s[create|install]")
with open(Path(sys.prefix, "conda-meta", "history")) as f:
for line in f:
match = R.match(line)
if match:
return match.groupdict()['command']
# Fallback: assume conda is available on the system path.
return "conda"
CONDA_COMMANDS_REQUIRING_PREFIX = {
'install', 'list', 'remove', 'uninstall', 'update', 'upgrade',
}
CONDA_COMMANDS_REQUIRING_YES = {
'install', 'remove', 'uninstall', 'update', 'upgrade',
}
CONDA_ENV_FLAGS = {'-p', '--prefix', '-n', '--name'}
CONDA_YES_FLAGS = {'-y', '--y'}
@magics_class
class PackagingMagics(Magics):
"""Magics related to packaging & installation"""
@line_magic
def pip(self, line):
"""Run the pip package manager within the current kernel.
Usage:
%pip install [pkgs]
"""
self.shell.system(' '.join([sys.executable, '-m', 'pip', line]))
print("Note: you may need to restart the kernel to use updated packages.")
@line_magic
def conda(self, line):
"""Run the conda package manager within the current kernel.
Usage:
%conda install [pkgs]
"""
if not _is_conda_environment():
raise ValueError("The python kernel does not appear to be a conda environment. "
"Please use ``%pip install`` instead.")
conda = _get_conda_executable()
args = shlex.split(line)
command = args[0]
args = args[1:]
extra_args = []
# When the subprocess does not allow us to respond "yes" during the installation,
# we need to insert --yes in the argument list for some commands
stdin_disabled = getattr(self.shell, 'kernel', None) is not None
needs_yes = command in CONDA_COMMANDS_REQUIRING_YES
has_yes = set(args).intersection(CONDA_YES_FLAGS)
if stdin_disabled and needs_yes and not has_yes:
extra_args.append("--yes")
# Add --prefix to point conda installation to the current environment
needs_prefix = command in CONDA_COMMANDS_REQUIRING_PREFIX
has_prefix = set(args).intersection(CONDA_ENV_FLAGS)
if needs_prefix and not has_prefix:
extra_args.extend(["--prefix", sys.prefix])
self.shell.system(' '.join([conda, command] + extra_args + args))
print("\nNote: you may need to restart the kernel to use updated packages.")
|
Python
| 0
|
@@ -1197,23 +1197,103 @@
-R = re.compile(
+history = Path(sys.prefix, %22conda-meta%22, %22history%22).read_text()%0A match = re.search(%0A
r%22%5E#
@@ -1345,138 +1345,61 @@
ll%5D%22
-)
+,
%0A
-with open(Path(sys.prefix, %22conda-meta%22, %22history%22)) as f:%0A for line in f:%0A match = R.match(line)%0A
+ history,%0A flags=re.MULTILINE,%0A )%0A
@@ -1408,24 +1408,16 @@
match:%0A
-
@@ -1445,17 +1445,17 @@
t()%5B
-'
+%22
command
-'
+%22
%5D%0A
|
4d71ffaccd7ac2911c17d61c7632115ad09aaedb
|
Add tests for CMark.should_test
|
utils/swift_build_support/tests/products/test_cmark.py
|
utils/swift_build_support/tests/products/test_cmark.py
|
# tests/products/test_ninja.py ----------------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2021 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
# ----------------------------------------------------------------------------
import argparse
import os
import shutil
import sys
import tempfile
import unittest
try:
# py2
from StringIO import StringIO
except ImportError:
# py3
from io import StringIO
# from swift_build_support import cmake
from swift_build_support import shell
# from swift_build_support.products import CMark
from swift_build_support.targets import StdlibDeploymentTarget
from swift_build_support.toolchain import host_toolchain
from swift_build_support.workspace import Workspace
class CMarkTestCase(unittest.TestCase):
def setUp(self):
# Setup workspace
tmpdir1 = os.path.realpath(tempfile.mkdtemp())
tmpdir2 = os.path.realpath(tempfile.mkdtemp())
os.makedirs(os.path.join(tmpdir1, 'cmark'))
self.workspace = Workspace(source_root=tmpdir1,
build_root=tmpdir2)
self.host = StdlibDeploymentTarget.host_target()
# Setup toolchain
self.toolchain = host_toolchain()
self.toolchain.cc = '/path/to/cc'
self.toolchain.cxx = '/path/to/cxx'
# Setup args
self.args = argparse.Namespace(
build_cmark=True,
cmake_generator="Ninja",
cmark_build_type="Release",
rebuild=False,
extra_cmake_options=[],
skip_build=False,
darwin_deployment_version_osx="10.9",
cmark_build_variant="Debug",
export_compile_commands=False,
reconfigure=False,
distcc=None,
sccache=None,
cmake_c_launcher=None,
cmake_cxx_launcher=None,
clang_user_visible_version=None,
build_ninja=False,
enable_asan=False,
enable_lsan=False,
enable_sanitize_coverage=False,
enable_tsan=False,
enable_ubsan=False)
# Setup shell
shell.dry_run = True
self._orig_stdout = sys.stdout
self._orig_stderr = sys.stderr
self.stdout = StringIO()
self.stderr = StringIO()
sys.stdout = self.stdout
sys.stderr = self.stderr
def tearDown(self):
shutil.rmtree(self.workspace.build_root)
shutil.rmtree(self.workspace.source_root)
sys.stdout = self._orig_stdout
sys.stderr = self._orig_stderr
shell.dry_run = False
self.workspace = None
self.toolchain = None
self.args = None
def test_build(self):
# Test disabled until we've moved to cmake toolchains
True
# cmark = CMark(
# args=self.args,
# toolchain=self.toolchain,
# source_dir=self.workspace.source_root,
# build_dir=self.workspace.build_root)
# cmark.build(host_target=self.host.name)
# _cmake = cmake.CMake(self.args, self.toolchain)
# self.assertEqual(self.stdout.getvalue(), """\
# + pushd {build_dir}
# + {cmake} -DCMAKE_BUILD_TYPE:STRING={build_variant} {cmake_args} {source_dir}
# + popd
# + {cmake} --build {build_dir} --config {build_variant} -- all
# """.format(build_dir=self.workspace.build_root,
# source_dir=self.workspace.source_root,
# cmake=self.toolchain.cmake,
# cmake_args=' '.join(_cmake.common_options()),
# build_variant=self.args.cmark_build_variant))
|
Python
| 0.000001
|
@@ -760,18 +760,16 @@
t shell%0A
-#
from swi
@@ -3833,8 +3833,1081 @@
riant))%0A
+%0A def test_should_test(self):%0A cmark = CMark(%0A args=argparse.Namespace(test_cmark=True, cross_compile_hosts=%5B%5D),%0A toolchain=self.toolchain,%0A source_dir=self.workspace.source_root,%0A build_dir=self.workspace.build_root)%0A%0A self.assertTrue(cmark.should_test(self.host.name))%0A%0A def test_should_skip_test(self):%0A cmark = CMark(%0A args=argparse.Namespace(test_cmark=False, cross_compile_hosts=%5B%5D),%0A toolchain=self.toolchain,%0A source_dir=self.workspace.source_root,%0A build_dir=self.workspace.build_root)%0A%0A self.assertFalse(cmark.should_test(self.host.name))%0A%0A def test_should_skip_test_cross_compile(self):%0A cmark = CMark(%0A args=argparse.Namespace(test_cmark=True,%0A cross_compile_hosts=%5Bself.host.name%5D),%0A toolchain=self.toolchain,%0A source_dir=self.workspace.source_root,%0A build_dir=self.workspace.build_root)%0A%0A self.assertFalse(cmark.should_test(self.host.name))%0A
|
3039149ca20e9c472340495e4130e331d9c546b3
|
Fix nums assignment properly
|
calc.py
|
calc.py
|
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a*b, nums)
if __name__ == '__main__':
command =sys.argv[1]
nums=map(float(sys.argv[2:]))
if command=='add':
print add_all(nums)
if command=='multiply':
print multiply_all(nums)
|
Python
| 0.000001
|
@@ -171,17 +171,18 @@
ap(float
-(
+,
sys.argv
@@ -186,17 +186,16 @@
rgv%5B2:%5D)
-)
%0A%09if com
|
fd8caec8567178abe09abc810f1e96bfc4bb531b
|
Fix bug in 'multiply' support
|
calc.py
|
calc.py
|
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__== '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(sums))
|
Python
| 0.000001
|
@@ -286,15 +286,15 @@
ply_all(
-s
+n
ums))%0A
|
c3ba924c5fe3fef3f2dd43a9f43eacdebb8d8c13
|
Make the new init script executable
|
init.py
|
init.py
|
#!/usr/bin/env python3
"""Wurstminebot init script.
Usage:
init.py start | stop | restart | status
init.py -h | --help
init.py --version
Options:
-h, --help Print this message and exit.
--version Print version info and exit.
"""
from docopt import docopt
import os
import os.path
import signal
import subprocess
import sys
KEEPALIVE = '/var/local/wurstmineberg/wurstminebot_keepalive'
def _fork(func):
#FROM http://stackoverflow.com/a/6011298/667338
# do the UNIX double-fork magic, see Stevens' "Advanced Programming in the UNIX Environment" for details (ISBN 0201563177)
try:
pid = os.fork()
if pid > 0:
# parent process, return and keep running
return
except OSError as e:
print("fork #1 failed: %d (%s)" % (e.errno, e.strerror), file=sys.stderr)
sys.exit(1)
os.setsid()
# do second fork
try:
pid = os.fork()
if pid > 0:
# exit from second parent
sys.exit(0)
except OSError as e:
print("fork #2 failed: %d (%s)" % (e.errno, e.strerror), file=sys.stderr)
sys.exit(1)
with open(os.path.devnull) as devnull:
sys.stdin = devnull
sys.stdout = devnull
func() # do stuff
os._exit(os.EX_OK) # all done
def start():
def _start():
with open(KEEPALIVE, 'a'):
pass # create the keepalive file
while os.path.exists(KEEPALIVE):
with open(os.path.devnull) as devnull:
p = subprocess.Popen('wurstminebot < /var/local/wurstmineberg/irc', shell=True, stdout=devnull)
with open(KEEPALIVE, 'a') as keepalive:
print(str(p.pid), file=keepalive)
p.communicate()
_fork(_start)
def status():
return os.path.exists(KEEPALIVE)
def stop():
pid = None
try:
with open(KEEPALIVE) as keepalive:
for line in keepalive:
pid = int(line.strip())
except FileNotFoundError:
return # not running
else:
os.remove(KEEPALIVE)
if pid is not None:
os.kill(pid, signal.SIGKILL)
if __name__ == '__main__':
arguments = docopt(__doc__, version='0.1.0')
if arguments['start']:
start()
elif arguments['stop']:
stop()
elif arguments['restart']:
stop()
start()
elif arguments['status']:
print('wurstminebot ' + ('is' if status() else 'is not') + ' running.')
|
Python
| 0.00001
| |
183448b17cfd910444d3807da80ef8549622fce4
|
test the urltopath
|
init.py
|
init.py
|
import yumoter
yumoter = yumoter.yumoter('config/repos.json', '/home/aarwine/git/yumoter/repos')
yumoter.loadRepos("6.4", "wildwest")
a = yumoter._returnNewestByNameArch(["openssl"])
a = a[0]
print a
print "name", a.name
print "arch", a.arch
print "epoch", a.epoch
print "version", a.version
print "release", a.release
print "size", a.size
print "remote_url", a.remote_url
print yumoter._urlToPromoPath(a.remote_url)
b = yumoter.getDeps(a)
print "###"
for pkg in b:
print pkg
for dep in b[pkg]:
print "\t%s - %s" % (dep, dep.remote_url)
|
Python
| 0.000018
|
@@ -369,16 +369,55 @@
te_url%0A%0A
+print yumoter._urlToPath(a.remote_url)%0A
print yu
|
e1e7fec43e5303232833caac5df1ed22526b41c3
|
Fix check of notebook path
|
IPython/html/services/notebooks/tests/test_nbmanager.py
|
IPython/html/services/notebooks/tests/test_nbmanager.py
|
"""Tests for the notebook manager."""
import os
from tornado.web import HTTPError
from unittest import TestCase
from tempfile import NamedTemporaryFile
from IPython.utils.tempdir import TemporaryDirectory
from IPython.utils.traitlets import TraitError
from IPython.html.utils import url_path_join
from ..filenbmanager import FileNotebookManager
from ..nbmanager import NotebookManager
class TestFileNotebookManager(TestCase):
def test_nb_dir(self):
with TemporaryDirectory() as td:
fm = FileNotebookManager(notebook_dir=td)
self.assertEqual(fm.notebook_dir, td)
def test_create_nb_dir(self):
with TemporaryDirectory() as td:
nbdir = os.path.join(td, 'notebooks')
fm = FileNotebookManager(notebook_dir=nbdir)
self.assertEqual(fm.notebook_dir, nbdir)
def test_missing_nb_dir(self):
with TemporaryDirectory() as td:
nbdir = os.path.join(td, 'notebook', 'dir', 'is', 'missing')
self.assertRaises(TraitError, FileNotebookManager, notebook_dir=nbdir)
def test_invalid_nb_dir(self):
with NamedTemporaryFile() as tf:
self.assertRaises(TraitError, FileNotebookManager, notebook_dir=tf.name)
def test_get_os_path(self):
# full filesystem path should be returned with correct operating system
# separators.
with TemporaryDirectory() as td:
nbdir = os.path.join(td, 'notebooks')
fm = FileNotebookManager(notebook_dir=nbdir)
path = fm.get_os_path('test.ipynb', '/path/to/notebook/')
rel_path_list = '/path/to/notebook/test.ipynb'.split('/')
fs_path = os.path.join(fm.notebook_dir, *rel_path_list)
self.assertEqual(path, fs_path)
fm = FileNotebookManager(notebook_dir=nbdir)
path = fm.get_os_path('test.ipynb')
fs_path = os.path.join(fm.notebook_dir, 'test.ipynb')
self.assertEqual(path, fs_path)
fm = FileNotebookManager(notebook_dir=nbdir)
path = fm.get_os_path('test.ipynb', '////')
fs_path = os.path.join(fm.notebook_dir, 'test.ipynb')
self.assertEqual(path, fs_path)
class TestNotebookManager(TestCase):
def make_dir(self, abs_path, rel_path):
"""make subdirectory, rel_path is the relative path
to that directory from the location where the server started"""
os_path = os.path.join(abs_path, rel_path)
try:
os.makedirs(os_path)
except OSError:
print "Directory already exists."
def test_create_notebook_model(self):
with TemporaryDirectory() as td:
# Test in root directory
nm = FileNotebookManager(notebook_dir=td)
model = nm.create_notebook_model()
assert isinstance(model, dict)
self.assertIn('name', model)
self.assertIn('path', model)
self.assertEqual(model['name'], 'Untitled0.ipynb')
self.assertEqual(model['path'], '/')
# Test in sub-directory
sub_dir = '/foo/'
self.make_dir(nm.notebook_dir, 'foo')
model = nm.create_notebook_model(None, sub_dir)
assert isinstance(model, dict)
self.assertIn('name', model)
self.assertIn('path', model)
self.assertEqual(model['name'], 'Untitled0.ipynb')
self.assertEqual(model['path'], sub_dir)
def test_get_notebook_model(self):
with TemporaryDirectory() as td:
# Test in root directory
# Create a notebook
nm = FileNotebookManager(notebook_dir=td)
model = nm.create_notebook_model()
name = model['name']
path = model['path']
# Check that we 'get' on the notebook we just created
model2 = nm.get_notebook_model(name, path)
assert isinstance(model2, dict)
self.assertIn('name', model2)
self.assertIn('path', model2)
self.assertEqual(model['name'], name)
self.assertEqual(model['path'], path)
# Test in sub-directory
sub_dir = '/foo/'
self.make_dir(nm.notebook_dir, 'foo')
model = nm.create_notebook_model(None, sub_dir)
model2 = nm.get_notebook_model(name, sub_dir)
assert isinstance(model2, dict)
self.assertIn('name', model2)
self.assertIn('path', model2)
self.assertIn('content', model2)
self.assertEqual(model2['name'], 'Untitled0.ipynb')
self.assertEqual(model2['path'], sub_dir)
def test_update_notebook_model(self):
with TemporaryDirectory() as td:
# Test in root directory
# Create a notebook
nm = FileNotebookManager(notebook_dir=td)
model = nm.create_notebook_model()
name = model['name']
path = model['path']
# Change the name in the model for rename
model['name'] = 'test.ipynb'
model = nm.update_notebook_model(model, name, path)
assert isinstance(model, dict)
self.assertIn('name', model)
self.assertIn('path', model)
self.assertEqual(model['name'], 'test.ipynb')
# Make sure the old name is gone
self.assertRaises(HTTPError, nm.get_notebook_model, name, path)
# Test in sub-directory
# Create a directory and notebook in that directory
sub_dir = '/foo/'
self.make_dir(nm.notebook_dir, 'foo')
model = nm.create_notebook_model(None, sub_dir)
name = model['name']
path = model['path']
# Change the name in the model for rename
model['name'] = 'test_in_sub.ipynb'
model = nm.update_notebook_model(model, name, path)
assert isinstance(model, dict)
self.assertIn('name', model)
self.assertIn('path', model)
self.assertEqual(model['name'], 'test_in_sub.ipynb')
self.assertEqual(model['path'], sub_dir)
# Make sure the old name is gone
self.assertRaises(HTTPError, nm.get_notebook_model, name, path)
def test_save_notebook_model(self):
with TemporaryDirectory() as td:
# Test in the root directory
# Create a notebook
nm = FileNotebookManager(notebook_dir=td)
model = nm.create_notebook_model()
name = model['name']
path = model['path']
# Get the model with 'content'
full_model = nm.get_notebook_model(name, path)
# Save the notebook
model = nm.save_notebook_model(full_model, name, path)
assert isinstance(model, dict)
self.assertIn('name', model)
self.assertIn('path', model)
self.assertEqual(model['name'], name)
self.assertEqual(model['path'], path)
# Test in sub-directory
# Create a directory and notebook in that directory
sub_dir = '/foo/'
self.make_dir(nm.notebook_dir, 'foo')
model = nm.create_notebook_model(None, sub_dir)
name = model['name']
path = model['path']
model = nm.get_notebook_model(name, path)
# Change the name in the model for rename
model = nm.save_notebook_model(model, name, path)
assert isinstance(model, dict)
self.assertIn('name', model)
self.assertIn('path', model)
self.assertEqual(model['name'], 'Untitled0.ipynb')
self.assertEqual(model['path'], sub_dir)
def test_delete_notebook_model(self):
with TemporaryDirectory() as td:
# Test in the root directory
# Create a notebook
nm = FileNotebookManager(notebook_dir=td)
model = nm.create_notebook_model()
name = model['name']
path = model['path']
# Delete the notebook
nm.delete_notebook_model(name, path)
# Check that a 'get' on the deleted notebook raises and error
self.assertRaises(HTTPError, nm.get_notebook_model, name, path)
|
Python
| 0.000001
|
@@ -3045,17 +3045,16 @@
ath'%5D, '
-/
')%0A%0A
|
27543f73244c7312ea511c7e00d9eecf7b7525e9
|
store model in self.model
|
cost.py
|
cost.py
|
"""
Cost classes: classes that encapsulate the cost evaluation for the DAE
training criterion.
"""
# Standard library imports
from itertools import izip
# Third-party imports
from theano import tensor
class SupervisedCost(object):
"""
A cost object is allocated in the same fashion as other
objects in this file, with a 'conf' dictionary (or object
supporting __getitem__) containing relevant hyperparameters.
"""
def __init__(self, conf, model):
self.conf = conf
# TODO: Do stuff depending on conf parameters (for example
# use different cross-entropy if act_end == "tanh" or not)
def __call__(self, *inputs):
"""Symbolic expression denoting the reconstruction error."""
raise NotImplementedError()
class MeanSquaredError(SupervisedCost):
"""
Symbolic expression for mean-squared error between the input and the
denoised reconstruction.
"""
def __call__(self, prediction, target):
msq = lambda p, t: ((p - t)**2).sum(axis=1).mean()
if isinstance(prediction, tensor.Variable):
return msq(prediction, target)
else:
pairs = izip(prediction, target)
# TODO: Think of something more sensible to do than sum(). On one
# hand, if we're treating everything in parallel it should return
# a list. On the other, we need a scalar for everything else to
# work.
# This will likely get refactored out into a "costs" module or
# something like that.
return sum([msq(p, t) for p, t in pairs])
class CrossEntropy(SupervisedCost):
"""
Symbolic expression for elementwise cross-entropy between input
and reconstruction. Use for binary-valued features (but not for,
e.g., one-hot codes).
"""
def __call__(self, prediction, target):
ce = lambda x, z: x * tensor.log(z) + (1 - x) * tensor.log(1 - z)
if isinstance(prediction, tensor.Variable):
return ce(prediction, target)
pairs = izip(prediction, target)
return sum([ce(p, t).sum(axis=1).mean() for p, t in pairs])
##################################################
def get(str):
""" Evaluate str into a cost object, if it exists """
obj = globals()[str]
if issubclass(obj, Cost):
return obj
else:
raise NameError(str)
|
Python
| 0.000001
|
@@ -624,16 +624,43 @@
or not)
+%0A self.model = model
%0A%0A de
|
a04b18b8fbc8626b5592593a2b6ce635921a1e34
|
Delete text after entered, but this time actually do it
|
data.py
|
data.py
|
from twitter import *
from tkinter import *
def showTweets(x, num):
# display a number of new tweets and usernames
for i in range(0, num):
line1 = (x[i]['user']['screen_name'])
line2 = (x[i]['text'])
w = Label(master, text=line1 + "\n" + line2 + "\n\n")
w.pack()
def getTweets():
x = t.statuses.home_timeline(screen_name="AndrewKLeech")
return x
def tweet():
global entryWidget
if entryWidget.get().strip() == "":
print("Empty")
else:
#t.statuses.update(status=entryWidget.get().strip())
#entryWidget.put().strip()
entryWidget.insert(0,'')
print("working")
# Put in token, token_key, con_secret, con_secret_key
t = Twitter(
auth=OAuth('705153959368007680-F5OUf8pvmOlXku1b7gpJPSAToqzV4Fb', 'bEGLkUJBziLc17EuKLTAMio8ChmFxP9aHYADwRXnxDsoC',
'gYDgR8lcTGcVZS9ucuEIYsMuj', '1dwHsLDN2go3aleQ8Q2vcKRfLETc51ipsP8310ayizL2p3Ycii'))
numberOfTweets = 5
master = Tk()
showTweets(getTweets(), numberOfTweets)
master.title("Tkinter Entry Widget")
master["padx"] = 40
master["pady"] = 20
# Create a text frame to hold the text Label and the Entry widget
textFrame = Frame(master)
#Create a Label in textFrame
entryLabel = Label(textFrame)
entryLabel["text"] = "Make a new Tweet:"
entryLabel.pack(side=LEFT)
# Create an Entry Widget in textFrame
entryWidget = Entry(textFrame)
entryWidget["width"] = 50
entryWidget.pack(side=LEFT)
textFrame.pack()
button = Button(master, text="Submit", command=tweet)
button.pack()
master.mainloop()
|
Python
| 0.000002
|
@@ -513,17 +513,16 @@
-#
t.status
@@ -573,17 +573,16 @@
-#
entryWid
@@ -589,53 +589,20 @@
get.
-put().strip()%0A entryWidget.insert(0,''
+delete(0,END
)%0A
|
b0648ee8a64a0ee5a1a605c216d3a88e6e72f083
|
Fix failing protobuf contentview test
|
test/test_contentview.py
|
test/test_contentview.py
|
from libmproxy.exceptions import ContentViewException
from netlib.http import Headers
import netlib.utils
from netlib import encoding
import libmproxy.contentviews as cv
import tutils
try:
import pyamf
except ImportError:
pyamf = None
try:
import cssutils
except:
cssutils = None
class TestContentView:
def test_view_auto(self):
v = cv.ViewAuto()
f = v(
"foo",
headers=Headers()
)
assert f[0] == "Raw"
f = v(
"<html></html>",
headers=Headers(content_type="text/html")
)
assert f[0] == "HTML"
f = v(
"foo",
headers=Headers(content_type="text/flibble")
)
assert f[0] == "Raw"
f = v(
"<xml></xml>",
headers=Headers(content_type="text/flibble")
)
assert f[0].startswith("XML")
def test_view_urlencoded(self):
d = netlib.utils.urlencode([("one", "two"), ("three", "four")])
v = cv.ViewURLEncoded()
assert v(d)
d = netlib.utils.urlencode([("adsfa", "")])
v = cv.ViewURLEncoded()
assert v(d)
def test_view_html(self):
v = cv.ViewHTML()
s = "<html><br><br></br><p>one</p></html>"
assert v(s)
s = "gobbledygook"
assert not v(s)
def test_view_html_outline(self):
v = cv.ViewHTMLOutline()
s = "<html><br><br></br><p>one</p></html>"
assert v(s)
def test_view_json(self):
cv.VIEW_CUTOFF = 100
v = cv.ViewJSON()
assert v("{}")
assert not v("{")
assert v("[1, 2, 3, 4, 5]")
def test_view_xml(self):
v = cv.ViewXML()
assert v("<foo></foo>")
assert not v("<foo>")
s = """<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet title="XSL_formatting"?>
<rss
xmlns:media="http://search.yahoo.com/mrss/"
xmlns:atom="http://www.w3.org/2005/Atom"
version="2.0">
</rss>
"""
assert v(s)
def test_view_raw(self):
v = cv.ViewRaw()
assert v("foo")
def test_view_javascript(self):
v = cv.ViewJavaScript()
assert v("[1, 2, 3]")
assert v("[1, 2, 3")
assert v("function(a){[1, 2, 3]}")
def test_view_css(self):
v = cv.ViewCSS()
with open(tutils.test_data.path('data/1.css'), 'r') as fp:
fixture_1 = fp.read()
result = v('a')
if cssutils:
assert len(list(result[1])) == 0
else:
assert len(list(result[1])) == 1
result = v(fixture_1)
if cssutils:
assert len(list(result[1])) > 1
else:
assert len(list(result[1])) == 1
def test_view_hex(self):
v = cv.ViewHex()
assert v("foo")
def test_view_image(self):
v = cv.ViewImage()
p = tutils.test_data.path("data/image.png")
assert v(file(p, "rb").read())
p = tutils.test_data.path("data/image.gif")
assert v(file(p, "rb").read())
p = tutils.test_data.path("data/image-err1.jpg")
assert v(file(p, "rb").read())
p = tutils.test_data.path("data/image.ico")
assert v(file(p, "rb").read())
assert not v("flibble")
def test_view_multipart(self):
view = cv.ViewMultipart()
v = """
--AaB03x
Content-Disposition: form-data; name="submit-name"
Larry
--AaB03x
""".strip()
h = Headers(content_type="multipart/form-data; boundary=AaB03x")
assert view(v, headers=h)
h = Headers()
assert not view(v, headers=h)
h = Headers(content_type="multipart/form-data")
assert not view(v, headers=h)
h = Headers(content_type="unparseable")
assert not view(v, headers=h)
def test_get_content_view(self):
r = cv.get_content_view(
cv.get("Raw"),
"[1, 2, 3]",
headers=Headers(content_type="application/json")
)
assert "Raw" in r[0]
r = cv.get_content_view(
cv.get("Auto"),
"[1, 2, 3]",
headers=Headers(content_type="application/json")
)
assert r[0] == "JSON"
r = cv.get_content_view(
cv.get("Auto"),
"[1, 2",
headers=Headers(content_type="application/json")
)
assert "Raw" in r[0]
tutils.raises(
ContentViewException,
cv.get_content_view,
cv.get("AMF"),
"[1, 2",
headers=Headers()
)
r = cv.get_content_view(
cv.get("Auto"),
encoding.encode('gzip', "[1, 2, 3]"),
headers=Headers(
content_type="application/json",
content_encoding="gzip"
)
)
assert "decoded gzip" in r[0]
assert "JSON" in r[0]
r = cv.get_content_view(
cv.get("XML"),
encoding.encode('gzip', "[1, 2, 3]"),
headers=Headers(
content_type="application/json",
content_encoding="gzip"
)
)
assert "decoded gzip" in r[0]
assert "Raw" in r[0]
if pyamf:
def test_view_amf_request():
v = cv.ViewAMF()
p = tutils.test_data.path("data/amf01")
assert v(file(p, "rb").read())
p = tutils.test_data.path("data/amf02")
assert v(file(p, "rb").read())
def test_view_amf_response():
v = cv.ViewAMF()
p = tutils.test_data.path("data/amf03")
assert v(file(p, "rb").read())
if cv.ViewProtobuf.is_available():
def test_view_protobuf_request():
v = cv.ViewProtobuf()
p = tutils.test_data.path("data/protobuf01")
content_type, output = v(file(p, "rb").read())
assert content_type == "Protobuf"
assert output[0].text == '1: "3bbc333c-e61c-433b-819a-0b9a8cc103b8"'
def test_get_by_shortcut():
assert cv.get_by_shortcut("h")
|
Python
| 0.000002
|
@@ -5972,16 +5972,21 @@
tput
-%5B0%5D.text
+.next()%5B0%5D%5B1%5D
==
|
2b12019b0e6b5881e38d48cc7981936fe5e7c81d
|
Fix a bug with info pages
|
shortener/links/views.py
|
shortener/links/views.py
|
from django.db.models import Q
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.utils.baseconv import base64
from django.utils import timezone
from django.views.generic import RedirectView, ListView, DetailView
from .models import Link
from linkmetrics.models import LinkLog
class LinkRedirectView(RedirectView):
permanent = True
query_string = True
def get_redirect_url(self, **kwargs):
identifier = self.kwargs['identifier']
# If identifier includes a link it means we don't need to do a base64
# decode. Just a fetch based on the identifier
if '-' in identifier or '_' in identifier or '.' in identifider:
link = get_object_or_404(Link, identifier=identifier)
link.log(self.request)
return link.original_url
# decode based on the identifier
pk = base64.decode(identifier)
try:
link = Link.objects.get(Q(pk=pk) | Q(identifier=identifier))
except Link.DoesNotExist:
raise Http404
link.log(self.request)
return link.original_url
class LinkListView(ListView):
model = Link
class LinkDetailView(DetailView):
model = Link
def get_object(self):
identifier = self.kwargs['identifier']
if '-' in identifier or '_' in identifier or '.' in identifider:
return get_object_or_404(Link, identifier=identifier)
# decode based on the identifier
pk = base64.decode(identifier)
try:
link = Link.objects.get(Q(pk=pk) | Q(identifier=identifier))
except Link.DoesNotExist:
raise Http404
return link
def get_context_data(self, *args, **kwargs):
context = super(LinkDetailView, self).get_context_data(**kwargs)
# Ghetto style just to get it working
counts = []
for date in self.object.linklog_set.dates('created', 'day'):
count = LinkLog.objects.filter(
created__day=date.day,
created__month=date.month,
created__year=date.year,
link=self.object
).count()
counts.append(
{"date": date + timezone.timedelta(1), # timezone to fix weird off-by-one
"count": count}
)
context['counts'] = counts
return context
|
Python
| 0.000002
|
@@ -681,33 +681,32 @@
'.' in identifi
-d
er:%0A
@@ -1369,17 +1369,16 @@
identifi
-d
er:%0A
|
de80ad2bd5915533e990bf1a161fa4079c9a7550
|
Add a new class for filtering, but not use it yet #67
|
siebenapp/enumeration.py
|
siebenapp/enumeration.py
|
import collections
import math
from dataclasses import dataclass
from typing import List, Dict, Tuple, Any, Set, Iterable
from siebenapp.domain import Graph, Command, HoldSelect, Select
from siebenapp.goaltree import Goals
from siebenapp.zoom import Zoom
@dataclass(frozen=True)
class ToggleOpenView(Command):
"""Switch between "only open goals" and "all goals" views"""
@dataclass(frozen=True)
class ToggleSwitchableView(Command):
"""Switch between "only switchable goals" and "all goals" views"""
class BidirectionalIndex:
NOT_FOUND = -2
def __init__(self, goals: Iterable[int]):
self.m = {g: i + 1 for i, g in enumerate(sorted(g for g in goals if g > 0))}
self.length = len(self.m)
def forward(self, goal_id: int) -> int:
if goal_id < 0:
return goal_id
goal_id = self.m[goal_id]
new_id = goal_id % 10
if self.length > 10:
new_id += 10 * ((goal_id - 1) // 10 + 1)
if self.length > 90:
new_id += 100 * ((goal_id - 1) // 100 + 1)
if self.length > 900:
new_id += 1000 * ((goal_id - 1) // 1000 + 1)
return new_id
def backward(self, goal_id: int) -> int:
possible_selections: List[int] = [
g for g in self.m if self.forward(g) == goal_id
]
if len(possible_selections) == 1:
return possible_selections[0]
return BidirectionalIndex.NOT_FOUND
class Enumeration(Graph):
def __init__(self, goaltree: Graph) -> None:
super().__init__()
self.goaltree = goaltree
self.selection_cache: List[int] = []
self._open: bool = True
self._top: bool = False
self._goal_filter: Set[int] = set()
self._update_mapping()
def _update_mapping(self, clear_cache: bool = False) -> None:
self._goal_filter = self._update_top_mapping(self._update_open_mapping())
if clear_cache:
self.selection_cache.clear()
def _update_open_mapping(self) -> Set[int]:
if not self._open:
return set(self.goaltree.q().keys())
return {k for k, v in self.goaltree.q(keys="open").items() if v["open"]}
def _update_top_mapping(self, original_mapping: Set[int]) -> Set[int]:
if not self._top:
return set(original_mapping)
goals = {
k
for k, v in self.goaltree.q(keys="open,switchable").items()
if v["open"] and v["switchable"] and k in original_mapping
}
if goals and self.goaltree.settings("selection") not in goals:
self.goaltree.accept(Select(min(goals)))
if goals and self.goaltree.settings("previous_selection") not in goals:
self.accept(HoldSelect())
return goals
def _id_mapping(
self, keys: str = "name"
) -> Tuple[Dict[int, Any], BidirectionalIndex]:
goals = self.goaltree.q(keys)
goals = {k: v for k, v in goals.items() if k in self._goal_filter}
if self._top:
for attrs in goals.values():
if "edge" in attrs:
attrs["edge"] = []
elif self._open:
for attrs in goals.values():
if "edge" in attrs:
attrs["edge"] = [
e for e in attrs["edge"] if e[0] in self._goal_filter
]
return goals, BidirectionalIndex(goals)
def accept(self, command: Command) -> None:
if isinstance(command, Select):
self._select(command)
elif isinstance(command, ToggleOpenView):
self._open = not self._open
self._update_mapping(clear_cache=True)
elif isinstance(command, ToggleSwitchableView):
self._top = not self._top
self._update_mapping(clear_cache=True)
else:
self.goaltree.accept(command)
def events(self) -> collections.deque:
return self.goaltree.events()
def q(self, keys: str = "name") -> Dict[int, Any]:
self._update_mapping()
result: Dict[int, Any] = dict()
goals, index = self._id_mapping(keys)
for old_id, val in goals.items():
new_id = index.forward(old_id)
result[new_id] = dict((k, v) for k, v in val.items() if k != "edge")
if "edge" in val:
result[new_id]["edge"] = [
(index.forward(edge[0]), edge[1]) for edge in val["edge"]
]
return result
def _select(self, command: Select):
self._update_mapping()
goal_id = command.goal_id
goals, index = self._id_mapping()
if goal_id >= 10:
self.selection_cache = []
if self.selection_cache:
goal_id = 10 * self.selection_cache.pop() + goal_id
if goal_id > max(index.forward(k) for k in goals.keys()):
goal_id %= int(pow(10, int(math.log(goal_id, 10))))
original_id = index.backward(goal_id)
if original_id != BidirectionalIndex.NOT_FOUND:
self.goaltree.accept(Select(original_id))
self.selection_cache = []
else:
self.selection_cache.append(goal_id)
|
Python
| 0
|
@@ -1442,16 +1442,472 @@
FOUND%0A%0A%0A
+class OpenView(Graph):%0A %22%22%22Non-persistent view layer that allows to switch%0A between only-open and all goals%22%22%22%0A%0A def __init__(self, goaltree: Graph):%0A self.goaltree = goaltree%0A%0A def accept(self, command: Command) -%3E None:%0A self.goaltree.accept(command)%0A%0A def events(self) -%3E collections.deque:%0A return self.goaltree.events()%0A%0A def q(self, keys: str = %22name%22) -%3E Dict%5Bint, Any%5D:%0A return self.goaltree.q(keys)%0A%0A%0A
class En
|
2ecdd27d96da12bf44a5751b7d76cedf05c1e620
|
don't remove all whitespace
|
scripts/DYKChecker.py
|
scripts/DYKChecker.py
|
# -*- coding: utf-8 -*-
"""DYKChecker!"""
__version__ = "1.0.1"
__author__ = "Sorawee Porncharoenwase"
import json
import init
import wp
import pywikibot
from wp import lre
def glob():
pass
def main():
page = wp.Page(wp.toutf(raw_input()))
dic = {}
while True:
try:
text = page.get()
break
except pywikibot.NoPage:
dic["error"] = u"ΰΉΰΈ‘ΰΉΰΈ‘ΰΈ΅ΰΈ«ΰΈΰΉΰΈ²ΰΈΰΈ±ΰΈΰΈΰΈ₯ΰΉΰΈ²ΰΈ§"
break
except pywikibot.IsRedirectPage:
page = page.getRedirectTarget()
except:
dic["error"] = u"ΰΉΰΈΰΈ΄ΰΈΰΈΰΉΰΈΰΈΰΈ΄ΰΈΰΈΰΈ₯ΰΈ²ΰΈΰΉΰΈ‘ΰΉΰΈΰΈ£ΰΈ²ΰΈΰΈͺΰΈ²ΰΉΰΈ«ΰΈΰΈΈ"
break
oldtext = text
if "error" not in dic:
# delete all references
text, numinline = lre.subn(r"(?s)<ref.*?</ref>", "", text)
text = lre.rmsym(r"\{\{", r"\}\}", text)
text = lre.rmsym(r"\{\|", r"\|\}", text)
text = pywikibot.removeDisabledParts(text)
text = pywikibot.removeHTMLParts(text)
text = pywikibot.removeLanguageLinks(text)
text = pywikibot.removeCategoryLinks(text)
subst = lre.subst()
# delete all spaces
subst.append((r"\s+", " "))
# delete all external links
subst.append((r"(?s)(?<!\[)\[(?!\[).*?\]", ""))
subst.append((r"[\[\]]", ""))
text = subst.process(text)
dic["newtext"] = text
dic["len"] = {}
dic["len"]["value"] = len(text)
dic["len"]["result"] = "passed" if (dic["len"]["value"] >= 2000) else "failed"
print json.dumps(dic)
if __name__ == "__main__":
args, site, conf = wp.pre("DYK Checker")
try:
glob()
main()
except:
wp.posterror()
else:
wp.post()
|
Python
| 0.689513
|
@@ -1111,36 +1111,8 @@
t()%0A
- # delete all spaces%0A
@@ -1135,11 +1135,14 @@
((r%22
-%5Cs+
+%5B %5Ct%5D*
%22, %22
@@ -1158,36 +1158,36 @@
- # delete all external links
+subst.append((r%22%5Cn*%22, %22%5Cn%22))
%0A
|
82689e758d15aad49c7ca6f9c64eed5ea82760b7
|
Add PlaceholderField to the south ignore list.
|
fluent_contents/models/fields.py
|
fluent_contents/models/fields.py
|
from django.contrib.contenttypes.generic import GenericRelation, GenericRel
from django.contrib.contenttypes.models import ContentType
from django.db.models.query_utils import Q
from django.utils.functional import lazy
from django.utils.text import capfirst
from fluent_contents.forms.fields import PlaceholderFormField
from fluent_contents.models import Placeholder, ContentItem
__all__ = (
'PlaceholderRelation', 'ContentItemRelation',
'PlaceholderField',
)
# The PlaceholderField is inspired by Django CMS
# Yet uses a different methology to access the fields.
#
# In Django CMS it's a ForeignKey to Placeholder.
# Here, the Placeholder has a GenericForeignKey to the parent - hence it will be deleted when the parent is removed -
# so the PlaceholderField is merely a reverse GenericRelation.
#
# In the admin, the logic of the PlaceholderEditor code can be reused.
class PlaceholderRelation(GenericRelation):
"""
A :class:`~django.contrib.contenttypes.generic.GenericRelation` which can be applied to a parent model that
is expected to be referenced be a :class:`~fluent_contents.models.Placeholder`. For example:
.. code-block:: python
class Page(models.Model):
placeholder_set = PlaceholderRelation()
"""
def __init__(self, **kwargs):
defaults = {
'limit_choices_to': Q(
parent_type=lazy(lambda: ContentType.objects.get_for_model(Placeholder), ContentType)()
)
}
defaults.update(kwargs)
super(PlaceholderRelation, self).__init__(to=Placeholder,
object_id_field='parent_id', content_type_field='parent_type', **defaults)
class ContentItemRelation(GenericRelation):
"""
A :class:`~django.contrib.contenttypes.generic.GenericRelation` which can be applied to a parent model that
is expected to be referenced by the :class:`~fluent_contents.models.ContentItem` classes. For example:
.. code-block:: python
class Page(models.Model):
contentitem_set = ContentItemRelation()
"""
def __init__(self, **kwargs):
super(ContentItemRelation, self).__init__(to=ContentItem,
object_id_field='parent_id', content_type_field='parent_type', **kwargs)
class PlaceholderRel(GenericRel):
"""
The internal :class:`~django.contrib.contenttypes.generic.GenericRel`
that is used by the :class:`PlaceholderField` to support queries.
"""
def __init__(self, slot):
limit_choices_to = Q(
parent_type=lazy(lambda: ContentType.objects.get_for_model(Placeholder), ContentType)(),
slot=slot,
)
# TODO: make sure reverse queries work properly
super(PlaceholderRel, self).__init__(
to=Placeholder,
related_name=None, # NOTE: must be unique for app/model/slot.
limit_choices_to=limit_choices_to
)
class PlaceholderFieldDescriptor(object):
"""
This descriptor is placed on the PlaceholderField model instance
by the :func:`~PlaceholderField.contribute_to_class` function.
This causes ``instance.field`` to return a :class:`~fluent_contents.models.Placeholder` object.
"""
def __init__(self, slot):
"""Set the slot this descriptor is created for."""
self.slot = slot
def __get__(self, instance, instance_type=None):
"""Return the placeholder by slot."""
if instance is None:
return self
return Placeholder.objects.get_by_slot(instance, self.slot)
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Descriptor must be accessed via instance")
if value == "-DUMMY-":
return
raise NotImplementedError("Setting Placeholder value is not supported, use Placeholder.objects.create_for_parent() instead.")
class PlaceholderField(PlaceholderRelation):
"""
The model field to add :class:`~fluent_contents.models.ContentItem` objects to a model.
:param slot: A programmatic name to identify the placeholder.
:param plugins: Optional, define which plugins are allowed to be used. This can be a list of names, or :class:`~fluent_contents.extensions.ContentPlugin` references.
:type slot: str
:type plugins: list
This class provides the form fields for the field. Use this class in a model to use it:
.. code-block:: python
class Article(models.Model):
contents = PlaceholderField("article_contents")
The data itself is stored as reverse relation in the :class:`~fluent_contents.models.ContentItem` object.
Hence, all contents will be cleaned up properly when the parent model is deleted.
The placeholder will be displayed in the admin:
.. image:: /images/admin/placeholderfieldadmin1.png
:width: 770px
:height: 562px
:alt: django-fluent-contents placeholder field preview
"""
def __init__(self, slot, plugins=None, **kwargs):
"""
Initialize the placeholder field.
"""
super(PlaceholderField, self).__init__(**kwargs)
self.slot = slot
self._plugins = plugins
# Overwrite some hardcoded defaults from the base class.
self.editable = True
self.blank = True # TODO: support blank: False to enforce adding at least one plugin.
self.rel = PlaceholderRel(self.slot) # This support queries
def formfield(self, **kwargs):
"""
Returns a :class:`PlaceholderFormField` instance for this database Field.
"""
defaults = {
'label': capfirst(self.verbose_name),
'help_text': self.help_text,
'required': not self.blank,
}
defaults.update(kwargs)
return PlaceholderFormField(slot=self.slot, plugins=self._plugins, **defaults)
def contribute_to_class(self, cls, name):
"""
Internal Django method to associate the field with the Model; it assigns the descriptor.
"""
super(PlaceholderField, self).contribute_to_class(cls, name)
# overwrites what instance.<colname> returns; give direct access to the placeholder
setattr(cls, name, PlaceholderFieldDescriptor(self.slot))
# Make placeholder fields easy to find
if not hasattr(cls._meta, 'placeholder_fields'):
cls._meta.placeholder_fields = {}
cls._meta.placeholder_fields[name] = self
# Configure the revere relation if possible.
# TODO: make sure reverse queries work properly
if self.rel.related_name is None:
# Make unique for model (multiple models can use same slotnane)
self.rel.related_name = '{app}_{model}_{slot}_FIXME'.format(
app=cls._meta.app_label,
model=cls._meta.object_name.lower(),
slot=self.slot
)
# Remove attribute must exist for the delete page. Currently it's not actively used.
# The regular ForeignKey assigns a ForeignRelatedObjectsDescriptor to it for example.
# In this case, the PlaceholderRelation is already the reverse relation.
# Being able to move forward from the Placeholder to the derived models does not have that much value.
setattr(self.rel.to, self.rel.related_name, None)
@property
def plugins(self):
"""
Get the set of plugins that this field may display.
"""
from fluent_contents import extensions
if self._plugins is None:
return extensions.plugin_pool.get_plugins()
else:
return extensions.plugin_pool.get_plugins_by_name(*self._plugins)
def value_from_object(self, obj):
"""
Internal Django method, used to return the placeholder ID when exporting the model instance.
"""
try:
# not using self.attname, access the descriptor instead.
placeholder = getattr(obj, self.name)
except Placeholder.DoesNotExist:
return None # Still allow ModelForm / admin to open and create a new Placeholder if the table was truncated.
return placeholder.id if placeholder else None # Be consistent with other fields, like ForeignKey
try:
from south.modelsinspector import add_ignored_fields
except ImportError:
pass
else:
# South 0.7.x ignores GenericRelation fields but doesn't ignore subclasses.
# Taking the same fix as applied in http://south.aeracode.org/ticket/414
_name_re = "^" + __name__.replace(".", "\.")
add_ignored_fields((
_name_re + "\.PlaceholderRelation",
_name_re + "\.ContentItemRelation",
))
|
Python
| 0
|
@@ -8613,16 +8613,57 @@
ields((%0A
+ _name_re + %22%5C.PlaceholderField%22,%0A
|
e5daf961a0f1c45b1f59193d1e24a6bf0ae47e9d
|
add store name to pickup history join and leave
|
foodsaving/stores/serializers.py
|
foodsaving/stores/serializers.py
|
from datetime import timedelta
import dateutil.rrule
from django.db import transaction
from django.dispatch import Signal
from django.utils import timezone
from rest_framework import serializers
from django.utils.translation import ugettext as _
from config import settings
from foodsaving.history.utils import get_changed_data
from foodsaving.stores.models import PickupDate as PickupDateModel
from foodsaving.stores.models import PickupDateSeries as PickupDateSeriesModel
from foodsaving.stores.models import Store as StoreModel
post_pickup_create = Signal()
post_pickup_modify = Signal()
post_pickup_join = Signal()
post_pickup_leave = Signal()
post_series_create = Signal()
post_series_modify = Signal()
post_store_create = Signal()
post_store_modify = Signal()
class PickupDateSerializer(serializers.ModelSerializer):
class Meta:
model = PickupDateModel
fields = ['id', 'date', 'series', 'store', 'max_collectors', 'collector_ids']
update_fields = ['date', 'max_collectors']
extra_kwargs = {
'series': {'read_only': True},
}
collector_ids = serializers.PrimaryKeyRelatedField(
source='collectors',
many=True,
read_only=True
)
def validate_store(self, store):
if not self.context['request'].user.groups.filter(store=store).exists():
raise serializers.ValidationError(_('You are not member of the store\'s group.'))
return store
def create(self, validated_data):
pickupdate = super().create(validated_data)
post_pickup_create.send(
sender=self.__class__,
group=pickupdate.store.group,
store=pickupdate.store,
user=self.context['request'].user,
payload=self.initial_data
)
return pickupdate
def update(self, pickupdate, validated_data):
selected_validated_data = {}
for attr in self.Meta.update_fields:
if attr in validated_data:
selected_validated_data[attr] = validated_data[attr]
if pickupdate.series:
if attr == 'max_collectors':
selected_validated_data['is_max_collectors_changed'] = True
elif attr == 'date':
selected_validated_data['is_date_changed'] = True
changed_data = get_changed_data(pickupdate, selected_validated_data)
super().update(pickupdate, selected_validated_data)
if changed_data:
post_pickup_modify.send(
sender=self.__class__,
group=pickupdate.store.group,
store=pickupdate.store,
user=self.context['request'].user,
payload=changed_data
)
return pickupdate
def validate_date(self, date):
if not date > timezone.now() + timedelta(minutes=10):
raise serializers.ValidationError(_('The date should be in the future.'))
return date
class PickupDateJoinSerializer(serializers.ModelSerializer):
class Meta:
model = PickupDateModel
fields = []
def update(self, pickup_date, validated_data):
user = self.context['request'].user
pickup_date.collectors.add(user)
post_pickup_join.send(
sender=self.__class__,
group=pickup_date.store.group,
store=pickup_date.store,
user=user
)
return pickup_date
class PickupDateLeaveSerializer(serializers.ModelSerializer):
class Meta:
model = PickupDateModel
fields = []
def update(self, pickup_date, validated_data):
user = self.context['request'].user
pickup_date.collectors.remove(user)
post_pickup_leave.send(
sender=self.__class__,
group=pickup_date.store.group,
store=pickup_date.store,
user=user
)
return pickup_date
class PickupDateSeriesSerializer(serializers.ModelSerializer):
class Meta:
model = PickupDateSeriesModel
fields = ['id', 'max_collectors', 'store', 'rule', 'start_date']
update_fields = ('max_collectors', 'start_date', 'rule')
def create(self, validated_data):
series = super().create(validated_data)
series.update_pickup_dates()
post_series_create.send(
sender=self.__class__,
group=series.store.group,
store=series.store,
user=self.context['request'].user,
payload=self.initial_data
)
return series
def update(self, series, validated_data):
selected_validated_data = {}
for attr in self.Meta.update_fields:
if attr in validated_data:
selected_validated_data[attr] = validated_data[attr]
changed_data = get_changed_data(series, selected_validated_data)
super().update(series, selected_validated_data)
series.update_pickup_dates()
if changed_data:
post_series_modify.send(
sender=self.__class__,
group=series.store.group,
store=series.store,
user=self.context['request'].user,
payload=changed_data
)
return series
def validate_store(self, store):
if not self.context['request'].user.groups.filter(store=store).exists():
raise serializers.ValidationError(_('You are not member of the store\'s group.'))
return store
def validate_start_date(self, date):
date = date.replace(second=0, microsecond=0)
return date
def validate_rule(self, rule_string):
rrule = dateutil.rrule.rrulestr(rule_string)
if not isinstance(rrule, dateutil.rrule.rrule):
raise serializers.ValidationError(_('Only single recurrence rules are allowed.'))
return rule_string
class StoreSerializer(serializers.ModelSerializer):
class Meta:
model = StoreModel
fields = ['id', 'name', 'description', 'group', 'address', 'latitude', 'longitude', 'weeks_in_advance']
extra_kwargs = {
'name': {
'min_length': 3
},
'description': {
'trim_whitespace': False,
'max_length': settings.DESCRIPTION_MAX_LENGTH
}
}
def create(self, validated_data):
store = super().create(validated_data)
post_store_create.send(
sender=self.__class__,
group=store.group,
store=store,
user=self.context['request'].user,
payload=self.initial_data
)
return store
def update(self, store, validated_data):
changed_data = get_changed_data(store, validated_data)
store = super().update(store, validated_data)
if 'weeks_in_advance' in changed_data:
with transaction.atomic():
for series in store.series.all():
series.update_pickup_dates()
if changed_data:
post_store_modify.send(
sender=self.__class__,
group=store.group,
store=store,
user=self.context['request'].user,
payload=self.initial_data
)
return store
def validate_group(self, group_id):
if group_id not in self.context['request'].user.groups.all():
raise serializers.ValidationError(_('You are not a member of this group.'))
return group_id
def validate_weeks_in_advance(self, w):
if w < 1:
raise serializers.ValidationError(_('Set at least one week in advance'))
return w
|
Python
| 0
|
@@ -3401,32 +3401,92 @@
user=user
+,%0A payload=%7B'store_name': pickup_date.store.name%7D
%0A )%0A
@@ -3943,24 +3943,84 @@
user=user
+,%0A payload=%7B'store_name': pickup_date.store.name%7D
%0A )%0A
|
5c5de666e86d6f2627df79762b0e3b0f188861d4
|
Fix timestamp comparison on ciresources
|
scripts/claim_vlan.py
|
scripts/claim_vlan.py
|
import MySQLdb
from datetime import datetime, timedelta
db = MySQLdb.connect(host="10.0.196.2",
user="ciuser",
passwd="secret",
db="ciresources")
cur = db.cursor()
f = '%Y-%m-%d %H:%M:%S'
three_hours_ago_dt = datetime.utcnow() - timedelta(hours=3)
three_hours_ago = three_hours_ago_dt.strftime(f)
cur.execute("SELECT * FROM vlans WHERE locked!=true OR timestamp > '%s' LIMIT 1 FOR UPDATE" % three_hours_ago)
row = cur.fetchone()
if row is not None:
min_vlan = row[0]
max_vlan = row[1]
vlans = {"min_vlan": min_vlan, "max_vlan":max_vlan, "timestamp": datetime.now().strftime(f)}
cur.execute("UPDATE vlans SET locked=true, timestamp='%(timestamp)s' where min_vlan=%(min_vlan)s AND max_vlan=%(max_vlan)s" % vlans)
else:
raise Exception("No free VLANs found!")
db.commit()
db.close()
print("%(min_vlan)s:%(max_vlan)s" % vlans)
|
Python
| 0.000049
|
@@ -425,11 +425,9 @@
tamp
- %3E
+%3C
'%25s'
|
77acb5a79c41ac83bb4eb851682edd248c62462c
|
Add test for featured nullable
|
test/test_model/test_model_app.py
|
test/test_model/test_model_app.py
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from default import Test, db, with_context
from nose.tools import assert_raises
from mock import patch
from pybossa.model.app import App
from pybossa.model.user import User
from sqlalchemy.exc import IntegrityError
from factories import AppFactory
class TestModelApp(Test):
@with_context
def test_app_errors(self):
"""Test project model errors."""
app = App(name='Project',
short_name='proj',
description='desc',
owner_id=None)
# App.owner_id should not be nullable
db.session.add(app)
assert_raises(IntegrityError, db.session.commit)
db.session.rollback()
# App.name should not be nullable
user = User(email_addr="john.doe@example.com",
name="johndoe",
fullname="John Doe",
locale="en")
db.session.add(user)
db.session.commit()
user = db.session.query(User).first()
app.owner_id = user.id
app.name = None
db.session.add(app)
assert_raises(IntegrityError, db.session.commit)
db.session.rollback()
app.name = ''
db.session.add(app)
assert_raises(IntegrityError, db.session.commit)
db.session.rollback()
# App.short_name should not be nullable
app.name = "Project"
app.short_name = None
db.session.add(app)
assert_raises(IntegrityError, db.session.commit)
db.session.rollback()
app.short_name = ''
db.session.add(app)
assert_raises(IntegrityError, db.session.commit)
db.session.rollback()
# App.description should not be nullable
db.session.add(app)
app.short_name = "project"
app.description = None
assert_raises(IntegrityError, db.session.commit)
db.session.rollback()
app.description = ''
db.session.add(app)
assert_raises(IntegrityError, db.session.commit)
db.session.rollback()
def test_needs_password_no_password_key(self):
"""Test needs_password returns false if the app has not a password"""
app = AppFactory.build(info={})
assert app.needs_password() is False
@patch('pybossa.model.app.signer')
def test_needs_password_empty_password_key(self, mock_signer):
"""Test needs_password returns false if the app has an empty password"""
mock_signer.loads = lambda x: x
app = AppFactory.build(info={'passwd_hash': None})
assert app.needs_password() is False
@patch('pybossa.model.app.signer')
def test_needs_password_with_password_key_and_value(self, mock_signer):
"""Test needs_password returns true if the app has a password"""
mock_signer.loads = lambda x: x
app = AppFactory.build(info={'passwd_hash': 'mypassword'})
assert app.needs_password() is True
@patch('pybossa.model.app.signer')
def test_check_password(self, mock_signer):
mock_signer.loads = lambda x: x
app = AppFactory.build(info={'passwd_hash': 'mypassword'})
assert app.check_password('mypassword')
@patch('pybossa.model.app.signer')
def test_check_password_bad_password(self, mock_signer):
mock_signer.loads = lambda x: x
app = AppFactory.build(info={'passwd_hash': 'mypassword'})
assert not app.check_password('notmypassword')
|
Python
| 0
|
@@ -2771,16 +2771,246 @@
back()%0A%0A
+ # App.featured should not be nullable%0A app.description = 'description'%0A app.featured = None%0A db.session.add(app)%0A assert_raises(IntegrityError, db.session.commit)%0A db.session.rollback()%0A%0A
%0A def
|
995c89c41bc432be5e49fb1c3f5206b0caff13dd
|
Check for NOPAGE in advanced tests.
|
tests/test_advanced.py
|
tests/test_advanced.py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
WPTools tests that use the network
"""
from __future__ import print_function
import argparse
import random
import unittest
import tests.titles as titles
import wptools
LANG = [
'en', 'ceb', 'sv', 'de', 'nl', 'fr', 'ru', 'it', 'es', 'war',
'pl', 'vi', 'ja', 'pt', 'zh', 'uk', 'fa', 'ca', 'ar', 'no', 'sh',
'fi', 'hu', 'id', 'ko', 'cs', 'ro', 'sr', 'ms', 'tr', 'eu', 'eo',
'bg', 'da', 'hy', 'min', 'kk', 'sk', 'he', 'lt', 'hr', 'ce', 'et',
'sl', 'be', 'el', 'nn', 'uz', 'simple', 'la', 'az', 'ur', 'hi',
'vo', 'th', 'ka', 'ta', 'cy', 'mk', 'mg', 'oc', 'tl', 'lv', 'ky',
'bs', 'tt', 'new', 'tg', 'sq', 'te', 'pms', 'br', 'bn', 'ml',
'ht', 'jv', 'ast', 'lb', 'mr', 'af', 'sco', 'pnb', 'ga', 'is',
'cv', 'ba', 'azb', 'fy', 'su', 'sw', 'my', 'lmo', 'an', 'yo',
'ne', 'gu', 'io', 'pa', 'nds', 'scn', 'bpy', 'als', 'bar', 'ku',
'kn', 'ia', 'qu', 'ckb', 'mn', 'arz',
]
WIKIS = [
'commons.wikimedia.org',
'en.wikibooks.org',
'en.wikinews.org',
'en.wikiquote.org',
'en.wikisource.org',
'en.wikiversity.org',
'en.wikivoyage.org',
'en.wiktionary.org',
]
class WPToolsPickTest(unittest.TestCase):
"""
SELECTED (cherry-picked) TESTS
"""
def test_selected(self):
"""
Test overall functionality from random i18n choice
"""
title = titles.title()
page = wptools.page(title['title'], lang=title['lang'])
page.get(show=False)
self.assertTrue(page.data['pageid'] is not None)
def test_lookup_unicode_error(self):
"""
Raise LookupError without UnicodeDecodeError. Issue #29
"""
try:
wptools.page('Πε_TEST').get(False)
self.fail("failed to raise LookupError")
except LookupError as detail:
print(detail)
class WPToolsRandomTest(unittest.TestCase):
"""
RANDOM TESTS
"""
def test_random(self):
"""
Get random title from random language wiki
"""
page = wptools.page(lang=random.choice(LANG))
page.get(show=False)
self.assertTrue(page.data['pageid'] is not None)
def test_random_wiki(self):
"""
Get random title from random Wikmedia project
"""
page = wptools.page(wiki=random.choice(WIKIS))
self.assertTrue(page.data['pageid'] is not None)
class WPToolsRestBaseTest(unittest.TestCase):
"""
RESTBase TESTS
"""
class WPToolsToolTest(unittest.TestCase):
"""
WPTOOL TESTS
"""
@staticmethod
def test_wptool():
'''
Get random page via wptool
'''
from scripts.wptool import main
from collections import namedtuple
args = namedtuple('Args', ['H', 'l', 'n', 'q', 's', 't', 'v', 'w'])
cli = {'H': False, 'l': 'en', 'n': False, 'q': False, 's': True,
't': '', 'v': False, 'w': ''}
main(args(**cli))
class WPToolsUtilsTest(unittest.TestCase):
"""
Utils Tests
"""
def test_infobox_subelements(self):
"""
Get infobox data with sub-elements. Issue #66
"""
page = wptools.page("ONE OK ROCK", lang='ja').get_parse(show=False)
infobox = page.data['infobox']
self.assertGreater(len(infobox['Genre'].split('<br')), 5)
def test_infobox_children(self):
"""
Get infobox data with list values. Issue #62
"""
page = wptools.page('Lewisit', lang='de').get_parse(show=False)
infobox = page.data['infobox']
self.assertGreater(len(infobox['Dichte'].split('*')), 1)
class WPToolsWikidataTest(unittest.TestCase):
"""
Wikidata Tests
"""
if __name__ == '__main__':
# unittest.main()
from unittest import TestLoader
suites = {
'pick': TestLoader().loadTestsFromTestCase(WPToolsPickTest),
'rand': TestLoader().loadTestsFromTestCase(WPToolsRandomTest),
'restbase': TestLoader().loadTestsFromTestCase(WPToolsRestBaseTest),
'tool': TestLoader().loadTestsFromTestCase(WPToolsToolTest),
'utils': TestLoader().loadTestsFromTestCase(WPToolsUtilsTest),
'wikidata': TestLoader().loadTestsFromTestCase(WPToolsWikidataTest),
}
suites['all'] = unittest.TestSuite(suites.values())
argp = argparse.ArgumentParser()
argp.add_argument('suite', choices=suites.keys())
args = argp.parse_args()
unittest.TextTestRunner().run(suites[args.suite])
|
Python
| 0
|
@@ -952,16 +952,42 @@
rz',%0A%5D%0A%0A
+NOPAGE = 'aspofinwepobw'%0A%0A
WIKIS =
@@ -1573,32 +1573,300 @@
%5D is not None)%0A%0A
+ def test_not_found(self):%0A %22%22%22%0A Try to get a non-existent page%0A %22%22%22%0A try:%0A wptools.page(NOPAGE).get(False)%0A self.fail(%22failed to raise LookupError%22)%0A except LookupError as detail:%0A print(detail)%0A%0A
def test_loo
|
cad938d023f7c41b95846c0ba04b6e92e721e434
|
Fix flake8
|
tests/cli/test_config.py
|
tests/cli/test_config.py
|
import io
from textwrap import dedent
import pytest
import vdirsyncer.cli.utils
from vdirsyncer import cli
@pytest.fixture
def read_config(tmpdir):
def inner(cfg):
f = io.StringIO(dedent(cfg.format(base=str(tmpdir))))
return cli.utils.read_config(f)
return inner
def test_read_config(read_config, monkeypatch):
errors = []
monkeypatch.setattr('vdirsyncer.cli.cli_logger.error', errors.append)
general, pairs, storages = read_config(u'''
[general]
status_path = /tmp/status/
[pair bob]
a = bob_a
b = bob_b
foo = bar
bam = true
[storage bob_a]
type = filesystem
path = /tmp/contacts/
fileext = .vcf
yesno = false
number = 42
[storage bob_b]
type = carddav
[bogus]
lol = true
''')
assert general == {'status_path': '/tmp/status/'}
assert pairs == {'bob': ('bob_a', 'bob_b', {'bam': True, 'foo': 'bar'})}
assert storages == {
'bob_a': {'type': 'filesystem', 'path': '/tmp/contacts/', 'fileext':
'.vcf', 'yesno': False, 'number': 42,
'instance_name': 'bob_a'},
'bob_b': {'type': 'carddav', 'instance_name': 'bob_b'}
}
assert len(errors) == 1
assert errors[0].startswith('Unknown section')
assert 'bogus' in errors[0]
def test_storage_instance_from_config(monkeypatch):
def lol(**kw):
assert kw == {'foo': 'bar', 'baz': 1}
return 'OK'
monkeypatch.setitem(cli.utils.storage_names._storages,
'lol', lol)
config = {'type': 'lol', 'foo': 'bar', 'baz': 1}
assert cli.utils.storage_instance_from_config(config) == 'OK'
def test_parse_pairs_args():
pairs = {
'foo': ('bar', 'baz', {'conflict_resolution': 'a wins'},
{'storage_option': True}),
'one': ('two', 'three', {'collections': 'a,b,c'}, {}),
'eins': ('zwei', 'drei', {'ha': True}, {})
}
assert sorted(
cli.utils.parse_pairs_args(['foo/foocoll', 'one', 'eins'], pairs)
) == [
('eins', set()),
('foo', {'foocoll'}),
('one', set()),
]
def test_missing_general_section(read_config):
with pytest.raises(cli.CliError) as excinfo:
read_config(u'''
[pair my_pair]
a = my_a
b = my_b
[storage my_a]
type = filesystem
path = {base}/path_a/
fileext = .txt
[storage my_b]
type = filesystem
path = {base}/path_b/
fileext = .txt
''')
assert 'Invalid general section.' in excinfo.value.msg
def test_wrong_general_section(read_config):
with pytest.raises(cli.CliError) as excinfo:
read_config(u'''
[general]
wrong = true
''')
assert 'Invalid general section.' in excinfo.value.msg
assert excinfo.value.problems == [
'general section doesn\'t take the parameters: wrong',
'general section is missing the parameters: status_path'
]
def test_invalid_storage_name():
f = io.StringIO(dedent(u'''
[general]
status_path = {base}/status/
[storage foo.bar]
'''))
with pytest.raises(cli.CliError) as excinfo:
cli.utils.read_config(f)
assert 'invalid characters' in str(excinfo.value).lower()
def test_parse_config_value(capsys):
invalid = object()
def x(s):
try:
rv = cli.utils.parse_config_value(s)
except ValueError:
return invalid
else:
warnings = capsys.readouterr()[1]
return rv, len(warnings.splitlines())
assert x('123 # comment!') is invalid
assert x('True') == ('True', 1)
assert x('False') == ('False', 1)
assert x('Yes') == ('Yes', 1)
assert x('None') == ('None', 1)
assert x('"True"') == ('True', 0)
assert x('"False"') == ('False', 0)
assert x('"123 # comment!"') == ('123 # comment!', 0)
assert x('true') == (True, 0)
assert x('false') == (False, 0)
assert x('null') == (None, 0)
assert x('3.14') == (3.14, 0)
assert x('') == ('', 0)
assert x('""') == ('', 0)
def test_invalid_collections_arg():
f = io.StringIO(dedent(u'''
[general]
status_path = /tmp/status/
[pair foobar]
a = foo
b = bar
collections = [null]
[storage foo]
type = filesystem
path = /tmp/foo/
fileext = .txt
[storage bar]
type = filesystem
path = /tmp/bar/
fileext = .txt
'''))
with pytest.raises(cli.utils.CliError) as excinfo:
cli.utils.read_config(f)
assert (
'Section `pair foobar`: `collections` parameter must be a list of '
'collection names (strings!) or `null`.'
) in str(excinfo.value)
|
Python
| 0
|
@@ -74,16 +74,24 @@
li.utils
+ # noqa
%0Afrom vd
|
ebdc90e269d9ec85b7e6def2ca0701f04caec6ca
|
return proper algorithm in KDE
|
sklearn/neighbors/kde.py
|
sklearn/neighbors/kde.py
|
"""
Kernel Density Estimation
-------------------------
"""
# Author: Jake Vanderplas <jakevdp@cs.washington.edu>
import numpy as np
from ..base import BaseEstimator
from ..utils import array2d, check_random_state
from .ball_tree import BallTree, DTYPE
from .kd_tree import KDTree
VALID_KERNELS = ['gaussian', 'tophat', 'epanechnikov',
'exponential', 'linear', 'cosine']
TREE_DICT = {'ball_tree':BallTree,
'kd_tree':KDTree}
# TODO: implement a brute force version for testing purposes
# TODO: bandwidth estimation
# TODO: create a density estimation base class?
class KernelDensity(BaseEstimator):
"""Kernel Density Estimation
Parameters
----------
bandwidth : float
The bandwidth of the kernel
algorithm : string
The tree algorithm to use. Valid options are
['kd_tree'|'ball_tree'|'auto']. Default is 'auto'.
kernel : string
The kernel to use. Valid kernels are
['gaussian'|'tophat'|'epanechnikov'|'exponential'|'linear'|'cosine']
Default is 'gaussian'.
metric : string
The distance metric to use. Note that not all metrics are
valid with all algorithms. Refer to the documentation of
:class:`BallTree` and :class:`KDTree` for a description of
available algorithms. Note that the normalization of the density
output is correct only for the Euclidean distance metric. Default
is 'euclidean'.
atol : float
The desired absolute tolerance of the result. A larger tolerance will
generally lead to faster execution. Default is 0.
rtol : float
The desired relative tolerance of the result. A larger tolerance will
generally lead to faster execution. Default is 1E-8.
breadth_first : boolean
If true (default), use a breadth-first approach to the problem.
Otherwise use a depth-first approach.
leaf_size : int
Specify the leaf size of the underlying tree. See :class:`BallTree`
or :class:`KDTree` for details. Default is 40.
metric_params : dict
Additional parameters to be passed to the tree for use with the
metric. For more information, see the documentation of
:class:`BallTree` or :class:`KDTree`.
"""
def __init__(self, bandwidth=1.0, algorithm='auto',
kernel='gaussian', metric="euclidean", atol=0, rtol=0,
breadth_first=True, leaf_size=40, metric_params=None):
self.algorithm = algorithm
self.bandwidth = bandwidth
self.kernel = kernel
self.metric = metric
self.atol = atol
self.rtol = rtol
self.breadth_first = breadth_first
self.leaf_size = leaf_size
self.metric_params = metric_params
# run the choose algorithm code so that exceptions will happen here
# we're using clone() in the GenerativeBayes classifier,
# so we can't do this kind of logic in __init__
self._choose_algorithm(self.algorithm, self.metric)
if bandwidth <= 0:
raise ValueError("bandwidth must be positive")
if kernel not in VALID_KERNELS:
raise ValueError("invalid kernel: '{0}'".format(kernel))
def _choose_algorithm(self, algorithm, metric):
# given the algorithm string + metric string, choose the optimal
# algorithm to compute the result.
if algorithm == 'auto':
# use KD Tree if possible
if metric in KDTree.valid_metrics:
return 'kd_tree'
elif metric in BallTree.valid_metrics:
return 'ball_tree'
else:
raise ValueError("invalid metric: '{0}'".format(metric))
elif algorithm in TREE_DICT:
if metric not in TREE_DICT[algorithm].valid_metrics:
raise ValueError("invalid metric for {0}: "
"'{1}'".format(TREE_DICT[algorithm],
metric))
else:
raise ValueError("invalid algorithm: '{0}'".format(algorithm))
def fit(self, X):
"""Fit the Kernel Density model on the data
Parameters
----------
X: array_like, shape (n_samples, n_features)
List of n_features-dimensional data points. Each row
corresponds to a single data point.
"""
algorithm = self._choose_algorithm(self.algorithm, self.metric)
X = array2d(X, order='C', dtype=DTYPE)
kwargs = self.metric_params
if kwargs is None:
kwargs = {}
self.tree_ = TREE_DICT[algorithm](X, metric=self.metric,
leaf_size=self.leaf_size,
**kwargs)
return self
def eval(self, X):
"""Evaluate the model on the data
Parameters
----------
X : array_like
An array of points to query. Last dimension should match dimension
of training data (n_features)
Returns
-------
density : ndarray
The array of log(density) evaluations. This has shape X.shape[:-1]
"""
# The returned density is normalized to the number of points.
# For it to be a probability, we must scale it. For this reason
# we'll also scale atol.
X = array2d(X, order='C', dtype=DTYPE)
N = self.tree_.data.shape[0]
atol_N = self.atol * N
log_density = self.tree_.kernel_density(
X, h=self.bandwidth,
kernel=self.kernel,
atol=atol_N, rtol=self.rtol,
breadth_first=self.breadth_first,
return_log=True)
log_density -= np.log(N)
return log_density
def score(self, X):
"""Compute the log probability under the model.
Parameters
----------
X : array_like, shape (n_samples, n_features)
List of n_features-dimensional data points. Each row
corresponds to a single data point.
Returns
-------
logprob : array_like, shape (n_samples,)
Log probabilities of each data point in X
"""
return np.sum(self.eval(X))
def sample(self, n_samples=1, random_state=None):
"""Generate random samples from the model.
Currently, this is implemented only for gaussian and tophat kernels.
Parameters
----------
n_samples : int, optional
Number of samples to generate. Defaults to 1.
random_state: RandomState or an int seed (0 by default)
A random number generator instance
Returns
-------
X : array_like, shape (n_samples, n_features)
List of samples
"""
# TODO: implement sampling for other valid kernel shapes
if self.kernel not in ['gaussian', 'tophat']:
raise NotImplementedError()
data = np.asarray(self.tree_.data)
rng = check_random_state(random_state)
i = rng.randint(data.shape[0], size=n_samples)
if self.kernel == 'gaussian':
return rng.normal(data[i], self.bandwidth)
elif self.kernel == 'tophat':
return data[i] - 1 + 2 * rng.random_sample(i.shape)
else:
raise NotImplementedError()
|
Python
| 0.00013
|
@@ -4007,32 +4007,61 @@
metric))%0A
+ return algorithm%0A
else:%0A
|
f6b26086155c75a88f8b3481ca7a5fbeb0a225e4
|
Add empty line between parameter and return value description
|
sklearn/utils/_ransac.py
|
sklearn/utils/_ransac.py
|
import numpy as np
def ransac(X, y, estimator, min_n_samples, residual_threshold,
is_data_valid=None, is_model_valid=None, max_trials=100,
stop_n_inliers=np.inf, stop_score=np.inf):
"""Fit a model to data with the RANSAC (random sample consensus) algorithm.
Parameters
----------
X : numpy array or sparse matrix of shape [n_samples, n_features]
Training data.
y : numpy array of shape [n_samples, n_targets]
Target values
estimator : object
Estimator object which implements the following methods:
* `fit(X, y)`: Fit model to given training data and target values.
* `predict(X)`: Predict using the estimated model.
* `score(X)`: Returns the mean accuracy on the given test data.
residual_threshold : float
Maximum residual for a data sample to be classified as an inlier.
is_data_valid : function, optional
This function is called with the randomly selected data before the
model is fitted to it: `is_data_valid(X, y)`.
is_model_valid : function, optional
This function is called with the estimated model and the randomly
selected data: `is_model_valid(model, X, y)`, .
max_trials : int, optional
Maximum number of iterations for random sample selection.
stop_n_inliers : int, optional
Stop iteration if at least this number of inliers are found.
stop_score : float, optional
Stop iteration if score is greater equal than this threshold.
Returns
-------
n_trials : int
Number of random selection trials.
inlier_mask : bool array of shape [n_samples]
Boolean mask of inliers classified as ``True``.
Raises
------
ValueError: If no valid consensus set could be found.
Notes
-----
RANSAC is an iterative algorithm for the robust estimation of parameters
from a subset of inliers from the complete data set. Each iteration
performs the following steps:
1. Select `min_n_samples` random samples from the original data and check
whether the set of data is valid (see `is_data_valid`).
2. Fit a model to the random subset (`estimator.fit`) and check whether
the estimated model is valid (see `is_model_valid`).
3. Classify all data as inliers or outliers by calculating the residuals
to the estimated model (`estimator.predict(X) - y`) - all data samples
with absolute residuals smaller than the `residual_threshold` are
considered as inliers.
4. Save fitted model as best model if number of inlier samples is
maximal. In case the current estimated model has the same number of
inliers, it is only considered as the best model if it has better score.
These steps are performed either a maximum number of times (`max_trials`)
or until one of the special stop criteria are met (see `stop_n_inliers` and
`stop_score`). The final model is estimated using all inlier samples of the
previously determined best model.
References
----------
.. [1] http://en.wikipedia.org/wiki/RANSAC
.. [2] http://www.cs.columbia.edu/~belhumeur/courses/compPhoto/ransac.pdf
.. [3] http://www.bmva.org/bmvc/2009/Papers/Paper355/Paper355.pdf
"""
best_n_inliers = 0
best_score = np.inf
best_inlier_mask = None
best_inlier_X = None
best_inlier_y = None
# number of data samples
n_samples = X.shape[0]
for n_trials in range(max_trials):
# choose random sample set
random_idxs = np.random.randint(0, n_samples, min_n_samples)
rsample_X = X[random_idxs]
rsample_y = y[random_idxs]
# check if random sample set is valid
if is_data_valid is not None and not is_data_valid(X, y):
continue
# fit model for current random sample set
estimator.fit(rsample_X, rsample_y)
# check if estimated model is valid
if is_model_valid is not None and not is_model_valid(estimator,
rsample_X,
rsample_y):
continue
# residuals of all data for current random sample model
rsample_residuals = np.abs(estimator.predict(X) - y)
# classify data into inliers and outliers
rsample_inlier_mask = rsample_residuals < residual_threshold
rsample_n_inliers = np.sum(rsample_inlier_mask)
# less inliers -> skip current random sample
if rsample_n_inliers < best_n_inliers \
or rsample_n_inliers == best_n_inliers == 0:
continue
# extract inlier data set
rsample_inlier_X = X[rsample_inlier_mask]
rsample_inlier_y = y[rsample_inlier_mask]
# score of inlier data set
rsample_score = estimator.score(rsample_inlier_X, rsample_inlier_y)
# same number of inliers but worse score -> skip current random sample
if rsample_n_inliers == best_n_inliers and rsample_score < best_score:
continue
# save current random sample as best sample
best_n_inliers = rsample_n_inliers
best_score = rsample_score
best_inlier_mask = rsample_inlier_mask
best_inlier_X = rsample_inlier_X
best_inlier_y = rsample_inlier_y
# break if sufficient number of inliers or score is reached
if best_n_inliers >= stop_n_inliers or best_score >= stop_score:
break
# if none of the iterations met the required criteria
if best_inlier_mask is None:
raise ValueError("RANSAC could not find valid consensus set.")
# estimate final model using all inliers
estimator.fit(best_inlier_X, best_inlier_y)
return n_trials + 1, best_inlier_mask
|
Python
| 0.000005
|
@@ -399,24 +399,25 @@
ining data.%0A
+%0A
y : nump
@@ -477,16 +477,18 @@
t values
+.%0A
%0A est
@@ -775,16 +775,17 @@
t data.%0A
+%0A
resi
@@ -881,16 +881,17 @@
inlier.%0A
+%0A
is_d
@@ -1050,16 +1050,17 @@
X, y)%60.%0A
+%0A
is_m
@@ -1221,16 +1221,17 @@
y)%60, .%0A
+%0A
max_
@@ -1319,16 +1319,17 @@
ection.%0A
+%0A
stop
@@ -1424,16 +1424,17 @@
found.%0A
+%0A
stop
@@ -1572,16 +1572,16 @@
s : int%0A
-
@@ -1615,16 +1615,17 @@
trials.%0A
+%0A
inli
|
efc3a2c31a00a2139f55ca5ce9f3cf4dac1dea1f
|
address comments
|
tests/debug_nans_test.py
|
tests/debug_nans_test.py
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for --debug_nans."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from absl.testing import parameterized
import numpy as onp
import jax
from jax import test_util as jtu
from jax.test_util import check_grads
from jax import numpy as np
from jax import random
from jax.config import config
config.parse_flags_with_absl()
class DebugNaNsTest(jtu.JaxTestCase):
def setUp(self):
self.cfg = config.read("jax_debug_nans")
config.update("jax_debug_nans", True)
def tearDown(self):
config.update("jax_debug_nans", self.cfg)
def testSingleResultPrimitive(self):
A = np.array([[1., 2.], [2., 3.]])
B = np.tanh(A)
def testMultipleResultPrimitive(self):
A = np.array([[1., 2.], [2., 3.]])
D, V = np.linalg.eig(A)
def testJitComputation(self):
A = np.array([[1., 2.], [2., 3.]])
B = jax.jit(np.tanh)(A)
|
Python
| 0
|
@@ -1038,16 +1038,17 @@
tCase):%0A
+%0A
def se
@@ -1145,16 +1145,17 @@
, True)%0A
+%0A
def te
@@ -1308,24 +1308,25 @@
np.tanh(A)%0A
+%0A
def testMu
@@ -1389,24 +1389,24 @@
%5B2., 3.%5D%5D)%0A
-
D, V = n
@@ -1421,16 +1421,17 @@
.eig(A)%0A
+%0A
def te
|
eee6dd8f6d7555f97452fb5734e299203b337ace
|
Fix fast_classifier
|
tests/fast_classifier.py
|
tests/fast_classifier.py
|
import numpy as np
class fast_classifier(object):
def __init__(self):
pass
def train(self, features, labels):
examples = {}
for f,lab in zip(features, labels):
if lab not in examples:
examples[lab] = f
return fast_model(examples)
class fast_model(object):
def __init__(self, examples):
self.examples = examples
def apply(self, features):
res = []
for f in features:
cur = None
best = +np.inf
for k,v in self.examples.iteritems():
dist = np.dot(v-f, v-f)
if dist < best:
best = dist
cur = k
res.append(k)
return res
|
Python
| 0
|
@@ -394,82 +394,61 @@
-%0A
-def apply(self, features):%0A res = %5B%5D%0A for f in features
+assert len(self.examples)%0A%0A def apply(self, f)
:%0A
@@ -453,23 +453,20 @@
-
- cur
+best
= None%0A
@@ -473,24 +473,24 @@
-
best
+_val
= +np.i
@@ -496,28 +496,24 @@
inf%0A
-
-
for k,v in s
@@ -541,13 +541,9 @@
s():
- %0A
+%0A
@@ -550,17 +550,16 @@
-
dist = n
@@ -574,20 +574,16 @@
f, v-f)%0A
-
@@ -600,22 +600,22 @@
t %3C best
-:%0A
+_val:%0A
@@ -629,20 +629,17 @@
best =
-dist
+k
%0A
@@ -651,45 +651,23 @@
- cur = k%0A res.append(k)
+best_val = dist
%0A
@@ -678,14 +678,15 @@
return
-r
+b
es
+t
%0A%0A%0A
|
83a15b47ecac219c2fe4ca1e49cfa9055f9197d2
|
Add some tests
|
tests/test_features.py
|
tests/test_features.py
|
Python
| 0.000008
|
@@ -0,0 +1,2744 @@
+from unittest import mock, TestCase%0A%0Afrom unleash_client import features%0A%0A%0Aclass TestFactory(TestCase):%0A def test_simple_case(self):%0A strategies = %7B'Foo': mock.Mock(return_value='R')%7D%0A feature = %7B'strategies': %5B%7B'name': 'Foo', 'parameters': %7B'x': 0%7D%7D%5D%7D%0A%0A result = features.feature_gates(strategies, feature)%0A%0A assert result == %5B'R'%5D%0A assert strategies%5B'Foo'%5D.called_once_with(x=0)%0A%0A def test_two_strategies(self):%0A strategies = %7B%0A 'Foo': mock.Mock(return_value='F'),%0A 'Bar': mock.Mock(return_value='B'),%0A %7D%0A feature = %7B'strategies': %5B%0A %7B'name': 'Foo', 'parameters': %7B'x': 0%7D%7D,%0A %7B'name': 'Bar', 'parameters': %7B'y': 1%7D%7D,%0A %5D%7D%0A%0A result = features.feature_gates(strategies, feature)%0A%0A assert result == %5B'F', 'B'%5D%0A assert strategies%5B'Foo'%5D.called_once_with(x=0)%0A assert strategies%5B'Bar'%5D.called_once_with(y=1)%0A%0A def test_unknown_strategy(self):%0A strategies = %7B%7D%0A feature = %7B'strategies': %5B%7B'name': 'absent', 'parameters': %7B'z': 9%7D%7D%5D%7D%0A%0A with mock.patch('unleash_client.features.log') as log:%0A result = features.feature_gates(strategies, feature)%0A%0A assert len(result) == 1%0A assert callable(result%5B0%5D)%0A assert not result%5B0%5D(basically_anything_here='foo')%0A assert log.warning.called%0A%0A%0Aclass TestFeature(TestCase):%0A def test_happy_path(self):%0A strategies = %7B'Foo': mock.Mock(return_value=lambda z: z)%7D%0A feature_def = %7B%0A 'enabled': True,%0A 'strategies': %5B%7B'name': 'Foo', 'parameters': %7B'x': 0%7D%7D%5D,%0A %7D%0A%0A toggle = features.Feature(strategies, feature_def)%0A%0A assert isinstance(toggle, features.Feature)%0A assert toggle(%7B'z': True%7D)%0A assert not toggle(%7B'z': False%7D)%0A assert toggle.choices == %7BTrue: 1, False: 1%7D%0A%0A def test_empty_strategy_list(self):%0A strategies = %7B'Foo': mock.Mock(return_value=lambda z: z)%7D%0A feature_def = %7B%0A 'enabled': True,%0A 
'strategies': %5B%5D,%0A %7D%0A%0A toggle = features.Feature(strategies, feature_def)%0A%0A assert isinstance(toggle, features.Feature)%0A assert not toggle(%7B'z': True%7D)%0A assert not toggle(%7B'z': False%7D)%0A assert toggle.choices == %7BTrue: 0, False: 2%7D%0A%0A def test_disable(self):%0A strategies = %7B'Foo': mock.Mock(return_value=lambda z: z)%7D%0A feature_def = %7B%0A 'enabled': False,%0A 'strategies': %5B%7B'name': 'Foo', 'parameters': %7B'x': 0%7D%7D%5D,%0A %7D%0A%0A toggle = features.Feature(strategies, feature_def)%0A%0A assert not toggle(%7B'z': True%7D)%0A assert not toggle(%7B'z': False%7D)%0A assert toggle.choices == %7BTrue: 0, False: 2%7D%0A
|
|
2b8b32605c9d211154f47d228038464ff5df7b56
|
fix import
|
tests/test_kernel32.py
|
tests/test_kernel32.py
|
import os
from pywincffi.core.ffi import ffi
from pywincffi.core.testutil import TestCase
from pywincffi.exceptions import WindowsAPIError
from pywincffi.kernel32 import PROCESS_QUERY_LIMITED_INFORMATION, OpenProcess
class TestOpenProcess(TestCase):
"""
Tests for :func:`pywincffi.kernel32.OpenProcess`
"""
def test_returns_handle(self):
handle = OpenProcess(
PROCESS_QUERY_LIMITED_INFORMATION,
False,
os.getpid()
)
typeof = ffi.typeof(handle)
self.assertEqual(typeof.kind, "pointer")
self.assertEqual(typeof.cname, "void *")
def test_access_denied_for_null_desired_access(self):
with self.assertRaises(WindowsAPIError) as error:
OpenProcess(0, False, os.getpid())
self.assertEqual(error.exception.code, 5)
|
Python
| 0.000001
|
@@ -156,24 +156,32 @@
kernel32
+.process
import
PROCESS_
@@ -172,16 +172,22 @@
import
+(%0A
PROCESS_
@@ -224,16 +224,17 @@
nProcess
+)
%0A%0A%0Aclass
|
b1edd17b647766091eab44d730b79ec52c8cb50d
|
Add tests for project points
|
tests/test_projects.py
|
tests/test_projects.py
|
from taiga.requestmaker import RequestMaker, RequestMakerException
from taiga.models.base import InstanceResource, ListResource
from taiga.models import User, Point, UserStoryStatus, Severity, Project, Projects
from taiga import TaigaAPI
import taiga.exceptions
import json
import requests
import unittest
from mock import patch
from .tools import create_mock_json
from .tools import MockResponse
class TestProjects(unittest.TestCase):
@patch('taiga.requestmaker.RequestMaker.get')
def test_single_project_parsing(self, mock_requestmaker_get):
mock_requestmaker_get.return_value = MockResponse(200,
create_mock_json('tests/resources/project_details_success.json'))
api = TaigaAPI(token='f4k3')
project = api.projects.get(1)
self.assertEqual(project.description, 'test 1 on real taiga')
self.assertEqual(len(project.users), 1)
self.assertTrue(isinstance(project.points[0], Point))
self.assertTrue(isinstance(project.us_statuses[0], UserStoryStatus))
self.assertTrue(isinstance(project.severities[0], Severity))
@patch('taiga.requestmaker.RequestMaker.get')
def test_list_projects_parsing(self, mock_requestmaker_get):
mock_requestmaker_get.return_value = MockResponse(200,
create_mock_json('tests/resources/projects_list_success.json'))
api = TaigaAPI(token='f4k3')
projects = api.projects.list()
self.assertEqual(projects[0].description, 'test 1 on real taiga')
self.assertEqual(len(projects), 1)
self.assertEqual(len(projects[0].users), 1)
self.assertTrue(isinstance(projects[0].users[0], User))
@patch('taiga.requestmaker.RequestMaker.post')
def test_star(self, mock_requestmaker_post):
rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
project = Project(rm, id=1)
self.assertEqual(project.star().id, 1)
mock_requestmaker_post.assert_called_with(
'/{endpoint}/{id}/star',
endpoint='projects', id=1
)
@patch('taiga.requestmaker.RequestMaker.post')
def test_unstar(self, mock_requestmaker_post):
rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
project = Project(rm, id=1)
self.assertEqual(project.unstar().id, 1)
mock_requestmaker_post.assert_called_with(
'/{endpoint}/{id}/unstar',
endpoint='projects', id=1
)
@patch('taiga.models.base.ListResource._new_resource')
def test_create_project(self, mock_new_resource):
rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
mock_new_resource.return_value = Project(rm)
sv = Projects(rm).create('PR 1', 'PR desc 1')
mock_new_resource.assert_called_with(
payload={'name': 'PR 1', 'description': 'PR desc 1'}
)
|
Python
| 0
|
@@ -2819,16 +2819,577 @@
c 1'%7D%0A )%0A
+ @patch('taiga.models.Points.create')%0A def test_add_point(self, mock_new_point):%0A rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')%0A project = Project(rm, id=1)%0A project.add_point('Point 1', 1.5)%0A mock_new_point.assert_called_with(1, 'Point 1', 1.5)%0A%0A @patch('taiga.models.Points.list')%0A def test_list_points(self, mock_list_points):%0A rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')%0A project = Project(rm, id=1)%0A project.list_points()%0A mock_list_points.assert_called_with(project=1)%0A
|
224abc99becc1683605a6dc5c3460510efef3efb
|
Comment out the pyserial TestIsCorrectVariant test.
|
tests/test_pyserial.py
|
tests/test_pyserial.py
|
from __future__ import (absolute_import, print_function, unicode_literals)
import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import io
import struct
import unittest
import threading
import time
import serial
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestIsCorrectVariant(unittest.TestCase):
def test_isMbVariant(self):
self.assertTrue (serial.__version__.index('mb2') > 0 )
def test_hasScanEndpoints(self):
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
def test_variantDoesBlocking(self):
#grab a port
#try to grab it again
import serial.tools.list_ports as lp
scan = lp.list_ports_by_vid_pid
print('autograbbing a port')
comports = lp.comports()
if( len(list(comports)) < 1):
print('no comport availabe')
self.assertFalse(True, "no comports, cannot execute test")
portname = comports[-1][0] #item 0 in last comport as the port to test
print("Connecting to serial" + portname)
s = serial.Serial(portname)
with self.assertRaises(serial.SerialException) as ex:
s = serial.Serial(portname)
if __name__ == '__main__':
unittest.main()
|
Python
| 0
|
@@ -569,16 +569,95 @@
id_pid%0A%0A
+ '''%0A # This test is commented out because it requires an actual serial port.
%0A def t
@@ -1258,16 +1258,23 @@
ortname)
+%0A '''
%0A%0A %0Aif
|
c9cb5955320a779a7128e082d7ebf347a8f3e5e4
|
Add missing import
|
tests/test_add_target.py
|
tests/test_add_target.py
|
"""
Tests for helper function for adding a target to a Vuforia database.
"""
import io
import pytest
from mock_vws import MockVWS
from vws import VWS
class TestAddTarget:
"""
Tests for adding a target.
"""
def test_add_target(
self,
client: VWS,
high_quality_image: io.BytesIO,
) -> None:
"""
No exception is raised when adding one target.
"""
name = 'x'
width = 1
target_id = client.add_target(
name=name,
width=width,
image=high_quality_image,
)
target_record = client.get_target_record(target_id=target_id)
assert target_record['name'] == name
assert target_record['width'] == width
assert target_record['active_flag'] is True
def test_add_two_targets(
self,
client: VWS,
high_quality_image: io.BytesIO,
) -> None:
"""
No exception is raised when adding two targets with different names.
This demonstrates that the image seek position is not changed.
"""
client.add_target(name='x', width=1, image=high_quality_image)
client.add_target(name='a', width=1, image=high_quality_image)
class TestCustomBaseVWSURL:
"""
Tests for using a custom base VWS URL.
"""
def test_custom_base_url(self, high_quality_image: io.BytesIO) -> None:
"""
It is possible to use add a target to a database under a custom VWS
URL.
"""
base_vws_url = 'http://example.com'
with MockVWS(base_vws_url=base_vws_url) as mock:
client = VWS(
server_access_key=mock.server_access_key,
server_secret_key=mock.server_secret_key,
base_vws_url=base_vws_url,
)
client.add_target(
name='x',
width=1,
image=high_quality_image,
)
class TestApplicationMetadata:
"""
Tests for the ``application_metadata`` parameter to ``add_target``.
"""
@pytest.mark.parametrize('application_metadata', [None, b'a'])
def test_valid_metadata(
self,
client: VWS,
high_quality_image: io.BytesIO,
application_metadata: Optional[bytes],
) -> None:
"""
No exception is raised when ``None`` or bytes is given.
"""
client.add_target(
name='x',
width=1,
image=high_quality_image,
application_metadata=None,
)
class TestActiveFlag:
"""
Tests for the ``active_flag`` parameter to ``add_target``.
"""
@pytest.mark.parametrize('active_flag', [True, False])
def test_active_flag_given(
self,
client: VWS,
high_quality_image: io.BytesIO,
active_flag: bool,
) -> None:
"""
It is possible to set the active flag to a boolean value.
"""
target_id = client.add_target(
name='x',
width=1,
image=high_quality_image,
active_flag=active_flag,
)
target_record = client.get_target_record(target_id=target_id)
assert target_record['active_flag'] is active_flag
|
Python
| 0.000466
|
@@ -80,16 +80,44 @@
mport io
+%0Afrom typing import Optional
%0A%0Aimport
|
bb89223a7fcc1f2562c55ca432a3c52eec6efd8a
|
Put everything in one method like we discussed.
|
tests/test_background.py
|
tests/test_background.py
|
import unittest
import numpy.testing as testing
import numpy as np
import fitsio
import redmapper
class BackgroundTestCase(unittest.TestCase):
def test_io(self): pass
def test_sigma_g(self):
inputs = [()]
idl_outputs = [0.32197464, 6.4165196, 0.0032830855, 1.4605126,
0.0098356586, 0.79848081, 0.011284498, 9.3293247]
for out in idl_outputs:
idx = tuple(np.random.randint(i) for i in self.bkg.sigma_g.shape)
testing.assert_almost_equal(self.bkg.sigma_g[idx], out, decimal=5)
def test_lookup(self): pass
def setUpClass(cls):
# test that we fail if we try a non-existent file
self.assertRaises(IOError, redmapper.background.Background,
'nonexistent.fit')
# test that we fail if we read a non-fits file
self.assertRaises(IOError, redmapper.background.Background,
'%s/testconfig.yaml' % (self.file_path))
# test that we fail if we try a file without the right header info
self.assertRaises(AttributeError, redmapper.background.Background,
'%s/test_dr8_pars.fit' % (self.file_path))
self.file_name, self.file_path = 'test_bkg.fit', 'data'
self.bkg = redmapper.background.Background('%s/%s' % (self.file_path,
self.file_name))
if __name__=='__main__':
unittest.main()
|
Python
| 0
|
@@ -75,22 +75,20 @@
fitsio%0A%0A
-import
+from
redmapp
@@ -89,16 +89,45 @@
edmapper
+.background import Background
%0A%0Aclass
@@ -178,471 +178,76 @@
def
-test_io(self): pass%0A%0A%0A def test_sigma_g(self):%0A inputs = %5B()%5D%0A idl_outputs = %5B0.32197464, 6.4165196, 0.0032830855, 1.4605126,%0A 0.0098356586, 0.79848081, 0.011284498, 9.3293247%5D%0A for out in idl_outputs:%0A idx = tuple(np.random.randint(i) for i in self.bkg.sigma_g.shape)%0A testing.assert_almost_equal(self.bkg.sigma_g%5Bidx%5D, out, decimal=5)%0A%0A def test_lookup(self): pass%0A%0A def setUpClass(cls):
+runTest(self):%0A file_name, file_path = 'test_bkg.fit', 'data'
%0A
@@ -340,61 +340,19 @@
or,
-redmapper.background.Background, %0A
+Background,
'no
@@ -457,37 +457,16 @@
OError,
-redmapper.background.
Backgrou
@@ -460,34 +460,39 @@
ror, Background,
-
%0A
+
@@ -515,29 +515,24 @@
ig.yaml' %25 (
-self.
file_path))%0A
@@ -648,37 +648,16 @@
eError,
-redmapper.background.
Backgrou
@@ -661,16 +661,22 @@
round, %0A
+
@@ -709,29 +709,24 @@
ars.fit' %25 (
-self.
file_path))%0A
@@ -737,220 +737,551 @@
-self.file_name, self.file_path = 'test_bkg.fit', 'data'%0A self.bkg = redmapper.background.Background('%25s/%25s' %25 (self.file_path, %0A self.file_name)
+bkg = Background('%25s/%25s' %25 (file_path, file_name))%0A%0A%0A inputs = %5B(172,15,64), (323,3,103), (9,19,21), (242,4,87),%0A (70,12,58), (193,6,39), (87,14,88), (337,5,25), (333,8,9)%5D%0A py_outputs = np.array(%5Bbkg.sigma_g%5Bidx%5D for idx in inputs%5D)%0A idl_outputs = np.array(%5B0.32197464, 6.4165196, 0.0032830855, %0A 1.4605126, 0.0098356586, 0.79848081, %0A 0.011284498, 9.3293247, 8.7064905%5D)%0A testing.assert_almost_equal(py_outputs, idl_outputs, decimal=1
)%0A%0A%0A
|
80c1275899045bfd50efa9b436ada7672c09e783
|
use md5 password hasher to speed up the tests
|
tests/test_settings.py
|
tests/test_settings.py
|
SECRET_KEY = 'fake-key'
INSTALLED_APPS = [
'tests',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
|
Python
| 0
|
@@ -140,8 +140,85 @@
%7D%0A%7D%0A
+%0APASSWORD_HASHERS = (%0A 'django.contrib.auth.hashers.MD5PasswordHasher',%0A)%0A
|
f00f4f82ce35f433ecb2ce0e1da2fc9069f33294
|
Add a test case to trigger this bug without the fix.
|
tests/test_simplify.py
|
tests/test_simplify.py
|
import claripy
import nose
def test_bool_simplification():
def assert_correct(a, b):
nose.tools.assert_true(claripy.backends.z3.identical(claripy.simplify(a), b))
a, b, c = (claripy.BoolS(name) for name in ('a', 'b', 'c'))
assert_correct(claripy.And(a, claripy.Not(a)), claripy.false)
assert_correct(claripy.Or(a, claripy.Not(a)), claripy.true)
complex_true_expression = claripy.Or(
claripy.And(a,b),
claripy.Or(claripy.And(a, claripy.Not(b)), claripy.And(claripy.Not(a), c)),
claripy.Or(claripy.And(a, claripy.Not(b)), claripy.And(claripy.Not(a), claripy.Not(c))))
assert_correct(complex_true_expression, claripy.true)
def test_simplification():
def assert_correct(a, b):
nose.tools.assert_true(claripy.backends.z3.identical(a, b))
x, y, z = (claripy.BVS(name, 32) for name in ('x', 'y', 'z'))
# test extraction of concatted values
concatted = claripy.Concat(x, y, z)
assert_correct(concatted[95:64], x)
assert_correct(concatted[63:32], y)
assert_correct(concatted[31:0], z)
assert_correct(concatted[95:32], claripy.Concat(x, y))
assert_correct(concatted[63:0], claripy.Concat(y, z))
assert_correct(concatted[95:0], concatted)
assert_correct(concatted[47:0], claripy.Concat(y, z)[47:0])
assert_correct(concatted[70:0], concatted[70:0])
assert_correct(concatted[70:15], concatted[70:15])
assert_correct(concatted[70:35], claripy.Concat(x, y)[38:3])
# make sure the division simplification works
assert_correct(2+x, claripy.backends.z3.simplify(1+x+1))
assert_correct(x/y, claripy.backends.z3.simplify(x/y))
assert_correct(x%y, claripy.backends.z3.simplify(x%y))
def test_rotate_shift_mask_simplification():
a = claripy.BVS('N', 32, max=0xc, min=0x1)
extend_ = claripy.BVS('extend', 32, uninitialized=True)
a_ext = extend_.concat(a)
expr = ((a_ext << 3) | (claripy.LShR(a_ext, 61))) & 0x7fffffff8
# print(expr)
# print(expr._model_vsa)
model_vsa = expr._model_vsa
nose.tools.assert_equal(model_vsa.lower_bound, 8)
nose.tools.assert_equal(model_vsa.upper_bound, 0x60)
nose.tools.assert_equal(model_vsa.cardinality, 12)
def test_reverse_extract_reverse_simplification():
# without the reverse_extract_reverse simplifier, loading dx from rdx will result in the following complicated
# expression:
# Reverse(Extract(63, 48, Reverse(BVS('rdx', 64))))
a = claripy.BVS('rdx', 64)
dx = claripy.Reverse(claripy.Extract(63, 48, claripy.Reverse(a)))
# simplification should have kicked in at this moment
nose.tools.assert_equal(dx.op, 'Extract')
nose.tools.assert_equal(dx.args[0], 15)
nose.tools.assert_equal(dx.args[1], 0)
nose.tools.assert_is(dx.args[2], a)
def perf_boolean_and_simplification_0():
# Create a gigantic And AST with many operands, one variable at a time
bool_vars = [ claripy.BoolS("b%d" % i) for i in range(1500) ]
v = bool_vars[0]
for i in range(1, len(bool_vars)):
v = claripy.And(v, bool_vars[i])
def perf_boolean_and_simplification_1():
# Create a gigantic And AST with many operands, many variables at a time
bool_vars = [ claripy.BoolS("b%d" % i) for i in range(500) ]
v = bool_vars[0]
for i in range(1, len(bool_vars)):
if v.op == "And":
v = claripy.And(*(v.args + (bool_vars[i] == False,))) # pylint:disable=singleton-comparison
else:
v = claripy.And(v, bool_vars[i])
def perf():
import timeit
print(timeit.timeit("perf_boolean_and_simplification_0()",
number=10,
setup="from __main__ import perf_boolean_and_simplification_0"))
print(timeit.timeit("perf_boolean_and_simplification_1()",
number=10,
setup="from __main__ import perf_boolean_and_simplification_1"))
if __name__ == '__main__':
test_simplification()
test_bool_simplification()
test_rotate_shift_mask_simplification()
test_reverse_extract_reverse_simplification()
|
Python
| 0
|
@@ -2778,24 +2778,400 @@
gs%5B2%5D, a)%0A%0A%0A
+def test_reverse_concat_reverse_simplification():%0A%0A # Reverse(Concat(Reverse(a), Reverse(b))) = Concat(b, a)%0A%0A a = claripy.BVS('a', 32)%0A b = claripy.BVS('b', 32)%0A x = claripy.Reverse(claripy.Concat(claripy.Reverse(a), claripy.Reverse(b)))%0A%0A nose.tools.assert_equal(x.op, 'Concat')%0A nose.tools.assert_is(x.args%5B0%5D, b)%0A nose.tools.assert_is(x.args%5B1%5D, a)%0A%0A%0A
def perf_boo
@@ -4446,24 +4446,73 @@
everse_simplification()%0A
+ test_reverse_concat_reverse_simplification()%0A
|
0d825461c5c28ce451092783937fe95171c243bd
|
Add full deprecated test
|
tests/test_deprecated.py
|
tests/test_deprecated.py
|
"""
SPDX-FileCopyrightText: 2019 oemof developer group <contact@oemof.org>
SPDX-License-Identifier: MIT
"""
import warnings
import pytest
from windpowerlib.data import load_turbine_data_from_oedb
from windpowerlib.wind_turbine import get_turbine_types
def test_old_import():
msg = "Use >>from windpowerlib import get_turbine_types"
with pytest.raises(ImportError, match=msg):
get_turbine_types()
def test_old_name_load_data_from_oedb():
with warnings.catch_warnings():
warnings.simplefilter("error")
msg = "store_turbine_data_from_oedb"
with pytest.raises(FutureWarning, match=msg):
load_turbine_data_from_oedb()
|
Python
| 0.000001
|
@@ -454,106 +454,26 @@
edb(
-):%0A with warnings.catch_warnings(
+recwarn
):%0A
-
- warnings.simplefilter(%22error%22)%0A msg = %22store
+load
_tur
@@ -495,102 +495,44 @@
oedb
-%22
+()
%0A
- with pytest.raises(FutureWarning, match=msg):%0A load_turbine_data_from_oedb()%0A
+assert recwarn.pop(FutureWarning)
|
ca40822d7898d02272cdf0a52fa5a8b75b983930
|
Fix syntax errors in addpost test
|
tests/tests/addpost.py
|
tests/tests/addpost.py
|
import binascii
import os
import sqlalchemy
import selenium
from selenium.webdriver.common import keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.common.by import By
from timpani import database
LOGIN_TITLE = "Login - Timpani"
ADD_POST_TITLE = "Add Post - Timpani"
#A random constant used to varify that a unique post was made
POST_RANDOM = binascii.hexlify(os.urandom(16)).decode()
POST_TITLE = "Test post, please ignore."
POST_BODY = ("This is a test post."
"There is no reason you should be paying attention to it. %s" % POST_RANDOM)
POST_TAGS = ["test", "post", "selenium"]
def test(driver, username, password):
databaseConnection = database.DatabaseConnection()
driver.get("http://127.0.0.1:8080/add_post")
WebDriverWait(driver, 10)
.until(expected_conditions.title_contains("Timpani"))
#Check that we were redirected to the login page, as we are not logged in.
assert driver.title == LOGIN_TITLE, "Title is %s" % driver.title
loginForm = driver.find_element_by_id("login-form")
usernameField = driver.find_element_by_id("username-field")
passwordField = driver.find_element_by_id("password-field")
usernameField.send_keys(username)
passwordField.send_keys(password)
loginForm.submit()
WebDriverWait(driver, 10)
.until_not(expected_conditions.title_is(LOGIN_TITLE))
#We should have been redirected to the add_post page.
assert driver.title == ADD_POST_TITLE, "Title is %s" % driver.title
postForm = driver.find_element_by_id("post-form")
titleInput = driver.find_element_by_id("title-input")
editorField = driver.find_element_by_css_selector("#editor > .ql-editor")
tagsInput = driver.find_element_by_id("tag-input-div")
titleInput.click()
titleInput.send_keys(POST_TITLE)
editorField.click()
actionChain = selenium.webdriver.ActionChains(driver)
actionChain.send_keys(POST_BODY)
actionChain.perform()
tagsInput.click()
actionChain = selenium.webdriver.ActionChains(driver)
for tag in POST_TAGS:
actionChain.send_keys(tag)
actionChain.send_keys(keys.Keys.SPACE)
actionChain.perform()
postForm.submit()
post = databaseConnection.session
.query(database.tables.Post)
.order_by(sqlalchemy.desc(database.tables.Post.id))
.first()
tags = databaseConnection.session
.query(database.tables.Tag.name)
.filter(database.tables.Tag.post_id == post.id)
.all()
#Resolve sqlalchemy tuples
tags = [tag[0] for tag in tags]
assert post != None
assert post.title == POST_TITLE, "Title is %s" % post.title
assert POST_RANDOM in post.body
assert tags == POST_TAGS, "Tags are %s" % tags
|
Python
| 0.000004
|
@@ -549,18 +549,19 @@
st post.
+
%22%0A
-
%09%22There
@@ -811,16 +811,17 @@
st%22)%09%0A%0A%09
+(
WebDrive
@@ -893,16 +893,17 @@
mpani%22))
+)
%0A%0A%09#Chec
@@ -1307,16 +1307,17 @@
mit()%0A%0A%09
+(
WebDrive
@@ -1389,16 +1389,17 @@
_TITLE))
+)
%0A%0A%09#We s
@@ -2166,16 +2166,17 @@
%09post =
+(
database
@@ -2289,16 +2289,17 @@
.first()
+)
%0A%09tags =
@@ -2299,16 +2299,17 @@
%09tags =
+(
database
@@ -2416,16 +2416,16 @@
id)%0A
-
%09%09.all()
%0A%0A%09#
@@ -2420,16 +2420,17 @@
%09%09.all()
+)
%0A%0A%09#Reso
|
72351a1ebff21777f71d93a1f5647482b98d3448
|
Fix text.
|
tests/test_formatters.py
|
tests/test_formatters.py
|
import json
import random
import unittest
from exporters.exceptions import ConfigurationError
from exporters.export_formatter.base_export_formatter import BaseExportFormatter
from exporters.export_formatter.csv_export_formatter import CSVExportFormatter
from exporters.export_formatter.json_export_formatter import JsonExportFormatter
from exporters.records.base_record import BaseRecord
class BaseExportFormatterTest(unittest.TestCase):
def setUp(self):
self.options = {
'exporter_options': {
}
}
self.export_formatter = BaseExportFormatter(self.options)
def test_format_not_implemented(self):
with self.assertRaises(NotImplementedError):
self.export_formatter.format([])
class JsonFormatterTest(unittest.TestCase):
def setUp(self):
self.options = {
'exporter_options': {
}
}
self.export_formatter = JsonExportFormatter(self.options)
def test_format(self):
item = BaseRecord()
item['key'] = 0
item['value'] = random.randint(0, 10000)
item = self.export_formatter.format([item])
item = list(item)[0]
self.assertIsInstance(json.loads(item.formatted), dict)
def test_raise_exception(self):
with self.assertRaises(Exception):
list(self.export_formatter.format([1, 2, 3]))
class CSVFormatterTest(unittest.TestCase):
def setUp(self):
self.batch = [
BaseRecord({'key1': 'value1', 'key2': 'value2'}),
BaseRecord({'key1': 'value1', 'key2': 'value2'}),
BaseRecord({'key1': 'value1', 'key2': 'value2'}),
BaseRecord({'key1': 'value1', 'key2': 'value2'}),
BaseRecord({'key1': 'value1', 'key2': 'value2'})
]
def test_create_without_options_raises_errors(self):
with self.assertRaisesRegexp(ConfigurationError, "requires at least one of"):
CSVExportFormatter({})
def test_format_batch_titles(self):
options = {
'options': {
'show_titles': True,
'fields': ['key1']
}
}
formatter = CSVExportFormatter(options)
items = formatter.format(self.batch)
items = list(items)
self.assertEqual(items[0].formatted, '"key1"')
self.assertEqual(items[1].formatted, '"value1"')
def test_format_batch_no_titles(self):
# given:
options = {
'options': {
'fields': ['key1']
}
}
formatter = CSVExportFormatter(options)
# when:
items = list(formatter.format(self.batch))
# then:
self.assertEqual(items[0].formatted, '"key1"')
self.assertEqual(items[1].formatted, '"value1"')
def test_format_from_schema(self):
# given:
options = {
'options': {
'show_titles': True,
'schema': {
'$schema': 'http://json-schema.org/draft-04/schema',
'properties': {
'key1': {
'type': 'string'
},
'key2': {
'type': 'string'
}
},
'required': [
'key2',
'key1'
],
'type': 'object'
}
}
}
formatter = CSVExportFormatter(options)
# when:
items = list(formatter.format(self.batch))
# then:
self.assertEqual([True] + [False] * 5, [i.header for i in items])
self.assertEqual(['"key1","key2"'] + ['"value1","value2"'] * 5,
[i.formatted for i in items])
self.assertEqual(set(['csv']), set(i.format for i in items))
|
Python
| 0.999999
|
@@ -3362,17 +3362,17 @@
'key
-2
+1
',%0A
@@ -3394,17 +3394,17 @@
'key
-1
+2
'%0A
|
8288619fcb4aa44cd5d293dd9421d190d6c57e98
|
Simplify pytest fixture [ci skip]
|
tests/test_formatting.py
|
tests/test_formatting.py
|
"""Test entry formatting and printing."""
import bibpy
import pytest
@pytest.fixture
def test_entries():
entries, _, _, _, _ = bibpy.read_file('tests/data/simple_1.bib',
'biblatex')
return entries
@pytest.mark.skip
def test_formatting(test_entries):
print(test_entries[0].format())
assert test_entries[0].format(align=True, indent=' ', order=[]) ==\
"""@article{test,
month = {4},
title = {1337 Hacker},
institution = {Office of Information Management {and} Communications},
year = {2010},
author = {James Conway and Archer Sterling}
}"""
assert test_entries[1].format(align=True, indent=' ', order=[]) ==\
"""@conference{lol,
author = {k}
}"""
@pytest.mark.skip
def test_align(test_entries, monkeypatch):
# Set PYTHONHASHSEED to zero for Python 3+ to ensure predictable ordering
# of Python's dictionary
monkeypatch.setenv('PYTHONHASHSEED', 0)
assert test_entries[0].format(align=False, indent=' ', order=[]) ==\
"""@article{test,
month = {4},
title = {1337 Hacker},
institution = {Office of Information Management {and} Communications},
year = {2010},
author = {James Conway and Archer Sterling}
}"""
@pytest.mark.skip
def test_indent(test_entries, monkeypatch):
# Set PYTHONHASHSEED to zero for Python 3+ to ensure predictable ordering
# of Python's dictionary
monkeypatch.setenv('PYTHONHASHSEED', 0)
assert test_entries[0].format(align=True, indent='', order=[]) ==\
"""@article{test,
month = {4},
title = {1337 Hacker},
institution = {Office of Information Management {and} Communications},
year = {2010},
author = {James Conway and Archer Sterling}
}"""
assert test_entries[0].format(align=True, indent=' ' * 9, order=[]) ==\
"""@article{test,
month = {4},
title = {1337 Hacker},
institution = {Office of Information Management {and} Communications},
year = {2010},
author = {James Conway and Archer Sterling}
}"""
def test_ordering(test_entries, monkeypatch):
for fail in ('string', 0.453245, object()):
with pytest.raises(ValueError):
test_entries[0].format(order=fail)
# Print a predefined order
order = ['author', 'title', 'year', 'institution', 'month']
assert test_entries[0].format(align=True, indent=' ', order=order) ==\
"""@article{test,
author = {James Conway and Archer Sterling},
title = {1337 Hacker},
year = {2010},
institution = {Office of Information Management {and} Communications},
month = {4}
}"""
# Print fields as sorted
assert test_entries[0].format(align=True, indent=' ', order=True) ==\
"""@article{test,
author = {James Conway and Archer Sterling},
institution = {Office of Information Management {and} Communications},
month = {4},
title = {1337 Hacker},
year = {2010}
}"""
|
Python
| 0
|
@@ -109,29 +109,14 @@
-entries, _, _, _, _ =
+return
bib
@@ -158,74 +158,21 @@
ib',
-%0A 'biblatex')%0A return
+ 'biblatex').
entr
|
a9f2ad660d0b1e4443785cc5fb0c9afd0b1ce660
|
Update test_ica_lingam.py
|
tests/test_ica_lingam.py
|
tests/test_ica_lingam.py
|
import numpy as np
import pandas as pd
from lingam.ica_lingam import ICALiNGAM
def test_fit_success():
# causal direction: x0 --> x1 --> x3
x0 = np.random.uniform(size=1000)
x1 = 2.0 * x0 + np.random.uniform(size=1000)
x2 = np.random.uniform(size=1000)
x3 = 4.0 * x1 + np.random.uniform(size=1000)
X = pd.DataFrame(np.array([x0, x1, x2, x3]).T, columns=['x0', 'x1', 'x2', 'x3'])
model = ICALiNGAM()
model.fit(X)
# check the causal ordering
co = model.causal_order_
assert co.index(0) < co.index(1) < co.index(3)
# check the adjacency matrix
am = model.adjacency_matrix_
assert am[1, 0] > 1.5 and am[3, 1] > 3.5
am[1, 0] = 0.0
am[3, 1] = 0.0
assert np.sum(am) < 0.1
# for coverage
matrix = np.array([
[0, 1, 1, 1],
[0, 0, 1, 1],
[0, 0, 0, 1],
[0, 0, 0, 0],
])
model = ICALiNGAM()
model._search_causal_order(matrix)
# for coverage
matrix = np.array([
[1, 1, 1],
[1, 1, 1],
[0, 0, 0],
])
model = ICALiNGAM()
model._search_causal_order(matrix)
def test_fit_invalid_data():
# Not array data
X = 1
try:
model = ICALiNGAM()
model.fit(X)
except ValueError:
pass
else:
raise AssertionError
# Include non-numeric data
x0 = np.random.uniform(size=5)
x1 = np.array(['X', 'Y', 'X', 'Y', 'X'])
X = pd.DataFrame(np.array([x0, x1]).T, columns=['x0', 'x1'])
try:
model = ICALiNGAM()
model.fit(X)
except ValueError:
pass
else:
raise AssertionError
# Include NaN values
x0 = np.random.uniform(size=1000)
x1 = 2.0 * x0 + np.random.uniform(size=1000)
X = pd.DataFrame(np.array([x0, x1]).T, columns=['x0', 'x1'])
X.iloc[100, 0] = np.nan
try:
model = ICALiNGAM()
model.fit(X)
except ValueError:
pass
else:
raise AssertionError
# Include infinite values
x0 = np.random.uniform(size=1000)
x1 = 2.0 * x0 + np.random.uniform(size=1000)
X = pd.DataFrame(np.array([x0, x1]).T, columns=['x0', 'x1'])
X.iloc[100, 0] = np.inf
try:
model = ICALiNGAM()
model.fit(X)
except ValueError:
pass
else:
raise AssertionError
|
Python
| 0.000001
|
@@ -619,24 +619,26 @@
_matrix_%0A
+ #
assert am%5B1
@@ -705,24 +705,26 @@
1%5D = 0.0%0A
+ #
assert np.s
|
1bfa44cd2f584a4fd9ab973a830ef287c504c6dc
|
Update tests
|
tests/test_rate_limit.py
|
tests/test_rate_limit.py
|
"""Test for prawcore.Sessions module."""
from copy import copy
from mock import patch
from prawcore.rate_limit import RateLimiter
import unittest
class RateLimiterTest(unittest.TestCase):
def _headers(self, remaining, used, reset):
return {
"x-ratelimit-remaining": str(float(remaining)),
"x-ratelimit-used": str(used),
"x-ratelimit-reset": str(reset),
}
def setUp(self):
self.rate_limiter = RateLimiter()
self.rate_limiter.next_request_timestamp = 100
@patch("time.sleep")
@patch("time.time")
def test_delay(self, mock_time, mock_sleep):
mock_time.return_value = 1
self.rate_limiter.delay()
self.assertTrue(mock_time.called)
mock_sleep.assert_called_with(99)
@patch("time.sleep")
@patch("time.time")
def test_delay__no_sleep_when_time_in_past(self, mock_time, mock_sleep):
mock_time.return_value = 101
self.rate_limiter.delay()
self.assertTrue(mock_time.called)
self.assertFalse(mock_sleep.called)
@patch("time.sleep")
def test_delay__no_sleep_when_time_is_not_set(self, mock_sleep):
self.rate_limiter.delay()
self.assertFalse(mock_sleep.called)
@patch("time.sleep")
@patch("time.time")
def test_delay__no_sleep_when_times_match(self, mock_time, mock_sleep):
mock_time.return_value = 100
self.rate_limiter.delay()
self.assertTrue(mock_time.called)
self.assertFalse(mock_sleep.called)
@patch("time.time")
def test_update__delay_full_time_with_negative_remaining(self, mock_time):
mock_time.return_value = 37
self.rate_limiter.remaining = -1
self.rate_limiter.update(self._headers(0, 100, 13))
self.assertEqual(0, self.rate_limiter.remaining)
self.assertEqual(100, self.rate_limiter.used)
self.assertEqual(50, self.rate_limiter.next_request_timestamp)
@patch("time.time")
def test_update__delay_full_time_with_zero_remaining(self, mock_time):
mock_time.return_value = 37
self.rate_limiter.remaining = 0
self.rate_limiter.update(self._headers(0, 100, 13))
self.assertEqual(0, self.rate_limiter.remaining)
self.assertEqual(100, self.rate_limiter.used)
self.assertEqual(50, self.rate_limiter.next_request_timestamp)
@patch("time.time")
def test_update__compute_delay_with_no_previous_info(self, mock_time):
mock_time.return_value = 100
self.rate_limiter.update(self._headers(60, 100, 60))
self.assertEqual(60, self.rate_limiter.remaining)
self.assertEqual(100, self.rate_limiter.used)
self.assertEqual(101, self.rate_limiter.next_request_timestamp)
@patch("time.time")
def test_update__compute_delay_with_single_client(self, mock_time):
self.rate_limiter.remaining = 61
mock_time.return_value = 100
self.rate_limiter.update(self._headers(60, 100, 60))
self.assertEqual(60, self.rate_limiter.remaining)
self.assertEqual(100, self.rate_limiter.used)
self.assertEqual(101, self.rate_limiter.next_request_timestamp)
@patch("time.time")
def test_update__compute_delay_with_six_clients(self, mock_time):
self.rate_limiter.remaining = 66
mock_time.return_value = 100
self.rate_limiter.update(self._headers(60, 100, 60))
self.assertEqual(60, self.rate_limiter.remaining)
self.assertEqual(100, self.rate_limiter.used)
self.assertEqual(106, self.rate_limiter.next_request_timestamp)
def test_update__no_change_without_headers(self):
prev = copy(self.rate_limiter)
self.rate_limiter.update({})
self.assertEqual(prev.remaining, self.rate_limiter.remaining)
self.assertEqual(prev.used, self.rate_limiter.used)
self.assertEqual(
prev.next_request_timestamp,
self.rate_limiter.next_request_timestamp,
)
def test_update__values_change_without_headers(self):
self.rate_limiter.remaining = 10
self.rate_limiter.used = 99
self.rate_limiter.update({})
self.assertEqual(9, self.rate_limiter.remaining)
self.assertEqual(100, self.rate_limiter.used)
|
Python
| 0.000001
|
@@ -2687,33 +2687,33 @@
f.assertEqual(10
-1
+0
, self.rate_limi
@@ -2954,33 +2954,33 @@
e(self._headers(
-6
+5
0, 100, 60))%0A
@@ -2993,33 +2993,33 @@
elf.assertEqual(
-6
+5
0, self.rate_lim
@@ -3115,17 +3115,17 @@
Equal(10
-1
+5
, self.r
@@ -3381,34 +3381,34 @@
eaders(60, 100,
-60
+72
))%0A self.
|
61786f6693086e5b22292ce9c063ffa66657b1aa
|
Add unit test to validate value returned by mse() function
|
tests/test_regressors.py
|
tests/test_regressors.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_regressors
---------------
Tests for the `regressors` module.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import pandas as pd
import unittest2 as unittest
from sklearn import datasets
from sklearn.decomposition import PCA
from regressors import regressors
from regressors import stats
boston = datasets.load_boston()
which_betas = np.ones(13, dtype=bool)
which_betas[3] = False # Eliminate dummy variable
X = boston.data[:, which_betas]
y = boston.target
class TestLinearRegression(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_LinearRegression_fit_with_no_xlabels(self):
ols = regressors.LinearRegression()
try:
ols.fit(X, y)
except Exception as e:
self.fail("Fitting with no xlabels raised unexpected "
"exception: {0}".format(e))
def test_LinearRegression_fit_with_xlabels_as_args(self):
ols = regressors.LinearRegression()
labels = ['LABEL{0}'.format(i) for i in range(X.shape[1])]
try:
ols.fit(X, y, labels)
except Exception as e:
self.fail("Fitting with xlabels as *args raised unexpected "
"exception: {0}".format(e))
def test_LinearRegression_fit_with_xlabels_as_kwargs(self):
ols = regressors.LinearRegression()
labels = ['LABEL{0}'.format(i) for i in range(X.shape[1])]
try:
ols.fit(X, y=y, xlabels=labels)
except Exception as e:
self.fail("Fitting with xlabels as **kwargs raised unexpected "
"exception: {0}".format(e))
def test_LinearRegression_fit_with_xlabels_mixed_kwarg(self):
ols = regressors.LinearRegression()
labels = ['LABEL{0}'.format(i) for i in range(X.shape[1])]
try:
ols.fit(X, y, xlabels=labels)
except Exception as e:
self.fail("Fitting with xlabels as **kwargs with y also as "
"**kwargs raised unexpected exception: {0}".format(e))
def test_LinearRegression_fit_with_xlabels_all_kwargs(self):
ols = regressors.LinearRegression()
labels = ['LABEL{0}'.format(i) for i in range(X.shape[1])]
try:
ols.fit(X=X, y=y, xlabels=labels)
except Exception as e:
self.fail("Fitting with xlabels with all parameters as "
"**kwargs raised unexpected exception: {0}".format(e))
def test_LinearRegression_fit_with_xlabels_out_of_position_kwargs(self):
ols = regressors.LinearRegression()
labels = ['LABEL{0}'.format(i) for i in range(X.shape[1])]
try:
ols.fit(X=X, xlabels=labels, y=y)
except Exception as e:
self.fail("Fitting with xlabels with all parameters as "
"**kwargs raised unexpected exception: {0}".format(e))
def test_LinearRegression_fit_with_xlabels_args_out_of_pos_args_fails(self):
ols = regressors.LinearRegression()
labels = ['LABEL{0}'.format(i) for i in range(X.shape[1])]
with self.assertRaises(AssertionError):
ols.fit(X, labels, y)
def test_LinearRegression_xlabel_dimensions_error_checking(self):
ols = regressors.LinearRegression()
with self.assertRaises(AssertionError):
ols.fit(X, y, xlabels=['LABEL1', 'LABEL2'])
def test_LinearRegression_summary(self):
ols = regressors.LinearRegression()
labels = ['LABEL{0}'.format(i) for i in range(X.shape[1])]
ols.fit(X, y, labels)
summary = ols.summary()
self.assertIsInstance(summary, pd.core.frame.DataFrame)
try:
str(summary)
except Exception as e:
self.fail("str(summary) raised "
"exception unexpectedly: {0}".format(e))
class TestStatsResiduals(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_classifier_type_assertion_raised(self):
# Test that assertion is raised for unsupported model
pcomp = PCA()
pcomp.fit(X, y)
with self.assertRaises(AttributeError):
stats.residuals(pcomp, X, y)
def tests_classifier_type_assertion_not_raised(self):
# Test that assertion is not raise for supported models
for classifier in regressors.supported_linear_models:
clf = classifier()
clf.fit(X, y)
try:
stats.residuals(clf, X, y)
except Exception as e:
self.fail("Testing supported linear models in residuals "
"function failed unexpectedly: {0}".format(e))
def test_getting_raw_residuals(self):
ols = regressors.LinearRegression()
ols.fit(X, y)
try:
stats.residuals(ols, X, y, r_type='raw')
except Exception as e:
self.fail("Testing raw residuals failed unexpectedly: "
"{0}".format(e))
def test_getting_standardized_residuals(self):
ols = regressors.LinearRegression()
ols.fit(X, y)
try:
stats.residuals(ols, X, y, r_type='standardized')
except Exception as e:
self.fail("Testing standardized residuals failed unexpectedly: "
"{0}".format(e))
def test_getting_studentized_residuals(self):
ols = regressors.LinearRegression()
ols.fit(X, y)
try:
stats.residuals(ols, X, y, r_type='studentized')
except Exception as e:
self.fail("Testing studentized residuals failed unexpectedly: "
"{0}".format(e))
def test_error_not_raised_by_sse(self):
# Test that assertion is not raise for supported models
for classifier in regressors.supported_linear_models:
clf = classifier()
clf.fit(X, y)
try:
sse = stats.sse(clf, X, y)
except Exception as e:
self.fail("Testing SSE function for supported linear models "
"failed unexpectedly: {0}".format(e))
def test_error_not_raised_by_adj_r2_score(self):
# Test that assertion is not raise for supported models
for classifier in regressors.supported_linear_models:
clf = classifier()
clf.fit(X, y)
try:
stats.adj_r2_score(clf, X, y)
except Exception as e:
self.fail("Testing adjusted R2 function for supported linear "
"models failed unexpectedly: {0}".format(e))
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
Python
| 0
|
@@ -363,16 +363,77 @@
atasets%0A
+from sklearn import metrics%0Afrom sklearn import linear_model%0A
from skl
@@ -6876,16 +6876,292 @@
at(e))%0A%0A
+ def test_mse(self):%0A ols = linear_model.LinearRegression()%0A ols.fit(X, y)%0A expected_mse = metrics.mean_squared_error(y, ols.predict(X))%0A np.testing.assert_approx_equal(stats.mse(ols, X, y),%0A expected_mse)%0A%0A
%0Aif __na
|
1be562eb115f302bd7fe47c2a90c5d4796a0eb98
|
make slider example more sophisticated
|
examples/plotting/file/slider.py
|
examples/plotting/file/slider.py
|
from bokeh.plotting import figure, output_file, show, vplot, ColumnDataSource
from bokeh.models.actions import Callback
from bokeh.models.widgets import Slider
import numpy as np
x = np.linspace(0, 10, 100)
y = np.sin(x)
source = ColumnDataSource(data=dict(x=x, y=y, y_orig=y))
plot = figure(y_range=(-20, 20))
plot.line('x', 'y', source=source)
callback = Callback(args=dict(source=source), code="""
var data = source.get('data');
var val = cb_obj.get('value')
data['y'] = Bokeh._.map(data['y_orig'], function(y){ return y*val; });
source.trigger('change');
""")
slider = Slider(start=1, end=20, value=1, step=1, title="Foo", callback=callback)
layout = vplot(slider, plot)
output_file("slider.html")
show(layout)
|
Python
| 0
|
@@ -1,20 +1,47 @@
+%0Afrom bokeh.io import vform
%0Afrom bokeh.plotting
@@ -55,16 +55,23 @@
figure,
+ hplot,
output_
@@ -232,17 +232,17 @@
(0, 10,
-1
+5
00)%0Ay =
@@ -300,22 +300,13 @@
y=y
-, y_orig=y
))%0A%0A
+%0A
plot
@@ -329,15 +329,48 @@
e=(-
-20, 2
+10, 1
0)
+, plot_width=400, plot_height=400
)%0Apl
@@ -400,16 +400,46 @@
e=source
+, line_width=3, line_alpha=0.6
)%0A%0Acallb
@@ -535,20 +535,110 @@
var
-val = cb_obj
+A = amp.get('value')%0A var k = freq.get('value')%0A var phi = phase.get('value')%0A var B = offset
.get
@@ -654,79 +654,126 @@
%0A
+ x =
data%5B'
-y'%5D = Bokeh._.map(data%5B'y_orig'%5D, function(y)%7B return y*val; %7D);
+x'%5D%0A y = data%5B'y'%5D%0A for (i = 0; i %3C x.length; i++) %7B%0A y%5Bi%5D = B + A*Math.sin(k*x%5Bi%5D+phi);%0A %7D
%0A
@@ -805,16 +805,20 @@
;%0A%22%22%22)%0A%0A
+amp_
slider =
@@ -835,16 +835,149 @@
art=
+0.
1, end=
-2
+10, value=1, step=.1, title=%22Amplitude%22, callback=callback)%0Acallback.args%5B%22amp%22%5D = amp_slider%0A%0Afreq_slider = Slider(start=0.1, end=1
0, v
@@ -989,16 +989,17 @@
1, step=
+.
1, title
@@ -1005,60 +1005,430 @@
e=%22F
-oo%22, callback=callback)%0A%0Alayout = vplot(slider,
+requency%22, callback=callback)%0Acallback.args%5B%22freq%22%5D = freq_slider%0A%0Aphase_slider = Slider(start=0, end=6.4, value=0, step=.1, title=%22Phase%22, callback=callback)%0Acallback.args%5B%22phase%22%5D = phase_slider%0A%0Aoffset_slider = Slider(start=-5, end=5, value=0, step=.1, title=%22Offset%22, callback=callback)%0Acallback.args%5B%22offset%22%5D = offset_slider%0A%0Alayout = hplot(%0A vform(amp_slider, freq_slider, phase_slider, offset_slider),%0A
plot
+%0A
)%0A%0Ao
|
feb095effbe4cd92f253e7d5b68baf5b215056ef
|
Update views.py
|
examples/quickhowto/app/views.py
|
examples/quickhowto/app/views.py
|
from flask.ext.appbuilder.menu import Menu
from flask.ext.appbuilder.baseapp import BaseApp
from flask.ext.appbuilder.models.datamodel import SQLAModel
from flask.ext.appbuilder.views import GeneralView
from flask.ext.appbuilder.charts.views import ChartView, TimeChartView
from app import app, db
from models import Group, Contact
class ContactGeneralView(GeneralView):
datamodel = SQLAModel(Contact, db.session)
label_columns = {'group':'Contacts Group'}
list_columns = ['name','personal_celphone','birthday','group']
order_columns = ['name','personal_celphone','birthday']
search_columns = ['name','personal_celphone','group','birthday']
show_fieldsets = [
('Summary',{'fields':['name','address','group']}),
('Personal Info',{'fields':['birthday','personal_phone','personal_celphone'],'expanded':False}),
]
class GroupGeneralView(GeneralView):
datamodel = SQLAModel(Group, db.session)
related_views = [ContactGeneralView()]
list_columns = ['name']
order_columns = ['name']
search_columns = ['name']
class ContactChartView(ChartView):
search_columns = ['name','group']
chart_title = 'Grouped contacts'
label_columns = ContactGeneralView.label_columns
group_by_columns = ['group']
datamodel = SQLAModel(Contact, db.session)
class ContactTimeChartView(TimeChartView):
search_columns = ['name','group']
chart_title = 'Grouped Birth contacts'
label_columns = ContactGeneralView.label_columns
group_by_columns = ['birthday']
datamodel = SQLAModel(Contact, db.session)
genapp = BaseApp(app, db)
genapp.add_view(GroupGeneralView(), "List Groups",icon = "th-large",category = "Contacts")
genapp.add_view(ContactGeneralView(), "List Contacts",icon = "earphone",category = "Contacts")
genapp.add_separator("Contacts")
genapp.add_view(ContactChartView(), "Contacts Chart","/contactchartview/chart","signal","Contacts")
genapp.add_view(ContactTimeChartView(), "Contacts Birth Chart","/contacttimechartview/chart/month","signal","Contacts")
|
Python
| 0
|
@@ -591,24 +591,25 @@
thday'%5D%0A
+#
search_colum
@@ -1047,24 +1047,25 @@
'name'%5D%0A
+#
search_colum
@@ -1110,32 +1110,33 @@
ChartView):%0A
+#
search_columns =
@@ -1371,16 +1371,17 @@
w):%0A
+#
search_c
|
db8f189075783bf9c3947cba1be9b846d80b3f13
|
Update backlog on_feed_abort to api v2
|
flexget/plugins/input_backlog.py
|
flexget/plugins/input_backlog.py
|
import logging
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, String, DateTime, PickleType
from flexget.manager import Base, Session
from flexget.feed import Entry
from flexget.plugin import register_plugin, priority, PluginWarning
log = logging.getLogger('backlog')
class BacklogEntry(Base):
__tablename__ = 'backlog'
id = Column(Integer, primary_key=True)
feed = Column(String)
title = Column(String)
expire = Column(DateTime)
entry = Column(PickleType(mutable=False))
def __repr__(self):
return '<BacklogEntry(title=%s)>' % (self.title)
class InputBacklog(object):
"""
Keeps feed history for given amount of time.
Example:
backlog: 4 days
Rarely useful for end users, mainly used by other plugins.
"""
def validator(self):
from flexget import validator
root = validator.factory('regexp_match', message='Must be in format <number> <hours|minutes|days|weeks>')
root.accept('\d+ (minute|hour|day|week)s?')
return root
def get_amount(self, value):
amount, unit = value.split(' ')
# Make sure unit name is plural.
if not unit.endswith('s'):
unit = unit + 's'
log.debug('amount: %s unit: %s' % (repr(amount), repr(unit)))
params = {unit: int(amount)}
try:
return timedelta(**params)
except TypeError:
raise PluginWarning('Invalid time format \'%s\'' % value, log)
@priority(-255)
def on_feed_input(self, feed, config):
# Take a snapshot of the entries' states after the input event in case we have to store them to backlog
for entry in feed.entries:
entry.take_snapshot('after_input')
injections = self.get_injections(feed)
if config:
# If backlog is manually enabled for this feed, learn the entries.
self.learn_backlog(feed, config)
# Return the entries from backlog that are not already in the feed
return injections
def on_feed_abort(self, feed):
"""Remember all entries for 12 hours when feed gets aborted."""
log.debug('Remembering all entries to backlog for 12 hours because of feed abort.')
self.learn_backlog(feed, '12 hours')
def add_backlog(self, feed, entry, amount=''):
"""Add single entry to feed backlog"""
snapshot = entry.snapshots.get('after_input')
if not snapshot:
log.warning('No input snapshot available for `%s`, using current state' % entry['title'])
snapshot = dict(entry)
session = Session()
expire_time = datetime.now() + self.get_amount(amount)
backlog_entry = session.query(BacklogEntry).filter(BacklogEntry.title == entry['title']).\
filter(BacklogEntry.feed == feed.name).first()
if backlog_entry:
# If there is already a backlog entry for this, update the expiry time if necessary.
if backlog_entry.expire < expire_time:
log.debug('Updating expiry time for %s' % entry['title'])
backlog_entry.expire = expire_time
else:
log.debug('Saving %s' % entry['title'])
backlog_entry = BacklogEntry()
backlog_entry.title = entry['title']
backlog_entry.entry = snapshot
backlog_entry.feed = feed.name
backlog_entry.expire = expire_time
session.add(backlog_entry)
session.commit()
def learn_backlog(self, feed, amount=''):
"""Learn current entries into backlog. All feed inputs must have been executed."""
for entry in feed.entries:
self.add_backlog(feed, entry, amount)
def get_injections(self, feed):
"""Insert missing entries from backlog."""
# purge expired
for backlog_entry in feed.session.query(BacklogEntry).filter(datetime.now() > BacklogEntry.expire).all():
log.debug('Purging %s' % backlog_entry.title)
feed.session.delete(backlog_entry)
# add missing from backlog
entries = []
backlog_entries = feed.session.query(BacklogEntry).filter(BacklogEntry.feed == feed.name).all()
for backlog_entry in backlog_entries:
entry = Entry(backlog_entry.entry)
# this is already in the feed
if feed.find_entry(title=entry['title'], url=entry['url']):
continue
log.debug('Restoring %s' % entry['title'])
entries.append(entry)
feed.session.delete(backlog_entry)
if entries:
feed.verbose_progress('Added %s entries from backlog' % len(entries), log)
return entries
register_plugin(InputBacklog, 'backlog', builtin=True, api_ver=2)
|
Python
| 0.000001
|
@@ -2067,32 +2067,40 @@
abort(self, feed
+, config
):%0A %22%22%22Re
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.