commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
822cc689ce44b1c43ac118b2a13c6d0024d2e194
|
tests/raw_text_tests.py
|
tests/raw_text_tests.py
|
from nose.tools import istest, assert_equal
from mammoth.raw_text import extract_raw_text_from_element
from mammoth import documents
@istest
def raw_text_of_text_element_is_value():
assert_equal("Hello", extract_raw_text_from_element(documents.Text("Hello")))
@istest
def raw_text_of_paragraph_is_terminated_with_newlines():
paragraph = documents.paragraph(children=[documents.Text("Hello")])
assert_equal("Hello\n\n", extract_raw_text_from_element(paragraph))
@istest
def non_text_element_without_children_has_no_raw_text():
tab = documents.Tab()
assert not hasattr(tab, "children")
assert_equal("", extract_raw_text_from_element(documents.Tab()))
|
from nose.tools import istest, assert_equal
from mammoth.raw_text import extract_raw_text_from_element
from mammoth import documents
@istest
def text_element_is_converted_to_text_content():
element = documents.Text("Hello.")
result = extract_raw_text_from_element(element)
assert_equal("Hello.", result)
@istest
def paragraphs_are_terminated_with_newlines():
element = documents.paragraph(
children=[
documents.Text("Hello "),
documents.Text("world."),
],
)
result = extract_raw_text_from_element(element)
assert_equal("Hello world.\n\n", result)
@istest
def children_are_recursively_converted_to_text():
element = documents.document([
documents.paragraph(
[
documents.text("Hello "),
documents.text("world.")
],
{}
)
])
result = extract_raw_text_from_element(element)
assert_equal("Hello world.\n\n", result)
@istest
def non_text_element_without_children_is_converted_to_empty_string():
element = documents.line_break
assert not hasattr(element, "children")
result = extract_raw_text_from_element(element)
assert_equal("", result)
|
Make raw text tests consistent with mammoth.js
|
Make raw text tests consistent with mammoth.js
|
Python
|
bsd-2-clause
|
mwilliamson/python-mammoth
|
fd5387f1bb8ac99ed421c61fdff777316a4d3191
|
tests/test_publisher.py
|
tests/test_publisher.py
|
import pytest
import pika
from mettle.settings import get_settings
from mettle.publisher import publish_event
def test_long_routing_key():
settings = get_settings()
conn = pika.BlockingConnection(pika.URLParameters(settings.rabbit_url))
chan = conn.channel()
exchange = settings['state_exchange']
chan.exchange_declare(exchange=exchange, type='topic', durable=True)
with pytest.raises(ValueError):
publish_event(chan, exchange, dict(
description=None,
tablename='a' * 8000,
name="foo",
pipeline_names=None,
id=15,
updated_by='vagrant',
))
|
import pytest
import pika
from mettle.settings import get_settings
from mettle.publisher import publish_event
@pytest.mark.xfail(reason="Need RabbitMQ fixture")
def test_long_routing_key():
settings = get_settings()
conn = pika.BlockingConnection(pika.URLParameters(settings.rabbit_url))
chan = conn.channel()
exchange = settings['state_exchange']
chan.exchange_declare(exchange=exchange, type='topic', durable=True)
with pytest.raises(ValueError):
publish_event(chan, exchange, dict(
description=None,
tablename='a' * 8000,
name="foo",
pipeline_names=None,
id=15,
updated_by='vagrant',
))
|
Mark test as xfail so that releases can be cut
|
Mark test as xfail so that releases can be cut
|
Python
|
mit
|
yougov/mettle,yougov/mettle,yougov/mettle,yougov/mettle
|
53636a17cd50d704b7b4563d0b23a474677051f4
|
hub/prototype/config.py
|
hub/prototype/config.py
|
# put this into ~/.alphahub/config.py and make sure it's
# not readable by anyone else (it contains passwords!)
# the host we run on and want to receive packets on; note
# that "localhost" is probably the wrong thing here, you
# want your actual host name here so the sockets bind the
# right way and receive packets from the outside
HOST = "the.hub.machine.tld"
# the servers we listen to; for now each server can just
# have one port and secret key on the hub even if it runs
# multiple game servers; not sure if we need to allow more
# than that yet :-/
SERVERS = {
"some.game.server.tld": (42, "somesecret"),
}
# the other hubs we echo to; note that we don't yet change
# the packets in any way, so they'll look like they really
# come from us; not good, but we'll need to define a new
# packet format for forwarded userinfo strings first, then
# we can fix this :-/
HUBS = {
"some.hub.server.tld": (84, "anothersecret"),
}
|
# put this into ~/.alphahub/config.py and make sure it's
# not readable by anyone else (it contains passwords!)
# the host we run on and want to receive packets on; note
# that "localhost" is probably the wrong thing here, you
# want your actual host name here so the sockets bind the
# right way and receive packets from the outside
HOST = "the.hub.machine.tld"
# the servers we listen to; for now each server can just
# have one port and secret key on the hub even if it runs
# multiple game servers; not sure if we need to allow more
# than that yet :-/
SERVERS = {
"some.game.server.tld": (42, "somesecret"),
"some.other.game.tld": (543, "monkeyspam"),
}
# the other hubs we echo to; note that we don't yet change
# the packets in any way, so they'll look like they really
# come from us; not good, but we'll need to define a new
# packet format for forwarded userinfo strings first, then
# we can fix this :-/
HUBS = {
"some.hub.server.tld": (84, "anothersecret"),
}
|
Make sure we give an example for two servers.
|
Make sure we give an example for two servers.
|
Python
|
agpl-3.0
|
madprof/alpha-hub
|
d185407ac4caf5648ef4c12eab83fec81c307407
|
tests/test_trackable.py
|
tests/test_trackable.py
|
# -*- coding: utf-8 -*-
"""
test_trackable
~~~~~~~~~~~~~~
Trackable tests
"""
import pytest
from utils import authenticate, logout
pytestmark = pytest.mark.trackable()
def test_trackable_flag(app, client):
e = 'matt@lp.com'
authenticate(client, email=e)
logout(client)
authenticate(client, email=e)
with app.app_context():
user = app.security.datastore.find_user(email=e)
assert user.last_login_at is not None
assert user.current_login_at is not None
assert user.last_login_ip == 'untrackable'
assert user.current_login_ip == 'untrackable'
assert user.login_count == 2
|
# -*- coding: utf-8 -*-
"""
test_trackable
~~~~~~~~~~~~~~
Trackable tests
"""
import pytest
from utils import authenticate, logout
pytestmark = pytest.mark.trackable()
def test_trackable_flag(app, client):
e = 'matt@lp.com'
authenticate(client, email=e)
logout(client)
authenticate(client, email=e, headers={'X-Forwarded-For': '127.0.0.1'})
with app.app_context():
user = app.security.datastore.find_user(email=e)
assert user.last_login_at is not None
assert user.current_login_at is not None
assert user.last_login_ip == 'untrackable'
assert user.current_login_ip == '127.0.0.1'
assert user.login_count == 2
|
Add mock X-Forwarded-For header in trackable tests
|
Add mock X-Forwarded-For header in trackable tests
|
Python
|
mit
|
pawl/flask-security,reustle/flask-security,jonafato/flask-security,asmodehn/flask-security,quokkaproject/flask-security,LeonhardPrintz/flask-security-fork,dommert/flask-security,LeonhardPrintz/flask-security-fork,fuhrysteve/flask-security,CodeSolid/flask-security,simright/flask-security,inveniosoftware/flask-security-fork,x5a/flask-security,mafrosis/flask-security,Samael500/flask-security,dlakata/flask-security,inveniosoftware/flask-security-fork,fuhrysteve/flask-security,inveniosoftware/flask-security-fork,redpandalabs/flask-security,fmerges/flask-security,wjt/flask-security,CodeSolid/flask-security,yingbo/flask-security,asmodehn/flask-security,reustle/flask-security,felix1m/flask-security,themylogin/flask-security,a-pertsev/flask-security,GregoryVigoTorres/flask-security,x5a/flask-security,quokkaproject/flask-security,tatataufik/flask-security,Samael500/flask-security,jonafato/flask-security,mik3cap/private-flask-security,a-pertsev/flask-security,guoqiao/flask-security,themylogin/flask-security,LeonhardPrintz/flask-security-fork,GregoryVigoTorres/flask-security,dommert/flask-security,fmerges/flask-security,yingbo/flask-security,mik3cap/private-flask-security,pawl/flask-security,simright/flask-security,nfvs/flask-security,tatataufik/flask-security,dlakata/flask-security,felix1m/flask-security,covertgeek/flask-security,mafrosis/flask-security,wjt/flask-security,covertgeek/flask-security,mattupstate/flask-security,redpandalabs/flask-security,guoqiao/flask-security,mattupstate/flask-security,nfvs/flask-security
|
3bd9214465547ff6cd0f7ed94edf8dacf10135b5
|
registration/backends/simple/urls.py
|
registration/backends/simple/urls.py
|
"""
URLconf for registration and activation, using django-registration's
one-step backend.
If the default behavior of these views is acceptable to you, simply
use a line like this in your root URLconf to set up the default URLs
for registration::
(r'^accounts/', include('registration.backends.simple.urls')),
This will also automatically set up the views in
``django.contrib.auth`` at sensible default locations.
If you'd like to customize registration behavior, feel free to set up
your own URL patterns for these views instead.
"""
from django.conf.urls import include, url
from django.views.generic.base import TemplateView
from registration.backends.simple.views import RegistrationView
urlpatterns = [
url(r'^register/$',
RegistrationView.as_view(),
name='registration_register'),
url(r'^register/closed/$',
TemplateView.as_view(
template_name='registration/registration_closed.html'),
name='registration_disallowed'),
url(r'', include('registration.auth_urls')),
]
|
"""
URLconf for registration and activation, using django-registration's
one-step backend.
If the default behavior of these views is acceptable to you, simply
use a line like this in your root URLconf to set up the default URLs
for registration::
(r'^accounts/', include('registration.backends.simple.urls')),
This will also automatically set up the views in
``django.contrib.auth`` at sensible default locations.
If you'd like to customize registration behavior, feel free to set up
your own URL patterns for these views instead.
"""
from django.conf.urls import include, url
from django.views.generic.base import TemplateView
from .views import RegistrationView
urlpatterns = [
url(r'^register/$',
RegistrationView.as_view(),
name='registration_register'),
url(r'^register/closed/$',
TemplateView.as_view(
template_name='registration/registration_closed.html'),
name='registration_disallowed'),
url(r'', include('registration.auth_urls')),
]
|
Clean up an import in simple backend URLs.
|
Clean up an import in simple backend URLs.
|
Python
|
bsd-3-clause
|
dirtycoder/django-registration,ubernostrum/django-registration,myimages/django-registration,tdruez/django-registration,awakeup/django-registration
|
4dfbe6ea079b32644c9086351f911ce1a2b2b0e1
|
easy_maps/geocode.py
|
easy_maps/geocode.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.utils.encoding import smart_str
from geopy import geocoders
from geopy.exc import GeocoderServiceError
class Error(Exception):
pass
def google_v3(address):
"""
Given an address, return ``(computed_address, (latitude, longitude))``
tuple using Google Geocoding API v3.
"""
try:
g = geocoders.GoogleV3()
address = smart_str(address)
return g.geocode(address, exactly_one=False)[0]
except (UnboundLocalError, ValueError, GeocoderServiceError) as e:
raise Error(e)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.utils.encoding import smart_str
from geopy import geocoders
from geopy.exc import GeocoderServiceError
class Error(Exception):
pass
def google_v3(address):
"""
Given an address, return ``(computed_address, (latitude, longitude))``
tuple using Google Geocoding API v3.
"""
try:
g = geocoders.GoogleV3()
address = smart_str(address)
results = g.geocode(address, exactly_one=False)
if results is not None:
return results[0]
raise Error('No results found')
except (UnboundLocalError, ValueError, GeocoderServiceError) as e:
raise Error(e)
|
Resolve the 500 error when google send a no results info
|
Resolve the 500 error when google send a no results info
|
Python
|
mit
|
duixteam/django-easy-maps,kmike/django-easy-maps,Gonzasestopal/django-easy-maps,kmike/django-easy-maps,bashu/django-easy-maps,bashu/django-easy-maps,Gonzasestopal/django-easy-maps
|
7c6f1bfca63ea0db8e07156b5122d0986b9cd1a5
|
backend/breach/tests/test_sniffer.py
|
backend/breach/tests/test_sniffer.py
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint, '147.102.239.229', 'dionyziz.com', 'wlan0', '8080')
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start()
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read()
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_delete(self, requests):
self.sniffer.delete()
self.assertTrue(requests.post.called)
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
sniffer_params = {
'snifferendpoint': self.endpoint,
'sourceip': '147.102.239.229',
'host': 'dionyziz.com',
'interface': 'wlan0',
'port': '8080',
'calibration_wait': 0.0
}
self.sniffer = Sniffer(sniffer_params)
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start()
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read()
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_delete(self, requests):
self.sniffer.delete()
self.assertTrue(requests.post.called)
|
Update sniffer tests with new argument passing
|
Update sniffer tests with new argument passing
|
Python
|
mit
|
dimkarakostas/rupture,dionyziz/rupture,esarafianou/rupture,dionyziz/rupture,esarafianou/rupture,dimkarakostas/rupture,dimriou/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,dimriou/rupture,esarafianou/rupture,dimkarakostas/rupture
|
34e17142f565cfc27c15522212c4240944cb4001
|
sauce/lib/helpers.py
|
sauce/lib/helpers.py
|
# -*- coding: utf-8 -*-
"""WebHelpers used in SAUCE.
@author: moschlar
"""
from tg import url as tgurl
from webhelpers import date, feedgenerator, html, number, misc, text
import re, textwrap
#log = logging.getLogger(__name__)
# shortcut for links
link_to = html.tags.link_to
def link(label, url='', **attrs):
return link_to(label, tgurl(url), **attrs)
def strftimedelta(delta, format='%D Days %hh:%mm:%ss'):
'''Return a string representing the timedelta element.
Possible format codes are:
%D days
%h hours
%hh hours with leading zero
%m minutes
%mm minutes with leading zero
%s seconds
%ss seconds with leading zero
'''
totalSeconds = delta.seconds
hours, remainder = divmod(totalSeconds, 3600)
minutes, seconds = divmod(remainder, 60)
result = format.replace('%D', str(delta.days)).\
replace('%hh', '%02d' % hours).replace('%mm', '%02d' % minutes).\
replace('%ss', '%02d' % seconds).\
replace('%h', str(hours)).replace('%m', str(minutes)).\
replace('%s', str(seconds))
return result
def striphtml(text):
return re.sub('<[^<]+?>', ' ', text).strip()
def cut(text, max=200):
if len(text) < max:
return text
else:
return textwrap.wrap(text, max)[0] + ' ...'
|
# -*- coding: utf-8 -*-
"""WebHelpers used in SAUCE.
@author: moschlar
"""
from datetime import datetime
from tg import url as tgurl
#from webhelpers import date, feedgenerator, html, number, misc, text
import webhelpers as w
from webhelpers.html.tags import link_to
from webhelpers.text import truncate
from webhelpers.date import distance_of_time_in_words
import re
#log = logging.getLogger(__name__)
cut = lambda text, max=200: truncate(text, max, whole_word=True)
strftimedelta = lambda delta, granularity='minute': distance_of_time_in_words(datetime.now(), datetime.now()+delta, granularity)
def link(label, url='', **attrs):
return link_to(label, tgurl(url), **attrs)
def striphtml(text):
return re.sub('<[^<]+?>', ' ', text).strip()
|
Replace my own helper functions with webhelper ones
|
Replace my own helper functions with webhelper ones
|
Python
|
agpl-3.0
|
moschlar/SAUCE,moschlar/SAUCE,moschlar/SAUCE,moschlar/SAUCE
|
25993238cb18212a2b83b2d6b0aa98939d38f192
|
scripts/lwtnn-split-keras-network.py
|
scripts/lwtnn-split-keras-network.py
|
#!/usr/bin/env python3
"""
Convert a keras model, saved with model.save(...) to a weights and
architecture component.
"""
import argparse
def get_args():
d = '(default: %(default)s)'
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('model')
parser.add_argument('-w','--weight-file-name', default='weights.h5',
help=d)
parser.add_argument('-a', '--architecture-file-name',
default='architecture.json', help=d)
return parser.parse_args()
def run():
args = get_args()
import keras
m = keras.models.load_model(args.model)
m.save_weights(args.weight_file_name)
with open(args.architecture_file_name,'w') as arch:
arch.write(m.to_json(indent=2))
if __name__ == '__main__':
run()
|
#!/usr/bin/env python3
"""
Convert a keras model, saved with model.save(...) to a weights and
architecture component.
"""
import argparse
def get_args():
d = '(default: %(default)s)'
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('model')
parser.add_argument('-w','--weight-file-name', default='weights.h5',
help=d)
parser.add_argument('-a', '--architecture-file-name',
default='architecture.json', help=d)
return parser.parse_args()
def run():
args = get_args()
from h5py import File
import json
m = File(args.model,'r')
with File(args.weight_file_name,'w') as w:
for name, wt in w.items():
w.copy(wt, name)
arch = json.loads(m.attrs['model_config'])
with open(args.architecture_file_name,'w') as arch_file:
arch_file.write(json.dumps(arch,indent=2))
if __name__ == '__main__':
run()
|
Remove Keras from network splitter
|
Remove Keras from network splitter
Keras isn't as stable as h5py and json. This commit removes the keras dependency from the network splitting function.
|
Python
|
mit
|
lwtnn/lwtnn,lwtnn/lwtnn,lwtnn/lwtnn
|
3916efe4a017fe9e0fb1c5fe09b99f374d7a4060
|
instana/__init__.py
|
instana/__init__.py
|
"""
Instana sensor and tracer. It consists of two modules that can be used as entry points:
- sensor: activates the meter to collect and transmit all kind of built-in metrics
- tracer: OpenTracing tracer implementation. It implicitly activates the meter
"""
__author__ = 'Instana Inc.'
__copyright__ = 'Copyright 2016 Instana Inc.'
__credits__ = ['Pavlo Baron']
__license__ = 'MIT'
__version__ = '0.0.1'
__maintainer__ = 'Pavlo Baron'
__email__ = 'pavlo.baron@instana.com'
__all__ = ['sensor', 'tracer']
|
"""
Instana sensor and tracer. It consists of two modules that can be used as entry points:
- sensor: activates the meter to collect and transmit all kind of built-in metrics
- tracer: OpenTracing tracer implementation. It implicitly activates the meter
"""
__author__ = 'Instana Inc.'
__copyright__ = 'Copyright 2017 Instana Inc.'
__credits__ = ['Pavlo Baron', 'Peter Giacomo Lombardo']
__license__ = 'MIT'
__version__ = '0.6.6'
__maintainer__ = 'Peter Giacomo Lombardo'
__email__ = 'peter.lombardo@instana.com'
__all__ = ['sensor', 'tracer']
|
Update module init file; begin version stamping here.
|
Update module init file; begin version stamping here.
|
Python
|
mit
|
instana/python-sensor,instana/python-sensor
|
67fd73f8f035ac0e13a64971d9d54662df46a77f
|
karm/test/__karmutil.py
|
karm/test/__karmutil.py
|
import sys
import os
def dcopid():
'''Get dcop id of karm. Fail if more than one instance running.'''
id = stdin = stdout = None
try:
( stdin, stdout ) = os.popen2( "dcop" )
l = stdout.readline()
while l:
if l.startswith( "karm" ):
if not id: id = l
else: raise "Only one instance of karm may be running."
l = stdout.readline()
if not id:
raise "No karm instance found. Try running dcop at command-line to verify it works."
except:
if stdin: stdin.close()
if stdout: stdout.close()
print sys.exc_info()[0]
sys.exit(1)
stdin.close()
stdout.close()
# strip trailing newline
return id.strip()
def test( goal, actual ):
'''Raise exception if goal != actual.'''
if goal != actual:
path, scriptname = os.path.split( sys.argv[0] )
raise "%s: expected '%s', got '%s'" % ( scriptname, goal, actual )
|
import sys
import os
class KarmTestError( Exception ): pass
def dcopid():
'''Get dcop id of karm. Fail if more than one instance running.'''
id = stdin = stdout = None
( stdin, stdout ) = os.popen2( "dcop" )
l = stdout.readline()
while l:
if l.startswith( "karm" ):
if not id: id = l
else: raise KarmTestError( "Only one instance of karm may be running." )
l = stdout.readline()
if not id:
raise KarmTestError( "No karm instance found. Try running dcop at command-line to verify it works." )
stdin.close()
stdout.close()
# strip trailing newline
return id.strip()
def test( goal, actual ):
'''Raise exception if goal != actual.'''
if goal != actual:
path, scriptname = os.path.split( sys.argv[0] )
raise KarmTestError( "%s: expected '%s', got '%s'" % ( scriptname, goal, actual ) )
|
Add KarmTestError we can distinguish and print full tracebacks for unexpected errors. Delete exception trapping--let the test scripts do that.
|
Add KarmTestError we can distinguish and print full tracebacks for unexpected errors. Delete exception trapping--let the test scripts do that.
svn path=/trunk/kdepim/; revision=367066
|
Python
|
lgpl-2.1
|
lefou/kdepim-noakonadi,lefou/kdepim-noakonadi,lefou/kdepim-noakonadi,lefou/kdepim-noakonadi,lefou/kdepim-noakonadi,lefou/kdepim-noakonadi
|
d237c121955b7249e0e2ab5580d2abc2d19b0f25
|
noveltorpedo/models.py
|
noveltorpedo/models.py
|
from django.db import models
class Author(models.Model):
name = models.CharField(max_length=255)
def __str__(self):
return self.name
class Story(models.Model):
author = models.ForeignKey(Author, on_delete=models.CASCADE)
title = models.CharField(max_length=255)
contents = models.TextField(default='')
def __str__(self):
return self.title
|
from django.db import models
class Author(models.Model):
name = models.CharField(max_length=255)
def __str__(self):
return self.name
class Story(models.Model):
authors = models.ManyToManyField(Author)
title = models.CharField(max_length=255)
contents = models.TextField(default='')
def __str__(self):
return self.title
|
Allow a story to have many authors
|
Allow a story to have many authors
|
Python
|
mit
|
NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo
|
8b33c216b2da4a7bf480d79675325134777db9ae
|
wsme/release.py
|
wsme/release.py
|
name = "WSME"
version = "0.3"
description = "Web Services Made Easy"
author = "Christophe de Vienne"
email = "python-wsme@googlegroups.com"
url = "http://bitbucket.org/cdevienne/wsme"
license = "MIT"
|
name = "WSME"
version = "0.3"
description = """Web Services Made Easy makes it easy to \
implement multi-protocol webservices."""
author = "Christophe de Vienne"
email = "python-wsme@googlegroups.com"
url = "http://bitbucket.org/cdevienne/wsme"
license = "MIT"
|
Change a bit the short description to make it more explicit
|
Change a bit the short description to make it more explicit
|
Python
|
mit
|
stackforge/wsme
|
0ac053e9c27f8381bb1aceff0bfdb12fc9c952cb
|
tests/test_config.py
|
tests/test_config.py
|
from pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
assert isinstance(incomplete_config.riemann, RiemannConfig)
|
from pytest import fixture
from oshino.config import Config, RiemannConfig
@fixture
def base_config():
return Config({"riemann": {"host": "localhost",
"port": 5555
},
"interval": 5
})
@fixture
def incomplete_config():
return Config({})
class TestBase(object):
def test_base_config_interval(self, base_config):
assert base_config.interval == 5
class TestRiemann(object):
def test_base_config_get_riemann(self, base_config):
assert isinstance(base_config.riemann, RiemannConfig)
def test_incomplete_config_get_riemann(self, incomplete_config):
assert isinstance(incomplete_config.riemann, RiemannConfig)
def test_riemann_default_host(self, incomplete_config):
assert incomplete_config.riemann.host == "localhost"
def test_riemann_default_port(self, incomplete_config):
assert incomplete_config.riemann.port == 5555
|
Test default values for Riemann
|
Test default values for Riemann
|
Python
|
mit
|
CodersOfTheNight/oshino
|
d8b3e511b00c9b5a8c7951e16d06173fe93d6501
|
engine/util.py
|
engine/util.py
|
import json
import threading
import Queue
from datetime import datetime,date,timedelta
import time
from tornado import gen
from tornado.ioloop import IOLoop
@gen.coroutine
def async_sleep(seconds):
yield gen.Task(IOLoop.instance().add_timeout, time.time() + seconds)
def delayed(seconds):
def f(x):
time.sleep(seconds)
return x
return f
def call_in_background(f, *args):
'''Call function in background in a separate thread / coroutine'''
result = Queue.Queue(1)
t = threading.Thread(target=lambda: result.put(f(*args)))
t.start()
return result
def get_id_from_slug(slug):
'''Remove '/' from a part of url if it is present'''
return slug if slug[-1] != '/' else slug[:-1]
def my_print(s):
'''Pretty printing with timestamp'''
print "[" + str(datetime.now()) + "] " + s
class DateTimeEncoder(json.JSONEncoder):
'''Auxuliary class that lets us encode dates in json'''
def default(self, obj):
if isinstance(obj, datetime):
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
elif isinstance(obj, timedelta):
return (datetime.min + obj).time().isoformat()
else:
return super(DateTimeEncoder, self).default(obj)
|
import json
import threading
import Queue
from datetime import datetime,date,timedelta
import time
import numpy
from tornado import gen
from tornado.ioloop import IOLoop
@gen.coroutine
def async_sleep(seconds):
yield gen.Task(IOLoop.instance().add_timeout, time.time() + seconds)
def delayed(seconds):
def f(x):
time.sleep(seconds)
return x
return f
def call_in_background(f, *args):
'''Call function in background in a separate thread / coroutine'''
result = Queue.Queue(1)
t = threading.Thread(target=lambda: result.put(f(*args)))
t.start()
return result
def get_id_from_slug(slug):
'''Remove '/' from a part of url if it is present'''
return slug if slug[-1] != '/' else slug[:-1]
def my_print(s):
'''Pretty printing with timestamp'''
print "[" + str(datetime.now()) + "] " + s
class DateTimeEncoder(json.JSONEncoder):
'''Auxuliary class that lets us encode dates in json'''
def default(self, obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
elif isinstance(obj, datetime):
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
elif isinstance(obj, timedelta):
return (datetime.min + obj).time().isoformat()
elif isinstance(obj, numpy.generic):
return numpy.asscalar(obj)
else:
return super(DateTimeEncoder, self).default(obj)
|
Fix conditions in JSON encoder
|
Fix conditions in JSON encoder
|
Python
|
apache-2.0
|
METASPACE2020/sm-engine,SpatialMetabolomics/SM_distributed,METASPACE2020/sm-engine,SpatialMetabolomics/SM_distributed,SpatialMetabolomics/SM_distributed,SpatialMetabolomics/SM_distributed
|
43905a102092bdd50de1f8997cd19cb617b348b3
|
tests/cart_tests.py
|
tests/cart_tests.py
|
import importlib
import os
import sys
import unittest
import code
import struct
code_path = os.path.dirname(__file__)
code_path = os.path.join(code_path, os.pardir)
sys.path.append(code_path)
import MOS6502
class TestCartHeaderParsing(unittest.TestCase):
def testMagic(self):
cpu = MOS6502.CPU()
cpu.loadRom("../smb1.nes")
self.assertEqual(cpu.rom != None, True)
def testRomBanks(self):
cpu = MOS6502.CPU()
cpu.loadRom("../smb1.nes")
self.assertEqual(cpu.rom.numRomBanks, 2)
self.assertEqual(cpu.rom.numVromBanks, 1)
if __name__ == '__main__':
unittest.main()
|
import importlib
import os
import sys
import unittest
import code
import struct
code_path = os.path.dirname(__file__)
code_path = os.path.join(code_path, os.pardir)
sys.path.append(code_path)
import MOS6502
class TestCartHeaderParsing(unittest.TestCase):
def testMagic(self):
cpu = MOS6502.CPU()
cpu.loadRom("../smb1.nes")
self.assertEqual(cpu.rom != None, True)
def testRomBanks(self):
cpu = MOS6502.CPU()
cpu.loadRom("../smb1.nes")
self.assertEqual(cpu.rom.numRomBanks, 2)
self.assertEqual(cpu.rom.numVromBanks, 1)
startAddr = cpu.ReadMemWord(cpu.reset)
firstByte = cpu.ReadMemory(startAddr)
self.assertEqual(firstByte, 0x78)
if __name__ == '__main__':
unittest.main()
|
Use the reset adder from the banks properly
|
Use the reset adder from the banks properly
|
Python
|
bsd-2-clause
|
pusscat/refNes
|
f0e8999ad139a8da8d3762ee1d318f23928edd9c
|
tests/modelstest.py
|
tests/modelstest.py
|
# Copyright (C) 2010 rPath, Inc.
import testsuite
testsuite.setup()
from testrunner import testcase
from rpath_repeater import models
class TestBase(testcase.TestCaseWithWorkDir):
pass
class ModelsTest(TestBase):
def testModelToXml(self):
files = models.ImageFiles([
models.ImageFile(title="i1", sha1="s1", size=1),
models.ImageFile(title="i2", sha1="s2"),
])
metadata = models.ImageMetadata(owner="me")
files.append(metadata)
self.failUnlessEqual(files.toXml(),
'<files><file><title>i1</title><size>1</size><sha1>s1</sha1></file><file><title>i2</title><sha1>s2</sha1></file><metadata><owner>me</owner></metadata></files>')
testsuite.main()
|
# Copyright (C) 2010 rPath, Inc.
import testsuite
testsuite.setup()
from testrunner import testcase
from rpath_repeater import models
class TestBase(testcase.TestCaseWithWorkDir):
pass
class ModelsTest(TestBase):
def testModelToXml(self):
files = models.ImageFiles([
models.ImageFile(title="i1", sha1="s1", size=1),
models.ImageFile(title="i2", sha1="s2"),
])
self.failUnlessEqual(files.toXml(),
'<files><file><title>i1</title><size>1</size><sha1>s1</sha1></file><file><title>i2</title><sha1>s2</sha1></file></files>')
testsuite.main()
|
Fix test after metadata changes
|
Fix test after metadata changes
|
Python
|
apache-2.0
|
sassoftware/rpath-repeater
|
3a5fb18a385ffd0533da94632d917e3c0bcfb051
|
tests/test_nulls.py
|
tests/test_nulls.py
|
from tests.models import EventWithNulls, EventWithNoNulls
import pytest
@pytest.mark.django_db
def test_recurs_can_be_explicitly_none_if_none_is_allowed():
# Check we can save None correctly
event = EventWithNulls.objects.create(recurs=None)
assert event.recurs is None
# Check we can deserialize None correctly
reloaded = EventWithNulls.objects.get(pk=event.pk)
assert reloaded.recurs is None
@pytest.mark.django_db
def test_recurs_cannot_be_explicitly_none_if_none_is_disallowed():
with pytest.raises(ValueError):
EventWithNoNulls.objects.create(recurs=None)
|
from recurrence import Recurrence
from tests.models import EventWithNulls, EventWithNoNulls
import pytest
@pytest.mark.django_db
def test_recurs_can_be_explicitly_none_if_none_is_allowed():
# Check we can save None correctly
event = EventWithNulls.objects.create(recurs=None)
assert event.recurs is None
# Check we can deserialize None correctly
reloaded = EventWithNulls.objects.get(pk=event.pk)
assert reloaded.recurs is None
@pytest.mark.django_db
def test_recurs_cannot_be_explicitly_none_if_none_is_disallowed():
with pytest.raises(ValueError):
EventWithNoNulls.objects.create(recurs=None)
@pytest.mark.django_db
def test_recurs_can_be_empty_even_if_none_is_disallowed():
event = EventWithNoNulls.objects.create(recurs=Recurrence())
assert event.recurs == Recurrence()
|
Add a test for saving an empty recurrence object
|
Add a test for saving an empty recurrence object
I wasn't sure whether this would fail on models which don't
accept null values. Turns out it's allowed, so we should
make sure it stays allowed.
|
Python
|
bsd-3-clause
|
linux2400/django-recurrence,linux2400/django-recurrence,django-recurrence/django-recurrence,Nikola-K/django-recurrence,FrankSalad/django-recurrence,Nikola-K/django-recurrence,FrankSalad/django-recurrence,django-recurrence/django-recurrence
|
cd2e4cce080413feb7685ec9a788327b8bca9053
|
tests/test_style.py
|
tests/test_style.py
|
import pkg_resources
import unittest
class CodeStyleTestCase(unittest.TestCase):
def test_code_style(self):
flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8')
try:
flake8([])
except SystemExit as e:
if e.code != 0:
self.fail('Code style checks failed')
|
import logging
import pkg_resources
import unittest
class CodeStyleTestCase(unittest.TestCase):
def test_code_style(self):
logger = logging.getLogger('flake8')
logger.setLevel(logging.ERROR)
flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8')
try:
flake8([])
except SystemExit as e:
if e.code != 0:
self.fail('Code style checks failed')
|
Decrease noise from code-style test
|
Decrease noise from code-style test
|
Python
|
mit
|
ministryofjustice/bai2
|
59e47de06a175084538140481c7a702ff020e919
|
libvcs/__about__.py
|
libvcs/__about__.py
|
__title__ = 'libvcs'
__package_name__ = 'libvcs'
__description__ = 'vcs abstraction layer'
__version__ = '0.3.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/vcs-python/libvcs'
__pypi__ = 'https://pypi.org/project/libvcs/'
__email__ = 'tony@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
|
__title__ = 'libvcs'
__package_name__ = 'libvcs'
__description__ = 'vcs abstraction layer'
__version__ = '0.3.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/vcs-python/libvcs'
__docs__ = 'https://libvcs.git-pull.com'
__tracker__ = 'https://github.com/vcs-python/libvcs/issues'
__pypi__ = 'https://pypi.org/project/libvcs/'
__email__ = 'tony@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016- Tony Narlock'
|
Add metadata for tracker and docs
|
Add metadata for tracker and docs
|
Python
|
mit
|
tony/libvcs
|
d8c1c7da47e2568cecc1fd6dff0fec7661b39125
|
turbosms/routers.py
|
turbosms/routers.py
|
class SMSRouter(object):
app_label = 'sms'
db_name = 'sms'
def db_for_read(self, model, **hints):
if model._meta.app_label == self.app_label:
return self.db_name
return None
def db_for_write(self, model, **hints):
if model._meta.app_label == self.app_label:
return self.db_name
return None
def allow_relation(self, obj1, obj2, **hints):
if obj1._meta.app_label == self.app_label or \
obj2._meta.app_label == self.app_label:
return False
return None
def allow_migrate(self, db, app_label, model_name=None, **hints):
if app_label == self.app_label:
return False
return None
|
class TurboSMSRouter(object):
app_label = 'turbosms'
db_name = 'turbosms'
def db_for_read(self, model, **hints):
if model._meta.app_label == self.app_label:
return self.db_name
return None
def db_for_write(self, model, **hints):
if model._meta.app_label == self.app_label:
return self.db_name
return None
def allow_relation(self, obj1, obj2, **hints):
if obj1._meta.app_label == self.app_label or \
obj2._meta.app_label == self.app_label:
return False
return None
def allow_migrate(self, db, app_label, model_name=None, **hints):
if app_label == self.app_label:
return False
return None
|
Fix bug in sms router.
|
Fix bug in sms router.
|
Python
|
isc
|
pmaigutyak/mp-turbosms
|
5fade4bc26c2637a479a69051cee37a1a859c71a
|
load_hilma.py
|
load_hilma.py
|
#!/usr/bin/env python3
import xml.etree.ElementTree as ET
import sys
import pymongo
from pathlib import Path
import argh
from xml2json import etree_to_dict
from hilma_conversion import get_handler
hilma_to_dict = lambda notice: etree_to_dict(notice, get_handler)
def load_hilma_xml(inputfile, collection):
root = ET.parse(inputfile).getroot()
notices = list(root.iterfind('WRAPPED_NOTICE'))
notices = map(hilma_to_dict, notices)
collection.ensure_index('ID', unique=True)
for n in notices:
# Use the ID as primary key
n.update('_id', n['ID'])
collection.save(n)
def sync_hilma_xml_directory(directory, mongo_uri=None, mongo_db='openhilma'):
if mongo_uri is None:
client = pymongo.MongoClient()
else:
client = pymongo.MongoClient(mongo_uri)
db = client[mongo_db]
collection = db.notices
paths = sorted(Path(directory).glob("*.xml"))
for fpath in paths:
load_hilma_xml(fpath.open(), collection)
if __name__ == '__main__':
argh.dispatch_command(sync_hilma_xml_directory)
|
#!/usr/bin/env python3
import xml.etree.ElementTree as ET
import sys
import pymongo
from pathlib import Path
import argh
from xml2json import etree_to_dict
from hilma_conversion import get_handler
hilma_to_dict = lambda notice: etree_to_dict(notice, get_handler)
def load_hilma_xml(inputfile, collection):
root = ET.parse(inputfile).getroot()
notices = list(root.iterfind('WRAPPED_NOTICE'))
notices = map(hilma_to_dict, notices)
for n in notices:
# Use the ID as primary key
n.update({'_id': n['ID']})
collection.save(n)
def sync_hilma_xml_directory(directory, mongo_uri=None, mongo_db='openhilma'):
if mongo_uri is None:
client = pymongo.MongoClient()
else:
client = pymongo.MongoClient(mongo_uri)
db = client[mongo_db]
collection = db.notices
paths = sorted(Path(directory).glob("*.xml"))
for fpath in paths:
load_hilma_xml(fpath.open(), collection)
if __name__ == '__main__':
argh.dispatch_command(sync_hilma_xml_directory)
|
Use the notice ID as priary key
|
Use the notice ID as priary key
Gentlemen, drop your DBs!
|
Python
|
agpl-3.0
|
jampekka/openhilma
|
816874f692c7ef9de5aa8782fab1747e96199229
|
moksha/live/flot.py
|
moksha/live/flot.py
|
# This file is part of Moksha.
#
# Moksha is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Moksha is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Moksha. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2008, Red Hat, Inc.
# Authors: Luke Macken <lmacken@redhat.com>
from tw.jquery.flot import FlotWidget
from moksha.live import LiveWidget
class LiveFlotWidget(LiveWidget):
""" A live graphing widget """
topic = 'flot_example'
children = [FlotWidget('flot')]
params = ['id', 'data', 'options', 'height', 'width',
'onconnectedframe', 'onmessageframe']
onmessageframe = '$.plot($("#${id}"),json[0]["data"],json[0]["options"])'
template = '<div id="${id}" style="width:${width};height:${height};" />'
height = '250px'
width = '390px'
options = {}
data = [{}]
|
# This file is part of Moksha.
#
# Moksha is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Moksha is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Moksha. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2008, Red Hat, Inc.
# Authors: Luke Macken <lmacken@redhat.com>
from tw.jquery.flot import FlotWidget
from moksha.live import LiveWidget
class LiveFlotWidget(LiveWidget):
""" A live graphing widget """
topic = 'flot_example'
params = ['id', 'data', 'options', 'height', 'width', 'onmessageframe']
children = [FlotWidget('flot')]
onmessageframe = '$.plot($("#${id}"),json[0]["data"],json[0]["options"])'
template = '<div id="${id}" style="width:${width};height:${height};" />'
height = '250px'
width = '390px'
options = {}
data = [{}]
|
Clean up some LiveFlotWidget params
|
Clean up some LiveFlotWidget params
|
Python
|
apache-2.0
|
ralphbean/moksha,lmacken/moksha,lmacken/moksha,pombredanne/moksha,ralphbean/moksha,mokshaproject/moksha,pombredanne/moksha,mokshaproject/moksha,mokshaproject/moksha,pombredanne/moksha,mokshaproject/moksha,pombredanne/moksha,ralphbean/moksha,lmacken/moksha
|
8bc2b19e9aef410832555fb9962c243f0d4aef96
|
brink/decorators.py
|
brink/decorators.py
|
def require_request_model(cls, *args, validate=True, **kwargs):
"""
Makes a handler require that a request body that map towards the given model
is provided. Unless the ``validate`` option is set to ``False`` the data will
be validated against the model's fields.
The model will be passed to the handler as the last positional argument. ::
@require_request_model(Model)
async def handle_model(request, model):
return 200, model
"""
def decorator(handler):
async def new_handler(request):
body = await request.json()
model = cls(**body)
if validate:
model.validate()
return await handler(request, *args, model, **kwargs)
return new_handler
return decorator
|
import asyncio
def require_request_model(cls, *args, validate=True, **kwargs):
"""
Makes a handler require that a request body that map towards the given model
is provided. Unless the ``validate`` option is set to ``False`` the data will
be validated against the model's fields.
The model will be passed to the handler as the last positional argument. ::
@require_request_model(Model)
async def handle_model(request, model):
return 200, model
"""
def decorator(handler):
async def new_handler(request):
body = await request.json()
model = cls(**body)
if validate:
model.validate()
return await handler(request, *args, model, **kwargs)
return new_handler
return decorator
def use_ws_subhandlers(handler):
"""
Allows the handler to return any number of **subhandlers** that will be
run in parallel. This makes it much cleaner and easier to write a handler
that both listens for incoming messages on the socket connection, while
also watching a changefeed from RethinkDB.
Example usage ::
@use_ws_subhandlers
async def handle_feed(request, ws):
async def handle_incoming(_, ws):
async for msg in ws:
await Item(value=msg.data).save()
async def handle_change(_, ws):
async for item in await Item.changes():
ws.send_json(item)
return [handle_incoming, handle_change]
"""
async def new_handler(request, ws):
handlers = await handler(request, ws)
tasks = [request.app.loop.create_task(h(request, ws))
for h in handlers]
try:
await asyncio.gather(*tasks)
finally:
for task in tasks:
task.cancel()
await ws.close()
return new_handler
|
Add decorator for using websocket subhandlers
|
Add decorator for using websocket subhandlers
|
Python
|
bsd-3-clause
|
brinkframework/brink
|
2501bb03e836ac29cc1defa8591446ff217771b2
|
tests/test_model.py
|
tests/test_model.py
|
"""Sample unittests."""
import unittest2 as unittest
from domain_models import model
from domain_models import fields
class User(model.DomainModel):
"""Example user domain model."""
id = fields.Int()
email = fields.String()
first_name = fields.Unicode()
last_name = fields.Unicode()
gender = fields.String()
birth_date = fields.String()
__view_key__ = [id, email]
__unique_key__ = id
class SampleTests(unittest.TestCase):
"""Sample tests tests."""
def test_set_and_get_attrs(self):
"""Test setting and getting of domain model attributes."""
user = User()
user.id = 1
user.email = 'example@example.com'
user.first_name = 'John'
user.last_name = 'Smith'
user.gender = 'male'
user.birth_date = '05/04/1988'
self.assertEqual(user.id, 1)
self.assertEqual(user.email, 'example@example.com')
self.assertEqual(user.first_name, u'John')
self.assertEqual(user.last_name, u'Smith')
self.assertEqual(user.gender, 'male')
self.assertEqual(user.birth_date, '05/04/1988')
|
"""Sample unittests."""
import unittest2 as unittest
from domain_models import model
from domain_models import fields
class User(model.DomainModel):
"""Example user domain model."""
id = fields.Int()
email = fields.String()
first_name = fields.Unicode()
last_name = fields.Unicode()
gender = fields.String()
birth_date = fields.String()
__view_key__ = [id, email]
__unique_key__ = id
class SampleTests(unittest.TestCase):
"""Sample tests tests."""
def test_set_and_get_attrs(self):
"""Test setting and getting of domain model attributes."""
user = User()
user.id = 1
user.email = 'example@example.com'
user.first_name = 'John'
user.last_name = 'Smith'
user.gender = 'male'
user.birth_date = '05/04/1988'
self.assertEqual(user.id, 1)
self.assertEqual(user.email, 'example@example.com')
self.assertEqual(user.first_name, unicode('John'))
self.assertEqual(user.last_name, unicode('Smith'))
self.assertEqual(user.gender, 'male')
self.assertEqual(user.birth_date, '05/04/1988')
|
Fix of tests with unicode strings
|
Fix of tests with unicode strings
|
Python
|
bsd-3-clause
|
ets-labs/domain_models,ets-labs/python-domain-models,rmk135/domain_models
|
cb798ae8f7f6e810a87137a56cd04be76596a2dd
|
photutils/tests/test_psfs.py
|
photutils/tests/test_psfs.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import division
import numpy as np
from astropy.tests.helper import pytest
from photutils.psf import GaussianPSF
try:
from scipy import optimize
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
widths = [0.001, 0.01, 0.1, 1]
@pytest.mark.skipif('not HAS_SCIPY')
@pytest.mark.parametrize(('width'), widths)
def test_subpixel_gauss_psf(width):
"""
Test subpixel accuracy of Gaussian PSF by checking the sum o pixels.
"""
gauss_psf = GaussianPSF(width)
y, x = np.mgrid[-10:11, -10:11]
assert np.abs(gauss_psf(x, y).sum() - 1) < 1E-12
@pytest.mark.skipif('not HAS_SCIPY')
def test_gaussian_PSF_integral():
"""
Test if Gaussian PSF integrates to unity on larger scales.
"""
psf = GaussianPSF(10)
y, x = np.mgrid[-100:101, -100:101]
assert np.abs(psf(y, x).sum() - 1) < 1E-12
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import division
import numpy as np
from astropy.tests.helper import pytest
from ..psf import GaussianPSF
try:
from scipy import optimize
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
widths = [0.001, 0.01, 0.1, 1]
@pytest.mark.skipif('not HAS_SCIPY')
@pytest.mark.parametrize(('width'), widths)
def test_subpixel_gauss_psf(width):
"""
Test subpixel accuracy of Gaussian PSF by checking the sum o pixels.
"""
gauss_psf = GaussianPSF(width)
y, x = np.mgrid[-10:11, -10:11]
assert np.abs(gauss_psf(x, y).sum() - 1) < 1E-12
@pytest.mark.skipif('not HAS_SCIPY')
def test_gaussian_PSF_integral():
"""
Test if Gaussian PSF integrates to unity on larger scales.
"""
psf = GaussianPSF(10)
y, x = np.mgrid[-100:101, -100:101]
assert np.abs(psf(y, x).sum() - 1) < 1E-12
|
Use relative imports for consistency; pep8
|
Use relative imports for consistency; pep8
|
Python
|
bsd-3-clause
|
larrybradley/photutils,astropy/photutils
|
66df1b2719aa278c37f1c70ef550659c22d93d10
|
tests/unit/fakes.py
|
tests/unit/fakes.py
|
# Copyright 2012 Intel Inc, OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
# Copyright 2012 Intel Inc, OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
Fix Copyright Headers - Rename LLC to Foundation
|
Fix Copyright Headers - Rename LLC to Foundation
One code change, rest are in headers
Change-Id: I73f59681358629e1ad74e49d3d3ca13fcb5c2eb1
|
Python
|
apache-2.0
|
openstack/oslo.i18n,varunarya10/oslo.i18n
|
65fd070a88e06bb040e8c96babc6b4c86ca29730
|
validatish/error.py
|
validatish/error.py
|
"""
Module containing package exception classes.
"""
class Invalid(Exception):
def __init__(self, message, exceptions=None, validator=None):
Exception.__init__(self, message, exceptions)
self.message = message
self.exceptions = exceptions
self.validator = validator
def __str__(self):
return self.message
__unicode__ = __str__
def __repr__(self):
if self.exceptions:
return 'validatish.Invalid("%s", exceptions=%s, validator=%s)' % (self.message, self.exceptions, self.validator)
else:
return 'validatish.Invalid("%s", validator=%s)' % (self.message, self.validator)
@property
def errors(self):
return list(_flatten(self._fetch_errors(), _keepstrings))
def _fetch_errors(self):
if self.exceptions is None:
yield self.message
else:
for e in self.exceptions:
yield e._fetch_errors()
def _flatten(s, toiter=iter):
try:
it = toiter(s)
except TypeError:
yield s
else:
for elem in it:
for subelem in _flatten(elem, toiter):
yield subelem
def _keepstrings(seq):
if isinstance(seq, basestring):
raise TypeError
return iter(seq)
|
"""
Module containing package exception classes.
"""
class Invalid(Exception):
def __init__(self, message, exceptions=None, validator=None):
Exception.__init__(self, message, exceptions)
self.message = message
self.exceptions = exceptions
self.validator = validator
def __str__(self):
return self.message
__unicode__ = __str__
def __repr__(self):
if self.exceptions:
return 'validatish.Invalid("%s", exceptions=%s, validator=%s)' % (self.message, self.exceptions, self.validator)
else:
return 'validatish.Invalid("%s", validator=%s)' % (self.message, self.validator)
@property
def errors(self):
return list(_flatten(self._fetch_errors(), _keepstrings))
def _fetch_errors(self):
if self.exceptions is None:
yield self.message
else:
for e in self.exceptions:
yield e._fetch_errors()
# Hide Python 2.6 deprecation warning.
def _get_message(self): return self._message
def _set_message(self, message): self._message = message
message = property(_get_message, _set_message)
def _flatten(s, toiter=iter):
try:
it = toiter(s)
except TypeError:
yield s
else:
for elem in it:
for subelem in _flatten(elem, toiter):
yield subelem
def _keepstrings(seq):
if isinstance(seq, basestring):
raise TypeError
return iter(seq)
|
Hide Python 2.6 Exception.message deprecation warnings
|
Hide Python 2.6 Exception.message deprecation warnings
|
Python
|
bsd-3-clause
|
ish/validatish,ish/validatish
|
5c620a504327696b9cfe3ffc423ae7ae6e915e78
|
dec02/dec02part1.py
|
dec02/dec02part1.py
|
# Advent of Code
# Dec 2, Part 1
# @geekygirlsarah
|
# Advent of Code
# Dec 2, Part 1
# @geekygirlsarah
inputFile = "input.txt"
# Tracking vars
finalCode = ""
lastNumber = 5 # start here
tempNumber = 0
with open(inputFile) as f:
while True:
line = f.readline(-1)
if not line:
# print "End of file"
break
# print ("Line: ", line)
print ("First number=" + str(lastNumber))
for dir in line:
print("dir=" + dir)
if dir == "U":
tempNumber = lastNumber - 3
elif dir == "D":
tempNumber = lastNumber + 3
elif dir == "L":
tempNumber = lastNumber - 1
elif dir == "R":
tempNumber = lastNumber + 1
elif dir == "\n":
break
# Boundary checks to undo out of bounds
if dir == "U" and tempNumber < 1:
tempNumber = lastNumber
elif dir == "D" and tempNumber > 9:
tempNumber = lastNumber
elif dir == "L" and (tempNumber == 0 or tempNumber == 3 or tempNumber == 6):
tempNumber = lastNumber
elif dir == "R" and (tempNumber == 10 or tempNumber == 7 or tempNumber == 4):
tempNumber = lastNumber
print ("New number: " + str(tempNumber))
lastNumber = tempNumber
# last number validated, so add to code
finalCode = finalCode + str(tempNumber)
print("Final code: " + finalCode)
|
Add 12/2 part 1 solution
|
Add 12/2 part 1 solution
|
Python
|
mit
|
geekygirlsarah/adventofcode2016
|
356dd5294280db3334f86354202f0d68881254b9
|
joerd/check.py
|
joerd/check.py
|
import zipfile
import tarfile
import shutil
import tempfile
from osgeo import gdal
def is_zip(tmp):
"""
Returns True if the NamedTemporaryFile given as the argument appears to be
a well-formed Zip file.
"""
try:
zip_file = zipfile.ZipFile(tmp.name, 'r')
test_result = zip_file.testzip()
return test_result is None
except:
pass
return False
def tar_gz_has_gdal(member_name):
"""
Returns a function which, when called with a NamedTemporaryFile, returns
True if that file is a GZip-encoded TAR file containing a `member_name`
member which can be opened with GDAL.
"""
def func(tmp):
try:
tar = tarfile.open(tmp.name, mode='r:gz', errorlevel=2)
with tempfile.NamedTemporaryFile() as tmp_member:
shutil.copyfileobj(tar.extractfile(member_name), tmp_member)
return is_gdal(tmp_member)
except (tarfile.TarError, IOError, OSError) as e:
return False
def is_gdal(tmp):
"""
Returns true if the NamedTemporaryFile given as the argument appears to be
a well-formed GDAL raster file.
"""
try:
ds = gdal.Open(tmp.name)
band = ds.GetRasterBand(1)
band.ComputeBandStats()
return True
except:
pass
return False
|
import zipfile
import tarfile
import shutil
import tempfile
from osgeo import gdal
def is_zip(tmp):
"""
Returns True if the NamedTemporaryFile given as the argument appears to be
a well-formed Zip file.
"""
try:
zip_file = zipfile.ZipFile(tmp.name, 'r')
test_result = zip_file.testzip()
return test_result is None
except:
pass
return False
def tar_gz_has_gdal(member_name):
"""
Returns a function which, when called with a NamedTemporaryFile, returns
True if that file is a GZip-encoded TAR file containing a `member_name`
member which can be opened with GDAL.
"""
def func(tmp):
try:
tar = tarfile.open(tmp.name, mode='r:gz', errorlevel=2)
with tempfile.NamedTemporaryFile() as tmp_member:
shutil.copyfileobj(tar.extractfile(member_name), tmp_member)
tmp_member.seek(0)
return is_gdal(tmp_member)
except (tarfile.TarError, IOError, OSError) as e:
return False
return func
def is_gdal(tmp):
"""
Returns true if the NamedTemporaryFile given as the argument appears to be
a well-formed GDAL raster file.
"""
try:
ds = gdal.Open(tmp.name)
band = ds.GetRasterBand(1)
band.ComputeBandStats()
return True
except:
pass
return False
|
Return verifier function, not None. Also reset the temporary file to the beginning before verifying it.
|
Return verifier function, not None. Also reset the temporary file to the beginning before verifying it.
|
Python
|
mit
|
mapzen/joerd,tilezen/joerd
|
9353a5e2369e819c092c94d224b09c321f5b5ff0
|
utils/get_collection_object_count.py
|
utils/get_collection_object_count.py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
import sys, os
import argparse
from deepharvest.deepharvest_nuxeo import DeepHarvestNuxeo
def main(argv=None):
parser = argparse.ArgumentParser(description='Print count of objects for a given collection.')
parser.add_argument('path', help="Nuxeo path to collection")
parser.add_argument('--pynuxrc', default='~/.pynuxrc-prod', help="rcfile for use with pynux utils")
parser.add_argument('--components', action='store_true', help="show counts for object components")
if argv is None:
argv = parser.parse_args()
dh = DeepHarvestNuxeo(argv.path, '', pynuxrc=argv.pynuxrc)
print "about to fetch objects for path {}".format(dh.path)
objects = dh.fetch_objects()
object_count = len(objects)
print "finished fetching objects. {} found".format(object_count)
if not argv.components:
return
print "about to iterate through objects and get components"
component_count = 0
for obj in objects:
components = dh.fetch_components(obj)
component_count = component_count + len(components)
print "finished fetching components. {} found".format(component_count)
print "Grand Total: {}".format(object_count + component_count)
if __name__ == "__main__":
sys.exit(main())
|
Add option to count components
|
Add option to count components
|
Python
|
bsd-3-clause
|
barbarahui/nuxeo-calisphere,barbarahui/nuxeo-calisphere
|
d84e6aa022ef5e256807738c35e5069a0a1380d7
|
app/main/forms/frameworks.py
|
app/main/forms/frameworks.py
|
from flask.ext.wtf import Form
from wtforms import BooleanField
from wtforms.validators import DataRequired, Length
from dmutils.forms import StripWhitespaceStringField
class SignerDetailsForm(Form):
signerName = StripWhitespaceStringField('Full name', validators=[
DataRequired(message="You must provide the full name of the person signing on behalf of the company."),
Length(max=255, message="You must provide a name under 256 characters.")
])
signerRole = StripWhitespaceStringField(
'Role at the company',
validators=[
DataRequired(message="You must provide the role of the person signing on behalf of the company."),
Length(max=255, message="You must provide a role under 256 characters.")
],
description='The person signing must have the authority to agree to the framework terms, '
'eg director or company secretary.'
)
class ContractReviewForm(Form):
authorisation = BooleanField(
'Authorisation',
validators=[DataRequired(message="You must confirm you have the authority to return the agreement.")]
)
|
from flask.ext.wtf import Form
from wtforms import BooleanField
from wtforms.validators import DataRequired, Length
from dmutils.forms import StripWhitespaceStringField
class SignerDetailsForm(Form):
signerName = StripWhitespaceStringField('Full name', validators=[
DataRequired(message="You must provide the full name of the person signing on behalf of the company."),
Length(max=255, message="You must provide a name under 256 characters.")
])
signerRole = StripWhitespaceStringField(
'Role at the company',
validators=[
DataRequired(message="You must provide the role of the person signing on behalf of the company."),
Length(max=255, message="You must provide a role under 256 characters.")
],
description='The person signing must have the authority to agree to the framework terms, '
'eg director or company secretary.'
)
class ContractReviewForm(Form):
authorisation = BooleanField(
'Authorisation',
validators=[DataRequired(message="You must confirm you have the authority to return the agreement.")]
)
class AcceptAgreementVariationForm(Form):
accept_changes = BooleanField(
'I accept these proposed changes',
validators=[
DataRequired(message="If you agree to the proposed changes then you must check the box before saving.")
]
)
|
Add form for accepting contract variation
|
Add form for accepting contract variation
|
Python
|
mit
|
alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend
|
f16994fd3722acba8a60157eed0630a5e2a3d387
|
macdict/cli.py
|
macdict/cli.py
|
from __future__ import absolute_import
import sys
import argparse
from macdict.dictionary import lookup_word, ensure_unicode
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('word')
return parser.parse_args()
def abort(text):
sys.stderr.write(u'%s\n' % text)
sys.exit(1)
def report(text):
sys.stdout.write(u'%s\n' % text)
sys.exit(0)
def main():
args = parse_args()
definition = lookup_word(ensure_unicode(args.word, 'utf-8'))
if definition is None:
abort(u'Definition not found for "%s"' % args.word)
else:
report(definition)
|
from __future__ import absolute_import
import sys
import argparse
from macdict.dictionary import lookup_word, ensure_unicode
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('word')
return parser.parse_args()
def abort(text):
sys.stderr.write(u'%s\n' % text)
sys.exit(1)
def report(text):
sys.stdout.write(u'%s\n' % text)
sys.exit(0)
def main():
args = parse_args()
word = ensure_unicode(args.word, 'utf-8')
definition = lookup_word(word)
if definition is None:
abort(u'Definition not found for "%s"' % word)
else:
report(definition)
|
Fix unicode decoding on error messages
|
Fix unicode decoding on error messages
|
Python
|
mit
|
tonyseek/macdict
|
75171ed80079630d22463685768072ad7323e653
|
boundary/action_installed.py
|
boundary/action_installed.py
|
###
### Copyright 2014-2015 Boundary, Inc.
###
### Licensed under the Apache License, Version 2.0 (the "License");
### you may not use this file except in compliance with the License.
### You may obtain a copy of the License at
###
### http://www.apache.org/licenses/LICENSE-2.0
###
### Unless required by applicable law or agreed to in writing, software
### distributed under the License is distributed on an "AS IS" BASIS,
### WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
### See the License for the specific language governing permissions and
### limitations under the License.
###
from api_cli import ApiCli
class ActionInstalled (ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path = "v1/actions/installed"
def getDescription(self):
return "Returns the actions associated with the Boundary account"
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from api_cli import ApiCli
class ActionInstalled (ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path = "v1/actions/installed"
def getDescription(self):
return "Returns the actions configured within a Boundary account"
|
Change code to be PEP-8 compliant
|
Change code to be PEP-8 compliant
|
Python
|
apache-2.0
|
boundary/boundary-api-cli,boundary/boundary-api-cli,jdgwartney/boundary-api-cli,jdgwartney/pulse-api-cli,wcainboundary/boundary-api-cli,wcainboundary/boundary-api-cli,jdgwartney/pulse-api-cli,boundary/pulse-api-cli,jdgwartney/boundary-api-cli,boundary/pulse-api-cli
|
57bc8b3c40bbafda6f69b23c230ad73750e881ab
|
hashable/helpers.py
|
hashable/helpers.py
|
from .equals_builder import EqualsBuilder
from .hash_code_builder import HashCodeBuilder
__all__ = [
'hashable',
'equality_comparable',
]
def hashable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls = equality_comparable(cls, attributes, methods)
cls.__hash__ = HashCodeBuilder.auto_generate(cls, attributes, methods)
return cls
return decorator if cls is None else decorator(cls)
def equality_comparable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls.__eq__ = EqualsBuilder.auto_generate(cls, attributes, methods)
cls.__ne__ = EqualsBuilder.auto_ne_from_eq()
return cls
return decorator if cls is None else decorator(cls)
def _validate_attributes_and_methods(attributes, methods):
assert not isinstance(attributes, basestring), 'attributes must be list'
assert not isinstance(methods, basestring), 'methods must be list'
assert attributes or methods, 'attributes or methods must be NOT empty'
|
from .equals_builder import EqualsBuilder
from .hash_code_builder import HashCodeBuilder
__all__ = [
'hashable',
'equalable',
]
def hashable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls = equalable(cls, attributes, methods)
cls.__hash__ = HashCodeBuilder.auto_generate(cls, attributes, methods)
return cls
return decorator if cls is None else decorator(cls)
def equalable(cls=None, attributes=None, methods=None):
_validate_attributes_and_methods(attributes, methods)
def decorator(cls):
cls.__eq__ = EqualsBuilder.auto_generate(cls, attributes, methods)
cls.__ne__ = EqualsBuilder.auto_ne_from_eq()
return cls
return decorator if cls is None else decorator(cls)
def _validate_attributes_and_methods(attributes, methods):
assert not isinstance(attributes, basestring), 'attributes must be list'
assert not isinstance(methods, basestring), 'methods must be list'
assert attributes or methods, 'attributes or methods must be NOT empty'
|
Rename decorator equality_comparable to equalable
|
Rename decorator equality_comparable to equalable
|
Python
|
mit
|
minmax/hashable
|
4f6e27a6bbc2bbdb19c165f21d47d1491bffd70e
|
scripts/mc_check_lib_file.py
|
scripts/mc_check_lib_file.py
|
#!/usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2021 The HERA Collaboration
# Licensed under the 2-clause BSD License
"""
Check that input files are safely in the librarian.
This script takes a list of input files and returns the list of those
found in the HERA_MC.lib_files table.
NOTE: Assumes that lib_files is a faithful list of files uploaded to the Librarian
"""
import os
from hera_mc import mc
ap = mc.get_mc_argument_parser()
ap.description = """Check that listed files are safely in librarian."""
ap.add_argument("files", type=str, default=None, nargs="*", help="list of files")
args = ap.parse_args()
db = mc.connect_to_mc_db(args)
found_files = []
for pathname in args.files:
filename = os.path.basename(pathname)
with db.sessionmaker() as session:
out = session.get_lib_files(filename)
if len(out) > 0:
print(pathname) # if we have a file, say so
|
#!/usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2021 The HERA Collaboration
# Licensed under the 2-clause BSD License
"""
Check that input files are safely in the librarian.
This script takes a list of input files and returns the list of those
found in the HERA_MC.lib_files table.
NOTE: Assumes that lib_files is a faithful list of files uploaded to the Librarian
"""
import os
from hera_mc import mc
ap = mc.get_mc_argument_parser()
ap.description = """Check that listed files are safely in librarian."""
ap.add_argument("files", type=str, default=None, nargs="*", help="list of files")
args = ap.parse_args()
db = mc.connect_to_mc_db(args)
found_files = []
with db.sessionmaker() as session:
for pathname in args.files:
filename = os.path.basename(pathname)
out = session.get_lib_files(filename)
if len(out) > 0:
print(pathname) # if we have a file, say so
|
Move sessionmaker outside of loop
|
Move sessionmaker outside of loop
|
Python
|
bsd-2-clause
|
HERA-Team/hera_mc,HERA-Team/hera_mc
|
5436068e2a0974a932d59d51dd529af221832735
|
test/vim_autopep8.py
|
test/vim_autopep8.py
|
"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
"""
import vim
if vim.eval('&syntax') == 'python':
encoding = vim.eval('&fileencoding')
source = '\n'.join(line.decode(encoding)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [line.encode(encoding)
for line in formatted.splitlines()]
|
"""Run autopep8 on the selected buffer in Vim.
map <C-I> :pyfile <path_to>/vim_autopep8.py<CR>
Replace ":pyfile" with ":py3file" if Vim is built with Python 3 support.
"""
from __future__ import unicode_literals
import sys
import vim
ENCODING = vim.eval('&fileencoding')
def encode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.encode(ENCODING)
def decode(text):
if sys.version_info[0] >= 3:
return text
else:
return text.decode(ENCODING)
if vim.eval('&syntax') == 'python':
source = '\n'.join(decode(line)
for line in vim.current.buffer) + '\n'
import autopep8
options = autopep8.parse_args(['--range',
str(1 + vim.current.range.start),
str(1 + vim.current.range.end),
''])
formatted = autopep8.fix_code(source, options=options)
if source != formatted:
if formatted.endswith('\n'):
formatted = formatted[:-1]
vim.current.buffer[:] = [encode(line)
for line in formatted.splitlines()]
|
Support Python 3 in Vim usage example
|
Support Python 3 in Vim usage example
|
Python
|
mit
|
vauxoo-dev/autopep8,Vauxoo/autopep8,vauxoo-dev/autopep8,hhatto/autopep8,SG345/autopep8,SG345/autopep8,MeteorAdminz/autopep8,Vauxoo/autopep8,hhatto/autopep8,MeteorAdminz/autopep8
|
c105d6f18a5a17b0a47fda5a2df2f8f47352b037
|
setuptools/command/upload.py
|
setuptools/command/upload.py
|
import getpass
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
# Attempt to obtain password. Short circuit evaluation at the first
# sign of success.
self.password = (
self.password or
self._load_password_from_keyring() or
self._prompt_for_password()
)
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
password = keyring.get_password(self.repository, self.username)
except Exception:
password = None
finally:
return password
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
password = None
try:
while not password:
password = getpass.getpass()
except (Exception, KeyboardInterrupt):
password = None
finally:
return password
|
import getpass
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
# Attempt to obtain password. Short circuit evaluation at the first
# sign of success.
self.password = (
self.password or
self._load_password_from_keyring() or
self._prompt_for_password()
)
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
return keyring.get_password(self.repository, self.username)
except Exception:
pass
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
try:
return getpass.getpass()
except (Exception, KeyboardInterrupt):
pass
|
Simplify logic by eliminating retries in password prompt and returning results directly.
|
Simplify logic by eliminating retries in password prompt and returning results directly.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
a3213788d0d8591b235359d4b17886ce3f50ab37
|
tests/test_plugin.py
|
tests/test_plugin.py
|
import datajoint.errors as djerr
import datajoint.plugin as p
import pkg_resources
def test_check_pubkey():
base_name = 'datajoint'
base_meta = pkg_resources.get_distribution(base_name)
pubkey_meta = base_meta.get_metadata('{}.pub'.format(base_name))
with open('./datajoint.pub', "r") as f:
assert(f.read() == pubkey_meta)
def test_normal_djerror():
try:
raise djerr.DataJointError
except djerr.DataJointError as e:
assert(e.__cause__ is None)
def test_verified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=True, plugon='example'))
raise djerr.DataJointError
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(e.__cause__ is None)
def test_unverified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=False, plugon='example'))
raise djerr.DataJointError("hello")
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(isinstance(e.__cause__, djerr.PluginWarning))
|
import datajoint.errors as djerr
import datajoint.plugin as p
import pkg_resources
from os import path
def test_check_pubkey():
base_name = 'datajoint'
base_meta = pkg_resources.get_distribution(base_name)
pubkey_meta = base_meta.get_metadata('{}.pub'.format(base_name))
with open(path.join(path.abspath(
path.dirname(__file__)), '..', 'datajoint.pub'), "r") as f:
assert(f.read() == pubkey_meta)
def test_normal_djerror():
try:
raise djerr.DataJointError
except djerr.DataJointError as e:
assert(e.__cause__ is None)
def test_verified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=True, plugon='example'))
raise djerr.DataJointError
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(e.__cause__ is None)
def test_unverified_djerror():
try:
curr_plugins = p.discovered_plugins
p.discovered_plugins = dict(test_plugin_module=dict(verified=False, plugon='example'))
raise djerr.DataJointError("hello")
except djerr.DataJointError as e:
p.discovered_plugins = curr_plugins
assert(isinstance(e.__cause__, djerr.PluginWarning))
|
Make pubkey test more portable.
|
Make pubkey test more portable.
|
Python
|
lgpl-2.1
|
datajoint/datajoint-python,dimitri-yatsenko/datajoint-python
|
bc5475bcc3608de75c42d24c5c74e416b41b873f
|
pages/base.py
|
pages/base.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from selenium.webdriver.common.by import By
from page import Page
class Base(Page):
_login_locator = (By.ID, 'login')
_logout_locator = (By.ID, 'logout')
_notification_locator = (By.CLASS_NAME, 'flash')
def click_login(self):
self.selenium.find_element(*self._login_locator).click()
from pages.login import LoginPage
return LoginPage(self.testsetup)
def click_logout(self):
self.selenium.find_element(*self._logout_locator).click()
def login(self, username=None, password=None):
login_page = self.click_login()
return login_page.login(username, password)
def logout(self):
self.click_logout()
@property
def notification(self):
return self.selenium.find_element(*self._notification_locator).text
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from selenium.webdriver.common.by import By
from page import Page
class Base(Page):
_login_locator = (By.ID, 'login')
_logout_locator = (By.ID, 'logout')
_notification_locator = (By.CLASS_NAME, 'flash')
def click_login(self):
self.selenium.find_element(*self._login_locator).click()
from pages.login import LoginPage
return LoginPage(self.testsetup)
def click_logout(self):
self.selenium.find_element(*self._logout_locator).click()
def login(self, username, password):
login_page = self.click_login()
return login_page.login(username, password)
def logout(self):
self.click_logout()
@property
def notification(self):
return self.selenium.find_element(*self._notification_locator).text
|
Make username and password required arguments
|
Make username and password required arguments
|
Python
|
mpl-2.0
|
mozilla/mozwebqa-examples,davehunt/mozwebqa-examples,mozilla/mozwebqa-examples,davehunt/mozwebqa-examples
|
54bce2a224843ec9c1c8b7eb35cdc6bf19d5726b
|
expensonator/api.py
|
expensonator/api.py
|
from tastypie.authorization import Authorization
from tastypie.fields import CharField
from tastypie.resources import ModelResource
from expensonator.models import Expense
class ExpenseResource(ModelResource):
tags = CharField()
def dehydrate_tags(self, bundle):
return bundle.obj.tags_as_string()
def save(self, bundle, skip_errors=False):
bundle = super(ExpenseResource, self).save(bundle, skip_errors)
bundle.obj.reset_tags_from_string(bundle.data["tags"])
return bundle
class Meta:
queryset = Expense.objects.all()
excludes = ["created", "updated"]
# WARNING: Tastypie docs say that this is VERY INSECURE!
# For development only!
authorization = Authorization()
|
from tastypie.authorization import Authorization
from tastypie.fields import CharField
from tastypie.resources import ModelResource
from expensonator.models import Expense
class ExpenseResource(ModelResource):
tags = CharField()
def dehydrate_tags(self, bundle):
return bundle.obj.tags_as_string()
def save(self, bundle, skip_errors=False):
bundle = super(ExpenseResource, self).save(bundle, skip_errors)
if "tags" in bundle.data:
bundle.obj.reset_tags_from_string(bundle.data["tags"])
return bundle
class Meta:
queryset = Expense.objects.all()
excludes = ["created", "updated"]
# WARNING: Tastypie docs say that this is VERY INSECURE!
# For development only!
authorization = Authorization()
|
Fix key error when no tags are specified
|
Fix key error when no tags are specified
|
Python
|
mit
|
matt-haigh/expensonator
|
f02b6505f190011f06b37619ec4fdf9bda1e804e
|
cea/interfaces/dashboard/api/utils.py
|
cea/interfaces/dashboard/api/utils.py
|
from flask import current_app
import cea.inputlocator
def deconstruct_parameters(p):
params = {'name': p.name, 'type': p.typename,
'value': p.get(), 'help': p.help}
try:
params['choices'] = p._choices
except AttributeError:
pass
if p.typename == 'WeatherPathParameter':
config = current_app.cea_config
locator = cea.inputlocator.InputLocator(config.scenario)
params['choices'] = {wn: locator.get_weather(
wn) for wn in locator.get_weather_names()}
return params
|
from flask import current_app
import cea.config
import cea.inputlocator
def deconstruct_parameters(p):
params = {'name': p.name, 'type': p.typename,
'value': p.get(), 'help': p.help}
if isinstance(p, cea.config.ChoiceParameter):
params['choices'] = p._choices
if p.typename == 'WeatherPathParameter':
config = current_app.cea_config
locator = cea.inputlocator.InputLocator(config.scenario)
params['choices'] = {wn: locator.get_weather(
wn) for wn in locator.get_weather_names()}
elif p.typename == 'DatabasePathParameter':
params['choices'] = p._choices
return params
|
Add parameter deconstruction fro DatabasePathParameter
|
Add parameter deconstruction fro DatabasePathParameter
|
Python
|
mit
|
architecture-building-systems/CEAforArcGIS,architecture-building-systems/CEAforArcGIS
|
dfdeaf536466cfa8003af4cd5341d1d7127ea6b7
|
py/_test_py2go.py
|
py/_test_py2go.py
|
#!/usr/bin/env python
import datetime
def return_true():
return True
def return_false():
return False
def return_int():
return 123
def return_float():
return 1.0
def return_string():
return "ABC"
def return_bytearray():
return bytearray('abcdefg')
def return_array():
return [1, 2, {"key": 3}]
def return_map():
return {"key1": 123, "key2": "str"}
def return_nested_map():
return {"key1": {"key2": 123}}
def return_none():
return None
def return_timestamp():
return datetime.datetime(2015, 4, 1, 14, 27, 0, 500*1000, None)
|
#!/usr/bin/env python
import datetime
def return_true():
return True
def return_false():
return False
def return_int():
return 123
def return_float():
return 1.0
def return_string():
return "ABC"
def return_bytearray():
return bytearray('abcdefg')
def return_array():
return [1, 2, {"key": 3}]
def return_map():
return {"key1": 123, "key2": "str"}
def return_nested_map():
return {"key1": {"key2": 123}}
def return_none():
return None
def return_timestamp():
return datetime.datetime(2015, 4, 1, 14, 27, 0, 500*1000, None)
|
Update python script for pep8 style
|
Update python script for pep8 style
|
Python
|
mit
|
sensorbee/py,sensorbee/py
|
caf9795cf0f775442bd0c3e06cd550a6e8d0206b
|
virtool/labels/db.py
|
virtool/labels/db.py
|
async def count_samples(db, label_id):
return await db.samples.count_documents({"labels": {"$in": [label_id]}})
|
async def attach_sample_count(db, document, label_id):
document.update({"count": await db.samples.count_documents({"labels": {"$in": [label_id]}})})
|
Rewrite function for sample count
|
Rewrite function for sample count
|
Python
|
mit
|
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
|
51e7cd3bc5a9a56fb53a5b0a8328d0b9d58848dd
|
modder/utils/desktop_notification.py
|
modder/utils/desktop_notification.py
|
# coding: utf-8
import platform
if platform.system() == 'Darwin':
from Foundation import NSUserNotificationDefaultSoundName
import objc
NSUserNotification = objc.lookUpClass('NSUserNotification')
NSUserNotificationCenter = objc.lookUpClass('NSUserNotificationCenter')
def desktop_notify(text, title='Modder', sound=False):
notification = NSUserNotification.alloc().init()
notification.setTitle_(title.decode('utf-8'))
notification.setInformativeText_(text.decode('utf-8'))
if sound:
notification.setSoundName_(NSUserNotificationDefaultSoundName)
center = NSUserNotificationCenter.defaultUserNotificationCenter()
center.deliverNotification_(notification)
elif platform.system() == 'Windows':
def desktop_notify(text, title='Modder', sound=False):
pass
elif platform.system() == 'Linux':
def desktop_notify(text, title='Modder', sound=False):
pass
|
# coding: utf-8
import platform
if platform.system() == 'Darwin':
from Foundation import NSUserNotificationDefaultSoundName
import objc
NSUserNotification = objc.lookUpClass('NSUserNotification')
NSUserNotificationCenter = objc.lookUpClass('NSUserNotificationCenter')
def desktop_notify(text, title=None, sound=False):
title = title or 'Modder'
notification = NSUserNotification.alloc().init()
notification.setTitle_(title.decode('utf-8'))
notification.setInformativeText_(text.decode('utf-8'))
if sound:
notification.setSoundName_(NSUserNotificationDefaultSoundName)
center = NSUserNotificationCenter.defaultUserNotificationCenter()
center.deliverNotification_(notification)
elif platform.system() == 'Windows':
def desktop_notify(text, title=None, sound=False):
title = title or 'Modder'
pass
elif platform.system() == 'Linux':
def desktop_notify(text, title=None, sound=False):
title = title or 'Modder'
pass
|
Fix title for desktop notification
|
Fix title for desktop notification
|
Python
|
mit
|
JokerQyou/Modder2
|
8a7837a8ce7b35c3141374c6a5c99361261fa70a
|
Cura/avr_isp/chipDB.py
|
Cura/avr_isp/chipDB.py
|
avrChipDB = {
'ATMega2560': {
'signature': [0x1E, 0x98, 0x01],
'pageSize': 128,
'pageCount': 1024,
},
}
def getChipFromDB(sig):
for chip in avrChipDB.values():
if chip['signature'] == sig:
return chip
return False
|
avrChipDB = {
'ATMega1280': {
'signature': [0x1E, 0x97, 0x03],
'pageSize': 128,
'pageCount': 512,
},
'ATMega2560': {
'signature': [0x1E, 0x98, 0x01],
'pageSize': 128,
'pageCount': 1024,
},
}
def getChipFromDB(sig):
for chip in avrChipDB.values():
if chip['signature'] == sig:
return chip
return False
|
Add ATMega1280 chip to programmer chips.
|
Add ATMega1280 chip to programmer chips.
|
Python
|
agpl-3.0
|
MolarAmbiguity/OctoPrint,EZ3-India/EZ-Remote,JackGavin13/octoprint-test-not-finished,spapadim/OctoPrint,dragondgold/OctoPrint,hudbrog/OctoPrint,CapnBry/OctoPrint,Javierma/OctoPrint-TFG,chriskoz/OctoPrint,javivi001/OctoPrint,shohei/Octoprint,eddieparker/OctoPrint,MolarAmbiguity/OctoPrint,mayoff/OctoPrint,uuv/OctoPrint,C-o-r-E/OctoPrint,Mikk36/OctoPrint,DanLipsitt/OctoPrint,shohei/Octoprint,beeverycreative/BEEweb,alex1818/OctoPrint,EZ3-India/EZ-Remote,alex1818/OctoPrint,shohei/Octoprint,markwal/OctoPrint,beeverycreative/BEEweb,aerickson/OctoPrint,beeverycreative/BEEweb,aerickson/OctoPrint,nicanor-romero/OctoPrint,punkkeks/OctoPrint,d42/octoprint-fork,Javierma/OctoPrint-TFG,3dprintcanalhouse/octoprint2,ErikDeBruijn/OctoPrint,punkkeks/OctoPrint,masterhou/OctoPrint,shaggythesheep/OctoPrint,chriskoz/OctoPrint,madhuni/AstroBox,Catrodigious/OctoPrint-TAM,alephobjects/Cura,javivi001/OctoPrint,uuv/OctoPrint,leductan-nguyen/RaionPi,MoonshineSG/OctoPrint,eliasbakken/OctoPrint,nicanor-romero/OctoPrint,Skeen/OctoPrint,javivi001/OctoPrint,Salandora/OctoPrint,jneves/OctoPrint,hudbrog/OctoPrint,shaggythesheep/OctoPrint,MoonshineSG/OctoPrint,skieast/OctoPrint,abinashk-inf/AstroBox,nickverschoor/OctoPrint,eddieparker/OctoPrint,EZ3-India/EZ-Remote,EZ3-India/EZ-Remote,abinashk-inf/AstroBox,mrbeam/OctoPrint,abinashk-inf/AstroBox,mrbeam/OctoPrint,Voxel8/OctoPrint,sstocker46/OctoPrint,bicephale/OctoPrint,dragondgold/OctoPrint,Jaesin/OctoPrint,mcanes/OctoPrint,ryanneufeld/OctoPrint,Salandora/OctoPrint,CapnBry/OctoPrint,foosel/OctoPrint,nickverschoor/OctoPrint,alephobjects/Cura,mcanes/OctoPrint,markwal/OctoPrint,sstocker46/OctoPrint,Jaesin/OctoPrint,3dprintcanalhouse/octoprint1,skieast/OctoPrint,madhuni/AstroBox,markwal/OctoPrint,Mikk36/OctoPrint,AstroPrint/AstroBox,ymilord/OctoPrint-MrBeam,dansantee/OctoPrint,Jaesin/OctoPrint,punkkeks/OctoPrint,ymilord/OctoPrint-MrBeam,rurkowce/octoprint-fork,foosel/OctoPrint,Salandora/OctoPrint,spapadim/OctoPrint,MoonshineSG/OctoPrint,spapadim/OctoPrint,madhuni/AstroBox,masterhou/OctoPrint,ymilord/OctoPrint-MrBeam,alephobjects/Cura,ryanneufeld/OctoPrint,chriskoz/OctoPrint,hudbrog/OctoPrint,Mikk36/OctoPrint,eddieparker/OctoPrint,leductan-nguyen/RaionPi,JackGavin13/octoprint-test-not-finished,beeverycreative/BEEweb,bicephale/OctoPrint,nicanor-romero/OctoPrint,jneves/OctoPrint,JackGavin13/octoprint-test-not-finished,ErikDeBruijn/OctoPrint,leductan-nguyen/RaionPi,CapnBry/OctoPrint,chriskoz/OctoPrint,ryanneufeld/OctoPrint,3dprintcanalhouse/octoprint1,mrbeam/OctoPrint,senttech/OctoPrint,Javierma/OctoPrint-TFG,dansantee/OctoPrint,Voxel8/OctoPrint,bicephale/OctoPrint,MolarAmbiguity/OctoPrint,MaxOLydian/OctoPrint,eliasbakken/OctoPrint,DanLipsitt/OctoPrint,mayoff/OctoPrint,Skeen/OctoPrint,Jaesin/OctoPrint,rurkowce/octoprint-fork,CapnBry/OctoPrint,AstroPrint/AstroBox,madhuni/AstroBox,uuv/OctoPrint,abinashk-inf/AstroBox,JackGavin13/octoprint-test-not-finished,SeveQ/OctoPrint,sstocker46/OctoPrint,dansantee/OctoPrint,skieast/OctoPrint,mayoff/OctoPrint,C-o-r-E/OctoPrint,eliasbakken/OctoPrint,ryanneufeld/OctoPrint,foosel/OctoPrint,nickverschoor/OctoPrint,bicephale/OctoPrint,SeveQ/OctoPrint,MoonshineSG/OctoPrint,SeveQ/OctoPrint,senttech/OctoPrint,shohei/Octoprint,ymilord/OctoPrint-MrBeam,3dprintcanalhouse/octoprint2,d42/octoprint-fork,mcanes/OctoPrint,Voxel8/OctoPrint,senttech/OctoPrint,ymilord/OctoPrint-MrBeam,leductan-nguyen/RaionPi,Javierma/OctoPrint-TFG,Salandora/OctoPrint,C-o-r-E/OctoPrint,alex1818/OctoPrint,MaxOLydian/OctoPrint,shaggythesheep/OctoPrint,masterhou/OctoPrint,shohei/Octoprint,ErikDeBruijn/OctoPrint,jneves/OctoPrint,Catrodigious/OctoPrint-TAM,foosel/OctoPrint,dragondgold/OctoPrint,senttech/OctoPrint,aerickson/OctoPrint,MaxOLydian/OctoPrint,nickverschoor/OctoPrint,Skeen/OctoPrint,Catrodigious/OctoPrint-TAM,AstroPrint/AstroBox
|
ef96000b01c50a77b3500fc4071f83f96d7b2458
|
mrbelvedereci/api/views/cumulusci.py
|
mrbelvedereci/api/views/cumulusci.py
|
from django.shortcuts import render
from mrbelvedereci.api.serializers.cumulusci import OrgSerializer
from mrbelvedereci.api.serializers.cumulusci import ScratchOrgInstanceSerializer
from mrbelvedereci.api.serializers.cumulusci import ServiceSerializer
from mrbelvedereci.cumulusci.filters import OrgFilter
from mrbelvedereci.cumulusci.filters import ScratchOrgInstanceFilter
from mrbelvedereci.cumulusci.filters import ServiceFilter
from mrbelvedereci.cumulusci.models import Org
from mrbelvedereci.cumulusci.models import ScratchOrgInstance
from mrbelvedereci.cumulusci.models import Service
from rest_framework import viewsets
class OrgViewSet(viewsets.ModelViewSet):
"""
A viewset for viewing and editing Orgs
"""
serializer_class = OrgSerializer
queryset = Org.objects.all()
filter_class = OrgFilter
class ScratchOrgInstanceViewSet(viewsets.ModelViewSet):
"""
A viewset for viewing and editing ScratchOrgInstances
"""
serializer_class = ScratchOrgInstanceSerializer
queryset = ScratchOrgInstance.objects.all()
filter_class = ScratchOrgInstanceFilter
class ServiceViewSet(viewsets.ModelViewSet):
"""
A viewset for viewing and editing Services
"""
serializer_class = ServiceSerializer
queryset = Service.objects.all()
filter_class = ServiceFilter
|
from django.shortcuts import render
from mrbelvedereci.api.serializers.cumulusci import OrgSerializer
from mrbelvedereci.api.serializers.cumulusci import ScratchOrgInstanceSerializer
from mrbelvedereci.api.serializers.cumulusci import ServiceSerializer
from mrbelvedereci.cumulusci.filters import OrgFilter
from mrbelvedereci.cumulusci.filters import ScratchOrgInstanceFilter
from mrbelvedereci.cumulusci.models import Org
from mrbelvedereci.cumulusci.models import ScratchOrgInstance
from mrbelvedereci.cumulusci.models import Service
from rest_framework import viewsets
class OrgViewSet(viewsets.ModelViewSet):
"""
A viewset for viewing and editing Orgs
"""
serializer_class = OrgSerializer
queryset = Org.objects.all()
filter_class = OrgFilter
class ScratchOrgInstanceViewSet(viewsets.ModelViewSet):
"""
A viewset for viewing and editing ScratchOrgInstances
"""
serializer_class = ScratchOrgInstanceSerializer
queryset = ScratchOrgInstance.objects.all()
filter_class = ScratchOrgInstanceFilter
class ServiceViewSet(viewsets.ModelViewSet):
"""
A viewset for viewing and editing Services
"""
serializer_class = ServiceSerializer
queryset = Service.objects.all()
|
Remove ServiceFilter from view since it's not needed. Service only has name and json
|
Remove ServiceFilter from view since it's not needed. Service only has
name and json
|
Python
|
bsd-3-clause
|
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
|
4f0dbf920a6867d8f3e16eb420391c8bcca43c44
|
onirim/card/_door.py
|
onirim/card/_door.py
|
from onirim.card._base import ColorCard
class _Door(ColorCard):
def drawn(self, agent, content):
do_open = agent.ask("if open") if content.can_open(self) else False
if do_open:
content.discard(self)
else:
content.limbo(self)
def door(color):
return _Door(color)
|
from onirim.card._base import ColorCard
from onirim.card._location import LocationKind
def _openable(door_card, card):
"""Check if the door can be opened by another card."""
return card.kind == LocationKind.key and door_card.color == card.color
def _may_open(door_card, content):
"""Check if the door may be opened by agent."""
return any(_openable(door_card, card) for card in content.hand())
class _Door(ColorCard):
def drawn(self, agent, content):
do_open = agent.ask("if open") if _may_open(self, content) else False
if do_open:
content.discard(self)
else:
content.limbo(self)
def door(color):
"""Make a door card."""
return _Door(color)
|
Implement openable check for door card.
|
Implement openable check for door card.
|
Python
|
mit
|
cwahbong/onirim-py
|
24f0402e27ce7e51f370e82aa74c783438875d02
|
oslo_db/tests/sqlalchemy/__init__.py
|
oslo_db/tests/sqlalchemy/__init__.py
|
# Copyright (c) 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db.sqlalchemy import test_base
load_tests = test_base.optimize_db_test_loader(__file__)
|
# Copyright (c) 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db.sqlalchemy import test_fixtures
load_tests = test_fixtures.optimize_package_test_loader(__file__)
|
Remove deprecation warning when loading tests/sqlalchemy
|
Remove deprecation warning when loading tests/sqlalchemy
/home/sam/Work/ironic/.tox/py27/local/lib/python2.7/site-packages/oslo_db/tests/sqlalchemy/__init__.py:20:
DeprecationWarning: Function
'oslo_db.sqlalchemy.test_base.optimize_db_test_loader()' has moved to
'oslo_db.sqlalchemy.test_fixtures.optimize_package_test_loader()'
Change-Id: I7fb4e776cedb8adcf97c9a43210049c60f796873
|
Python
|
apache-2.0
|
openstack/oslo.db,openstack/oslo.db
|
db6cb95d5d4261780482b4051f556fcbb2d9f237
|
rest_api/forms.py
|
rest_api/forms.py
|
from django.forms import ModelForm
from rest_api.models import Url
class UrlForm(ModelForm):
class Meta:
model = Url
|
from django.forms import ModelForm
from gateway_backend.models import Url
class UrlForm(ModelForm):
class Meta:
model = Url
|
Remove Url model from admin
|
Remove Url model from admin
|
Python
|
bsd-2-clause
|
victorpoluceno/shortener_frontend,victorpoluceno/shortener_frontend
|
3410fba1c8a39156def029eac9c7ff9f779832e6
|
dev/ci.py
|
dev/ci.py
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
import site
import sys
from . import build_root, requires_oscrypto
from ._import import _preload
deps_dir = os.path.join(build_root, 'modularcrypto-deps')
if os.path.exists(deps_dir):
site.addsitedir(deps_dir)
if sys.version_info[0:2] not in [(2, 6), (3, 2)]:
from .lint import run as run_lint
else:
run_lint = None
if sys.version_info[0:2] != (3, 2):
from .coverage import run as run_coverage
from .coverage import coverage
run_tests = None
else:
from .tests import run as run_tests
run_coverage = None
def run():
"""
Runs the linter and tests
:return:
A bool - if the linter and tests ran successfully
"""
_preload(requires_oscrypto, True)
if run_lint:
print('')
lint_result = run_lint()
else:
lint_result = True
if run_coverage:
print('\nRunning tests (via coverage.py %s)' % coverage.__version__)
sys.stdout.flush()
tests_result = run_coverage(ci=True)
else:
print('\nRunning tests')
sys.stdout.flush()
tests_result = run_tests(ci=True)
sys.stdout.flush()
return lint_result and tests_result
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
import site
import sys
from . import build_root, requires_oscrypto
from ._import import _preload
deps_dir = os.path.join(build_root, 'modularcrypto-deps')
if os.path.exists(deps_dir):
site.addsitedir(deps_dir)
# In case any of the deps are installed system-wide
sys.path.insert(0, deps_dir)
if sys.version_info[0:2] not in [(2, 6), (3, 2)]:
from .lint import run as run_lint
else:
run_lint = None
if sys.version_info[0:2] != (3, 2):
from .coverage import run as run_coverage
from .coverage import coverage
run_tests = None
else:
from .tests import run as run_tests
run_coverage = None
def run():
"""
Runs the linter and tests
:return:
A bool - if the linter and tests ran successfully
"""
_preload(requires_oscrypto, True)
if run_lint:
print('')
lint_result = run_lint()
else:
lint_result = True
if run_coverage:
print('\nRunning tests (via coverage.py %s)' % coverage.__version__)
sys.stdout.flush()
tests_result = run_coverage(ci=True)
else:
print('\nRunning tests')
sys.stdout.flush()
tests_result = run_tests(ci=True)
sys.stdout.flush()
return lint_result and tests_result
|
Fix CI to ignore system install of asn1crypto
|
Fix CI to ignore system install of asn1crypto
|
Python
|
mit
|
wbond/oscrypto
|
502d99042428175b478e796c067e41995a0ae5bf
|
picoCTF-web/api/apps/v1/__init__.py
|
picoCTF-web/api/apps/v1/__init__.py
|
"""picoCTF API v1 app."""
from flask import Blueprint, jsonify
from flask_restplus import Api
from api.common import PicoException
from .achievements import ns as achievements_ns
from .problems import ns as problems_ns
from .shell_servers import ns as shell_servers_ns
from .exceptions import ns as exceptions_ns
from .settings import ns as settings_ns
from .bundles import ns as bundles_ns
from .submissions import ns as submissions_ns
from .feedback import ns as feedback_ns
blueprint = Blueprint('v1_api', __name__)
api = Api(
app=blueprint,
title='picoCTF API',
version='1.0',
)
api.add_namespace(achievements_ns)
api.add_namespace(problems_ns)
api.add_namespace(shell_servers_ns)
api.add_namespace(exceptions_ns)
api.add_namespace(settings_ns)
api.add_namespace(bundles_ns)
api.add_namespace(submissions_ns)
api.add_namespace(feedback_ns)
@api.errorhandler(PicoException)
def handle_pico_exception(e):
"""Handle exceptions."""
response = jsonify(e.to_dict())
response.status_code = 203
return response
|
"""picoCTF API v1 app."""
from flask import Blueprint, jsonify
from flask_restplus import Api
from api.common import PicoException
from .achievements import ns as achievements_ns
from .problems import ns as problems_ns
from .shell_servers import ns as shell_servers_ns
from .exceptions import ns as exceptions_ns
from .settings import ns as settings_ns
from .bundles import ns as bundles_ns
from .submissions import ns as submissions_ns
from .feedback import ns as feedback_ns
blueprint = Blueprint('v1_api', __name__)
api = Api(
app=blueprint,
title='picoCTF API',
version='1.0',
)
api.add_namespace(achievements_ns)
api.add_namespace(problems_ns)
api.add_namespace(shell_servers_ns)
api.add_namespace(exceptions_ns)
api.add_namespace(settings_ns)
api.add_namespace(bundles_ns)
api.add_namespace(submissions_ns)
api.add_namespace(feedback_ns)
@api.errorhandler(PicoException)
def handle_pico_exception(e):
"""Handle exceptions."""
response = jsonify(e.to_dict())
response.status_code = e.status_code
return response
|
Fix PicoException response code bug
|
Fix PicoException response code bug
|
Python
|
mit
|
royragsdale/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF
|
5d71215645683a059a51407a3768054c9ea77406
|
pisite/logs/forms.py
|
pisite/logs/forms.py
|
from django import forms
from logs.models import Log
class LineCountForm(forms.Form):
linesToFetch = forms.IntegerField(label="Number of lines to show", min_value=0, initial=Log.defaultLinesToShow)
|
from django import forms
from logs.models import Log
class LineCountForm(forms.Form):
linesToFetch = forms.IntegerField(label="Number of lines to show (0 for all)", min_value=0, initial=Log.defaultLinesToShow)
|
Add to the label that 0 lines will result in the entire file being downloaded
|
Add to the label that 0 lines will result in the entire file being downloaded
|
Python
|
mit
|
sizlo/RPiFun,sizlo/RPiFun
|
94dad4c56a4b6a1968fa15c20b8482fd56774f32
|
optimize/py/main.py
|
optimize/py/main.py
|
from scipy import optimize as o
import clean as c
def minimize(func, guess):
return o.minimize(func, guess)
def minimize_scalar(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
|
from scipy import optimize as o
import numpy as np
import clean as c
def minimize_scalar(func, options):
bracket = options['bracket']
bounds = options['bounds']
method = options['method']
tol = options['tol']
options = options['options']
try:
return o.minimize_scalar(func, bracket=bracket, bounds=bounds, method=method, tol=tol, options=options)
except Exception as e:
return str(e)
def nnls(A, b):
try:
solution, residual = o.nnls(np.array(A), np.array(b))
return {
'solution': solution.tolist(),
'residual': residual
}
except Exception as e:
return str(e)
|
Add non negative least squares scipy functionality
|
Add non negative least squares scipy functionality
|
Python
|
mit
|
acjones617/scipy-node,acjones617/scipy-node
|
a389f20c7f2c8811a5c2f50c43a9ce5c7f3c8387
|
jobs_backend/vacancies/serializers.py
|
jobs_backend/vacancies/serializers.py
|
from rest_framework import serializers
from .models import Vacancy
class VacancySerializer(serializers.HyperlinkedModelSerializer):
"""
Common vacancy model serializer
"""
class Meta:
model = Vacancy
fields = (
'id', 'url', 'title', 'description', 'created_on', 'modified_on'
)
extra_kwargs = {
'url': {'view_name': 'vacancies:vacancy-detail', 'read_only': True}
}
|
from rest_framework import serializers
from .models import Vacancy
class VacancySerializer(serializers.ModelSerializer):
"""
Common vacancy model serializer
"""
class Meta:
model = Vacancy
fields = (
'id', 'url', 'title', 'description', 'created_on', 'modified_on'
)
extra_kwargs = {
'url': {'view_name': 'api:vacancies:vacancy-detail', 'read_only': True}
}
|
Fix for correct resolve URL
|
jobs-010: Fix for correct resolve URL
|
Python
|
mit
|
pyshopml/jobs-backend,pyshopml/jobs-backend
|
441a1b85f6ab954ab89f32977e4f00293270aac6
|
sphinxcontrib/multilatex/__init__.py
|
sphinxcontrib/multilatex/__init__.py
|
import directive
import builder
#===========================================================================
# Node visitor functions
def visit_passthrough(self, node):
pass
def depart_passthrough(self, node):
pass
passthrough = (visit_passthrough, depart_passthrough)
#===========================================================================
# Setup and register extension
def setup(app):
app.add_node(directive.latex_document,
html=passthrough)
app.add_directive("latex-document", directive.LatexDocumentDirective)
app.add_builder(builder.MultiLatexBuilder)
return {"version": "0.0"}
|
import directive
import builder
#===========================================================================
# Node visitor functions
def visit_passthrough(self, node):
pass
def depart_passthrough(self, node):
pass
passthrough = (visit_passthrough, depart_passthrough)
#===========================================================================
# Setup and register extension
def setup(app):
app.add_node(directive.latex_document,
latex=passthrough,
html=passthrough)
app.add_directive("latex-document", directive.LatexDocumentDirective)
app.add_builder(builder.MultiLatexBuilder)
return {"version": "0.0"}
|
Set LaTeX builder to skip latex_document nodes
|
Set LaTeX builder to skip latex_document nodes
This stops Sphinx' built-in LaTeX builder from complaining about unknown
latex_document node type.
|
Python
|
apache-2.0
|
t4ngo/sphinxcontrib-multilatex,t4ngo/sphinxcontrib-multilatex
|
5c11a65af1d51794133895ebe2de92861b0894cf
|
flask_limiter/errors.py
|
flask_limiter/errors.py
|
"""errors and exceptions."""
from distutils.version import LooseVersion
from pkg_resources import get_distribution
from six import text_type
from werkzeug import exceptions
werkzeug_exception = None
werkzeug_version = get_distribution("werkzeug").version
if LooseVersion(werkzeug_version) < LooseVersion("0.9"): # pragma: no cover
# sorry, for touching your internals :).
import werkzeug._internal
werkzeug._internal.HTTP_STATUS_CODES[429] = "Too Many Requests"
werkzeug_exception = exceptions.HTTPException
else:
# Werkzeug 0.9 and up have an existing exception for 429
werkzeug_exception = exceptions.TooManyRequests
class RateLimitExceeded(werkzeug_exception):
"""exception raised when a rate limit is hit.
The exception results in ``abort(429)`` being called.
"""
code = 429
limit = None
def __init__(self, limit):
self.limit = limit
if limit.error_message:
description = (
limit.error_message
if not callable(limit.error_message)
else limit.error_message()
)
else:
description = text_type(limit.limit)
super(RateLimitExceeded, self).__init__(description=description)
|
"""errors and exceptions."""
from distutils.version import LooseVersion
from pkg_resources import get_distribution
from six import text_type
from werkzeug import exceptions
class RateLimitExceeded(exceptions.TooManyRequests):
"""exception raised when a rate limit is hit.
The exception results in ``abort(429)`` being called.
"""
code = 429
limit = None
def __init__(self, limit):
self.limit = limit
if limit.error_message:
description = (
limit.error_message
if not callable(limit.error_message)
else limit.error_message()
)
else:
description = text_type(limit.limit)
super(RateLimitExceeded, self).__init__(description=description)
|
Remove backward compatibility hack for exception subclass
|
Remove backward compatibility hack for exception subclass
|
Python
|
mit
|
alisaifee/flask-limiter,alisaifee/flask-limiter
|
b3979a46a7bcd71aa9b40892167910fdeed5ad97
|
frigg/projects/admin.py
|
frigg/projects/admin.py
|
from django.contrib import admin
from django.template.defaultfilters import pluralize
from .forms import EnvironmentVariableForm
from .models import EnvironmentVariable, Project
class EnvironmentVariableMixin:
form = EnvironmentVariableForm
@staticmethod
def get_readonly_fields(request, obj=None):
if obj:
return 'key', 'value', 'is_secret'
class EnvironmentVariableInline(EnvironmentVariableMixin, admin.TabularInline):
model = EnvironmentVariable
extra = 0
@admin.register(Project)
class ProjectAdmin(admin.ModelAdmin):
list_display = ('__str__', 'queue_name', 'approved', 'number_of_members', 'average_time',
'last_build_number', 'can_deploy')
list_filter = ['owner', 'queue_name', 'approved', 'can_deploy']
actions = ['sync_members']
inlines = [EnvironmentVariableInline]
def sync_members(self, request, queryset):
for project in queryset:
project.update_members()
self.message_user(
request,
'{} project{} was synced'.format(len(queryset), pluralize(len(queryset)))
)
sync_members.short_description = 'Sync members of selected projects'
@admin.register(EnvironmentVariable)
class EnvironmentVariableAdmin(EnvironmentVariableMixin, admin.ModelAdmin):
list_display = (
'__str__',
'is_secret',
)
|
from django.contrib import admin
from django.template.defaultfilters import pluralize
from .forms import EnvironmentVariableForm
from .models import EnvironmentVariable, Project
class EnvironmentVariableMixin:
form = EnvironmentVariableForm
@staticmethod
def get_readonly_fields(request, obj=None):
if obj:
return 'key', 'value', 'is_secret'
return tuple()
class EnvironmentVariableInline(EnvironmentVariableMixin, admin.TabularInline):
model = EnvironmentVariable
extra = 0
@admin.register(Project)
class ProjectAdmin(admin.ModelAdmin):
list_display = ('__str__', 'queue_name', 'approved', 'number_of_members', 'average_time',
'last_build_number', 'can_deploy')
list_filter = ['owner', 'queue_name', 'approved', 'can_deploy']
actions = ['sync_members']
inlines = [EnvironmentVariableInline]
def sync_members(self, request, queryset):
for project in queryset:
project.update_members()
self.message_user(
request,
'{} project{} was synced'.format(len(queryset), pluralize(len(queryset)))
)
sync_members.short_description = 'Sync members of selected projects'
@admin.register(EnvironmentVariable)
class EnvironmentVariableAdmin(EnvironmentVariableMixin, admin.ModelAdmin):
list_display = (
'__str__',
'is_secret',
)
|
Return empty tuple in get_readonly_fields
|
fix: Return empty tuple in get_readonly_fields
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
0d7c0b045c4a2e930fe0d7aa68b96d5a99916a34
|
scripts/document_path_handlers.py
|
scripts/document_path_handlers.py
|
#!/usr/bin/env python
from __future__ import print_function, unicode_literals
from nikola import nikola
n = nikola.Nikola()
n.init_plugins()
print(""".. title: Path Handlers for Nikola
.. slug: path-handlers
.. author: The Nikola Team
Nikola supports special links with the syntax ``link://kind/name``. Here is
the description for all the supported kinds.
""")
for k in sorted(n.path_handlers.keys()):
v = n.path_handlers[k]
print(k)
print('\n'.join(' '+l.strip() for l in v.__doc__.splitlines()))
print()
|
#!/usr/bin/env python
from __future__ import print_function, unicode_literals
from nikola import nikola
n = nikola.Nikola()
n.init_plugins()
print(""".. title: Path Handlers for Nikola
.. slug: path-handlers
.. author: The Nikola Team
Nikola supports special links with the syntax ``link://kind/name``. Here is
the description for all the supported kinds.
.. class:: dl-horizontal
""")
for k in sorted(n.path_handlers.keys()):
v = n.path_handlers[k]
print(k)
print('\n'.join(' '+l.strip() for l in v.__doc__.splitlines()))
print()
|
Make path handlers list horizontal
|
Make path handlers list horizontal
Signed-off-by: Chris Warrick <de6f931166e131a07f31c96c765aee08f061d1a5@gmail.com>
|
Python
|
mit
|
s2hc-johan/nikola,wcmckee/nikola,gwax/nikola,x1101/nikola,okin/nikola,masayuko/nikola,xuhdev/nikola,wcmckee/nikola,gwax/nikola,knowsuchagency/nikola,atiro/nikola,andredias/nikola,gwax/nikola,xuhdev/nikola,atiro/nikola,x1101/nikola,okin/nikola,knowsuchagency/nikola,wcmckee/nikola,okin/nikola,getnikola/nikola,masayuko/nikola,okin/nikola,getnikola/nikola,masayuko/nikola,andredias/nikola,atiro/nikola,xuhdev/nikola,xuhdev/nikola,s2hc-johan/nikola,getnikola/nikola,knowsuchagency/nikola,getnikola/nikola,x1101/nikola,andredias/nikola,s2hc-johan/nikola
|
c6d50c3feed444f8f450c5c140e8470c6897f2bf
|
societies/models.py
|
societies/models.py
|
# -*- coding: utf-8 -*-
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
return 'GuitarSociety(name="{}", link="{}")'.format(self.name, self.link)
|
# -*- coding: utf-8 -*-
from django.db import models
from django_countries.fields import CountryField
class GuitarSociety(models.Model):
"""
Represents a single guitar society.
.. versionadded:: 0.1
"""
#: the name of the society
#: ..versionadded:: 0.1
name = models.CharField(max_length=1024)
#: the society's url
#: ..versionadded:: 0.1
link = models.URLField(max_length=255)
#: The country in which the society resides
#: .. versionadded:: 0.1
country = CountryField()
#: A free form "city" or "region" field used to display where
#: exactly the society is within a country
#: .. versionadded:: 0.1
region = models.CharField(max_length=512, null=True, default=None, blank=True)
def __str__(self):
return self.name
def __repr__(self):
return 'GuitarSociety("{}")'.format(self.name)
|
Make the Guitar Society __str__ Method a bit more Logical
|
Make the Guitar Society __str__ Method a bit more Logical
|
Python
|
bsd-3-clause
|
chrisguitarguy/GuitarSocieties.org,chrisguitarguy/GuitarSocieties.org
|
c7a209d2c4455325f1d215ca1c12074b394ae00e
|
gitdir/host/__init__.py
|
gitdir/host/__init__.py
|
import abc
import subprocess
import gitdir
class Host(abc.ABC):
@abc.abstractmethod
def __iter__(self):
raise NotImplementedError()
@abc.abstractmethod
def __str__(self):
raise NotImplementedError()
def clone(self, repo_spec):
raise NotImplementedError('Host {} does not support cloning'.format(self))
@property
def dir(self):
return gitdir.GITDIR / str(self)
def update(self):
for repo_dir in self:
subprocess.check_call(['git', 'pull'], cwd=str(repo_dir / 'master'))
def all():
for host_dir in gitdir.GITDIR.iterdir():
yield by_name(host_dir.name)
def by_name(hostname):
if hostname == 'github.com':
import gitdir.host.github
return gitdir.host.github.GitHub()
else:
raise ValueError('Unsupported hostname: {}'.format(hostname))
|
import abc
import subprocess
import gitdir
class Host(abc.ABC):
@abc.abstractmethod
def __iter__(self):
raise NotImplementedError()
@abc.abstractmethod
def __str__(self):
raise NotImplementedError()
def clone(self, repo_spec):
raise NotImplementedError('Host {} does not support cloning'.format(self))
@property
def dir(self):
return gitdir.GITDIR / str(self)
def update(self):
for repo_dir in self:
print('[ ** ] updating {}'.format(repo_dir))
subprocess.check_call(['git', 'pull'], cwd=str(repo_dir / 'master'))
def all():
for host_dir in gitdir.GITDIR.iterdir():
yield by_name(host_dir.name)
def by_name(hostname):
if hostname == 'github.com':
import gitdir.host.github
return gitdir.host.github.GitHub()
else:
raise ValueError('Unsupported hostname: {}'.format(hostname))
|
Add status messages to `gitdir update`
|
Add status messages to `gitdir update`
|
Python
|
mit
|
fenhl/gitdir
|
11278ec546cf1c84a6aefff7ed4e5a677203d008
|
index_addresses.py
|
index_addresses.py
|
import csv
import re
import os
from urlparse import urlparse
from elasticsearch import Elasticsearch
if os.environ.get('BONSAI_URL'):
url = urlparse(os.environ['BONSAI_URL'])
bonsai_tuple = url.netloc.partition('@')
ELASTICSEARCH_HOST = bonsai_tuple[2]
ELASTICSEARCH_AUTH = bonsai_tuple[0]
es = Elasticsearch([{'host': ELASTICSEARCH_HOST}], http_auth=ELASTICSEARCH_AUTH)
else:
es = Elasticsearch()
with open('data/ParcelCentroids.csv', 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
if re.match('\d+', address['PVANUM']):
es.index(index='addresses', doc_type='address', id=address['PVANUM'], body={'PVANUM': address['PVANUM'], 'NUM1': address['NUM1'], 'NAME': address['NAME'], 'TYPE': address['TYPE'], 'ADDRESS': address['ADDRESS'], 'UNIT': address['UNIT'], 'X': address['X'], 'Y': address['Y']})
csvfile.close()
|
import sys
import csv
import re
import os
from urlparse import urlparse
from elasticsearch import Elasticsearch
if os.environ.get('BONSAI_URL'):
url = urlparse(os.environ['BONSAI_URL'])
bonsai_tuple = url.netloc.partition('@')
ELASTICSEARCH_HOST = bonsai_tuple[2]
ELASTICSEARCH_AUTH = bonsai_tuple[0]
es = Elasticsearch([{'host': ELASTICSEARCH_HOST}], http_auth=ELASTICSEARCH_AUTH)
else:
es = Elasticsearch()
files_given = sys.argv
for file_name in files_given:
if file_name = 'index_addresses.py':
continue
else:
file_path = file_name
print 'adding ' + file_path
with open(file_path, 'r') as csvfile:
print "open file"
csv_reader = csv.DictReader(csvfile, fieldnames=[], restkey='undefined-fieldnames', delimiter=',')
current_row = 0
for row in csv_reader:
current_row += 1
if current_row == 1:
csv_reader.fieldnames = row['undefined-fieldnames']
continue
address = row
es.index(index='addresses', doc_type='address', id=current_row-1, body={'NUMBER': address[' NUMBER'], 'STREET': address[' STREET'], 'ADDRESS': address[' NUMBER'] + ' ' + address[' STREET'], 'X': address['LON'], 'Y': address[' LAT']})
csvfile.close()
|
Change index to OpenAddresses schema
|
Change index to OpenAddresses schema
|
Python
|
mit
|
codeforamerica/streetscope,codeforamerica/streetscope
|
932ee2737b822742996f234c90b715771fb876bf
|
tests/functional/api/view_pdf_test.py
|
tests/functional/api/view_pdf_test.py
|
import pytest
from tests.conftest import assert_cache_control
class TestViewPDFAPI:
def test_caching_is_disabled(self, test_app):
response = test_app.get("/pdf?url=http://example.com/foo.pdf")
assert_cache_control(
response.headers, ["max-age=0", "must-revalidate", "no-cache", "no-store"]
)
|
from tests.conftest import assert_cache_control
class TestViewPDFAPI:
def test_caching_is_disabled(self, test_app):
response = test_app.get("/pdf?url=http://example.com/foo.pdf")
assert_cache_control(
response.headers, ["max-age=0", "must-revalidate", "no-cache", "no-store"]
)
|
Fix lint errors after adding missing __init__ files
|
Fix lint errors after adding missing __init__ files
|
Python
|
bsd-2-clause
|
hypothesis/via,hypothesis/via,hypothesis/via
|
50f2cd076aae183376ab14d31594c104ac210738
|
shivyc.py
|
shivyc.py
|
#!/usr/bin/env python3
"""Main executable for ShivyC compiler
For usage, run "./shivyc.py --help".
"""
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The C file to compile
parser.add_argument("file_name")
return parser.parse_args()
def main():
"""Run the compiler
"""
arguments = get_arguments()
print(arguments)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
"""Main executable for ShivyC compiler
For usage, run "./shivyc.py --help".
"""
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The file name of the C file to compile. The file name gets saved to the
# file_name attribute of the returned object, but this parameter appears as
# "filename" (no underscore) on the command line.
parser.add_argument("file_name", metavar="filename")
return parser.parse_args()
def main():
"""Run the compiler
"""
arguments = get_arguments()
print(arguments)
if __name__ == "__main__":
main()
|
Rename file_name argument on command line
|
Rename file_name argument on command line
|
Python
|
mit
|
ShivamSarodia/ShivyC,ShivamSarodia/ShivyC,ShivamSarodia/ShivyC
|
d7149d8ea09c897fb954652beeef3bf008448d9e
|
mopidy/__init__.py
|
mopidy/__init__.py
|
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
from subprocess import PIPE, Popen
VERSION = (0, 4, 0)
def get_git_version():
process = Popen(['git', 'describe'], stdout=PIPE, stderr=PIPE)
if process.wait() != 0:
raise Exception('Execution of "git describe" failed')
version = process.stdout.read().strip()
if version.startswith('v'):
version = version[1:]
return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
try:
return get_git_version()
except Exception:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
import sys
if not (2, 6) <= sys.version_info < (3,):
sys.exit(u'Mopidy requires Python >= 2.6, < 3')
from subprocess import PIPE, Popen
VERSION = (0, 4, 0)
def get_git_version():
process = Popen(['git', 'describe'], stdout=PIPE, stderr=PIPE)
if process.wait() != 0:
raise EnvironmentError('Execution of "git describe" failed')
version = process.stdout.read().strip()
if version.startswith('v'):
version = version[1:]
return version
def get_plain_version():
return '.'.join(map(str, VERSION))
def get_version():
try:
return get_git_version()
except EnvironmentError:
return get_plain_version()
class MopidyException(Exception):
def __init__(self, message, *args, **kwargs):
super(MopidyException, self).__init__(message, *args, **kwargs)
self._message = message
@property
def message(self):
"""Reimplement message field that was deprecated in Python 2.6"""
return self._message
@message.setter
def message(self, message):
self._message = message
class SettingsError(MopidyException):
pass
class OptionalDependencyError(MopidyException):
pass
from mopidy import settings as default_settings_module
from mopidy.utils.settings import SettingsProxy
settings = SettingsProxy(default_settings_module)
|
Raise EnvironmentError instead of Exception to make pylint happy
|
Raise EnvironmentError instead of Exception to make pylint happy
|
Python
|
apache-2.0
|
pacificIT/mopidy,swak/mopidy,jodal/mopidy,vrs01/mopidy,swak/mopidy,woutervanwijk/mopidy,tkem/mopidy,rawdlite/mopidy,jodal/mopidy,mokieyue/mopidy,rawdlite/mopidy,jmarsik/mopidy,bacontext/mopidy,mokieyue/mopidy,quartz55/mopidy,ZenithDK/mopidy,dbrgn/mopidy,priestd09/mopidy,mopidy/mopidy,quartz55/mopidy,glogiotatidis/mopidy,vrs01/mopidy,hkariti/mopidy,bencevans/mopidy,ZenithDK/mopidy,dbrgn/mopidy,jmarsik/mopidy,mokieyue/mopidy,tkem/mopidy,bencevans/mopidy,pacificIT/mopidy,dbrgn/mopidy,priestd09/mopidy,SuperStarPL/mopidy,hkariti/mopidy,bacontext/mopidy,kingosticks/mopidy,mokieyue/mopidy,bencevans/mopidy,bencevans/mopidy,liamw9534/mopidy,hkariti/mopidy,hkariti/mopidy,jcass77/mopidy,liamw9534/mopidy,kingosticks/mopidy,mopidy/mopidy,adamcik/mopidy,swak/mopidy,tkem/mopidy,ZenithDK/mopidy,ali/mopidy,jcass77/mopidy,diandiankan/mopidy,vrs01/mopidy,jcass77/mopidy,kingosticks/mopidy,jodal/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,diandiankan/mopidy,mopidy/mopidy,SuperStarPL/mopidy,rawdlite/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,pacificIT/mopidy,dbrgn/mopidy,bacontext/mopidy,quartz55/mopidy,jmarsik/mopidy,abarisain/mopidy,diandiankan/mopidy,adamcik/mopidy,adamcik/mopidy,abarisain/mopidy,diandiankan/mopidy,vrs01/mopidy,ali/mopidy,woutervanwijk/mopidy,ali/mopidy,tkem/mopidy,ali/mopidy,glogiotatidis/mopidy,swak/mopidy,SuperStarPL/mopidy,bacontext/mopidy,ZenithDK/mopidy,priestd09/mopidy,SuperStarPL/mopidy,quartz55/mopidy,rawdlite/mopidy
|
66a9d140feb3a0bd332031853fb1038622fd5c5b
|
oidc_apis/utils.py
|
oidc_apis/utils.py
|
from collections import OrderedDict
def combine_uniquely(iterable1, iterable2):
"""
Combine unique items of two sequences preserving order.
:type seq1: Iterable[Any]
:type seq2: Iterable[Any]
:rtype: list[Any]
"""
result = OrderedDict.fromkeys(iterable1)
for item in iterable2:
result[item] = None
return list(result.keys())
def after_userlogin_hook(request, user, client):
"""Marks Django session modified
The purpose of this function is to keep the session used by the
oidc-provider fresh. This is achieved by pointing
'OIDC_AFTER_USERLOGIN_HOOK' setting to this."""
request.session.modified = True
# Return None to continue the login flow
return None
|
from collections import OrderedDict
import django
from oidc_provider import settings
from django.contrib.auth import BACKEND_SESSION_KEY
from django.contrib.auth import logout as django_user_logout
from users.models import LoginMethod, OidcClientOptions
from django.contrib.auth.views import redirect_to_login
def combine_uniquely(iterable1, iterable2):
"""
Combine unique items of two sequences preserving order.
:type seq1: Iterable[Any]
:type seq2: Iterable[Any]
:rtype: list[Any]
"""
result = OrderedDict.fromkeys(iterable1)
for item in iterable2:
result[item] = None
return list(result.keys())
def after_userlogin_hook(request, user, client):
"""Marks Django session modified
The purpose of this function is to keep the session used by the
oidc-provider fresh. This is achieved by pointing
'OIDC_AFTER_USERLOGIN_HOOK' setting to this."""
request.session.modified = True
last_login_backend = request.session.get('social_auth_last_login_backend')
client_options = OidcClientOptions.objects.get(oidc_client=client)
allowed_methods = client_options.login_methods.all()
if allowed_methods is None:
raise django.core.exceptions.PermissionDenied
allowed_providers = set((x.provider_id for x in allowed_methods))
if last_login_backend is not None:
active_backend = user.social_auth.filter(provider=last_login_backend)
if ((last_login_backend is None and user is not None)
or (active_backend.exists() and active_backend.first().provider not in allowed_providers)):
django_user_logout(request)
next_page = request.get_full_path()
return redirect_to_login(next_page, settings.get('OIDC_LOGIN_URL'))
# Return None to continue the login flow
return None
|
Implement current session auth method check
|
Implement current session auth method check
|
Python
|
mit
|
mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo
|
23ca8b449a075b4d8ebee19e7756e39f327e9988
|
dwitter/user/urls.py
|
dwitter/user/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<url_username>\w+)$',
views.user_feed, {'page_nr': '1', 'sort': 'new'}, name='user_feed'),
url(r'^(?P<url_username>\w+)/(?P<sort>hot|new|top)$',
views.user_feed, {'page_nr': '1'}, name='user_sort_feed'),
url(r'^(?P<url_username>\w+)/(?P<sort>hot|new|top)/(?P<page_nr>\d+)$',
views.user_feed, name='user_feed_page'),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<url_username>[\w.@+-]+)$',
views.user_feed, {'page_nr': '1', 'sort': 'new'}, name='user_feed'),
url(r'^(?P<url_username>[\w.@+-]+)/(?P<sort>hot|new|top)$',
views.user_feed, {'page_nr': '1'}, name='user_sort_feed'),
url(r'^(?P<url_username>[\w.@+-]+)/(?P<sort>hot|new|top)/(?P<page_nr>\d+)$',
views.user_feed, name='user_feed_page'),
]
|
Fix url lookup error for usernames certain special characters
|
Fix url lookup error for usernames certain special characters
|
Python
|
apache-2.0
|
lionleaf/dwitter,lionleaf/dwitter,lionleaf/dwitter
|
bca736ac15b06263c88d0265339b93b8c2b20d79
|
test/settings/gyptest-settings.py
|
test/settings/gyptest-settings.py
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
Make new settings test not run for xcode generator.
|
Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006
|
Python
|
bsd-3-clause
|
witwall/gyp,witwall/gyp,witwall/gyp,witwall/gyp,witwall/gyp
|
9ec80ed117ca393a63bf7eb739b4702bfbc0884e
|
tartpy/eventloop.py
|
tartpy/eventloop.py
|
"""
Very basic implementation of an event loop
==========================================
The eventloop is a singleton to schedule and run events.
Exports
-------
- ``EventLoop``: the basic eventloop
"""
import queue
import sched
import threading
import time
from .singleton import Singleton
class EventLoop(object, metaclass=Singleton):
"""A generic event loop object."""
def __init__(self):
self.scheduler = sched.scheduler()
def schedule(self, event):
"""Schedule an event.
An `event` is a thunk.
"""
self.scheduler.enter(0, 1, event)
def stop(self):
"""Stop the loop."""
pass
def run(self, block=False):
self.scheduler.run(blocking=block)
def run_forever(self, wait=0.05):
while True:
self.run()
time.sleep(wait)
def run_in_thread(self):
self.thread = threading.Thread(target=self.run_forever,
name='event_loop')
self.thread.daemon = True
self.thread.start()
|
"""
Very basic implementation of an event loop
==========================================
The eventloop is a singleton to schedule and run events.
Exports
-------
- ``EventLoop``: the basic eventloop
"""
import queue
import sched
import threading
import time
from .singleton import Singleton
class EventLoop(object, metaclass=Singleton):
"""A generic event loop object."""
def __init__(self):
self.scheduler = sched.scheduler()
def schedule(self, event):
"""Schedule an event.
An `event` is a thunk.
"""
self.scheduler.enter(0, 1, event)
def later(self, delay, event):
self.scheduler.enter(delay, 1, event)
def stop(self):
"""Stop the loop."""
pass
def run(self, block=False):
self.scheduler.run(blocking=block)
def run_forever(self, wait=0.05):
while True:
self.run()
time.sleep(wait)
def run_in_thread(self):
self.thread = threading.Thread(target=self.run_forever,
name='event_loop')
self.thread.daemon = True
self.thread.start()
|
Add function to schedule later
|
Add function to schedule later
|
Python
|
mit
|
waltermoreira/tartpy
|
b552d550ca7e4468d95da9a3005e07cbd2ab49d6
|
tests/test_stock.py
|
tests/test_stock.py
|
import cutplanner
import unittest
class TestStock(unittest.TestCase):
def setUp(self):
self.stock = cutplanner.Stock(120)
def test_cut(self):
self.stock.assign_cut(20)
self.assertEqual(self.stock.remaining_length, 100)
if __name__ == '__main__':
unittest.main()
|
import cutplanner
import unittest
class TestStock(unittest.TestCase):
def setUp(self):
self.stock = cutplanner.Stock(120)
self.piece = cutplanner.Piece(1, 20)
def test_cut(self):
self.stock.cut(self.piece)
self.assertEqual(self.stock.remaining_length, 100)
def test_used_length(self):
self.assertEqual(self.stock.used_length, 0)
self.stock.cut(self.piece)
self.assertEqual(self.stock.used_length, self.piece.length)
def test_shrink(self):
self.stock.cut(self.piece)
print "{0} remaining, {1} used".format(self.stock.remaining_length, self.stock.used_length)
new_len = self.stock.shrink(80)
self.assertEqual(new_len, 80)
if __name__ == '__main__':
unittest.main()
|
Add some initial tests for Stock.
|
Add some initial tests for Stock.
|
Python
|
mit
|
alanc10n/py-cutplanner
|
54eb7862d6b17f4e86a380004f6e682452fbebce
|
git_gutter_change.py
|
git_gutter_change.py
|
import sublime
import sublime_plugin
try:
from GitGutter.view_collection import ViewCollection
except ImportError:
from view_collection import ViewCollection
class GitGutterBaseChangeCommand(sublime_plugin.WindowCommand):
def run(self):
view = self.window.active_view()
inserted, modified, deleted = ViewCollection.diff(view)
all_changes = sorted(inserted + modified + deleted)
row, col = view.rowcol(view.sel()[0].begin())
current_row = row + 1
line = self.jump(all_changes, current_row)
self.window.active_view().run_command("goto_line", {"line": line})
class GitGutterNextChangeCommand(GitGutterBaseChangeCommand):
def jump(self, all_changes, current_row):
return next((change for change in all_changes
if change > current_row), current_row)
class GitGutterPrevChangeCommand(GitGutterBaseChangeCommand):
def jump(self, all_changes, current_row):
return next((change for change in reversed(all_changes)
if change < current_row), current_row)
|
import sublime
import sublime_plugin
try:
from GitGutter.view_collection import ViewCollection
except ImportError:
from view_collection import ViewCollection
class GitGutterBaseChangeCommand(sublime_plugin.WindowCommand):
def lines_to_blocks(self, lines):
blocks = []
last_line = -2
for line in lines:
if line > last_line+1:
blocks.append(line)
last_line = line
return blocks
def run(self):
view = self.window.active_view()
inserted, modified, deleted = ViewCollection.diff(view)
inserted = self.lines_to_blocks(inserted)
modified = self.lines_to_blocks(modified)
all_changes = sorted(inserted + modified + deleted)
row, col = view.rowcol(view.sel()[0].begin())
current_row = row + 1
line = self.jump(all_changes, current_row)
self.window.active_view().run_command("goto_line", {"line": line})
class GitGutterNextChangeCommand(GitGutterBaseChangeCommand):
def jump(self, all_changes, current_row):
return next((change for change in all_changes
if change > current_row), current_row)
class GitGutterPrevChangeCommand(GitGutterBaseChangeCommand):
def jump(self, all_changes, current_row):
return next((change for change in reversed(all_changes)
if change < current_row), current_row)
|
Make lines jumps only jump to blocks over changes
|
Make lines jumps only jump to blocks over changes
Instead of every line in a block of modifications which is tedious
|
Python
|
mit
|
tushortz/GitGutter,biodamasceno/GitGutter,tushortz/GitGutter,akpersad/GitGutter,michaelhogg/GitGutter,natecavanaugh/GitGutter,natecavanaugh/GitGutter,tushortz/GitGutter,michaelhogg/GitGutter,natecavanaugh/GitGutter,biodamasceno/GitGutter,akpersad/GitGutter,akpersad/GitGutter,robfrawley/sublime-git-gutter,natecavanaugh/GitGutter,michaelhogg/GitGutter,robfrawley/sublime-git-gutter,michaelhogg/GitGutter,jisaacks/GitGutter,tushortz/GitGutter,robfrawley/sublime-git-gutter,robfrawley/sublime-git-gutter,biodamasceno/GitGutter,biodamasceno/GitGutter,akpersad/GitGutter
|
a36fe5002bbf5dfcf27a3251cfed85c341e2156d
|
cbcollections.py
|
cbcollections.py
|
class defaultdict(dict):
"""Poor man's implementation of defaultdict for Python 2.4
"""
def __init__(self, default_factory=None, **kwargs):
self.default_factory = default_factory
super(defaultdict, self).__init__(**kwargs)
def __getitem__(self, key):
if self.default_factory is None:
return super(defaultdict, self).__getitem__(key)
else:
try:
return super(defaultdict, self).__getitem__(key)
except KeyError:
return self.default_factory()
|
class defaultdict(dict):
"""Poor man's implementation of defaultdict for Python 2.4
"""
def __init__(self, default_factory=None, **kwargs):
self.default_factory = default_factory
super(defaultdict, self).__init__(**kwargs)
def __getitem__(self, key):
if self.default_factory is None:
return super(defaultdict, self).__getitem__(key)
else:
try:
return super(defaultdict, self).__getitem__(key)
except KeyError:
self[key] = self.default_factory()
return self[key]
|
Save generated value for defaultdict
|
MB-6867: Save generated value for defaultdict
Instead of just returning value, keep it in dict.
Change-Id: I2a9862503b71f2234a4a450c48998b5f53a951bc
Reviewed-on: http://review.couchbase.org/21602
Tested-by: Bin Cui <ed18693fff32c00e22495f4877a3b901bed09041@gmail.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
|
Python
|
apache-2.0
|
couchbase/couchbase-cli,couchbaselabs/couchbase-cli,membase/membase-cli,membase/membase-cli,couchbase/couchbase-cli,membase/membase-cli,couchbaselabs/couchbase-cli,couchbaselabs/couchbase-cli
|
b27a51f19ea3f9d13672a0db51f7d2b05f9539f0
|
kitten/validation.py
|
kitten/validation.py
|
import jsonschema
CORE_SCHEMA = {
'type': 'object',
'properties': {
'paradigm': {
'type': 'string',
},
'method': {
'type': 'string',
},
},
'additionalProperties': False,
}
VALIDATORS = {
'core': CORE_SCHEMA
}
def validate(request, schema_name):
jsonschema.validate(request, VALIDATORS[schema_name])
|
import jsonschema
CORE_SCHEMA = {
'type': 'object',
'properties': {
'paradigm': {
'type': 'string',
},
'method': {
'type': 'string',
},
'address': {
'type': 'string',
},
},
'additionalProperties': False,
}
VALIDATORS = {
'core': CORE_SCHEMA
}
def validate(request, schema_name):
jsonschema.validate(request, VALIDATORS[schema_name])
|
Add 'address' field to core schema
|
Add 'address' field to core schema
|
Python
|
mit
|
thiderman/network-kitten
|
fb0b956563efbcd22af8300fd4341e3cb277b80a
|
app/models/user.py
|
app/models/user.py
|
from app import db
from flask import Flask
from datetime import datetime
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String(80))
bio = db.Column(db.String(180))
github_id = db.Column(db.Integer, unique=True)
github_username = db.Column(db.String(64), unique=True)
github_token = db.Column(db.String(300), unique=True)
password = db.Column(db.String(300))
created_at = db.Column(db.DateTime)
def __init__(self, username, email, password, name=None):
self.email = email
self.username = username
self.password = password
if name is None:
self.name = username
else:
self.name = name
self.created_at = datetime.now()
is_authenticated = True
is_anonymous = False
is_active = True
def get_id(self):
return unicode(self.id)
def __repr__(self):
return '<User %r>' % self.username
|
from app import db
from flask import Flask
from datetime import datetime
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String(80))
bio = db.Column(db.String(180))
avatar_url = db.Column(db.String(256))
owner = db.Column(db.String(32), nullable=False, default='user')
github_id = db.Column(db.Integer, unique=True)
github_username = db.Column(db.String(64), unique=True)
github_token = db.Column(db.String(300), unique=True)
password = db.Column(db.String(300))
created_at = db.Column(db.DateTime)
def __init__(self, username, email, password, name=None):
self.email = email
self.username = username
self.password = password
if name is None:
self.name = username
else:
self.name = name
self.created_at = datetime.now()
is_authenticated = True
is_anonymous = False
is_active = True
def get_id(self):
return unicode(self.id)
def __repr__(self):
return '<User %r>' % self.username
|
Add avatar_url and owner field for User
|
Add avatar_url and owner field for User
|
Python
|
agpl-3.0
|
lc-soft/GitDigger,lc-soft/GitDigger,lc-soft/GitDigger,lc-soft/GitDigger
|
f42e62005ea4cc3e71cf10dda8c0bace029014c5
|
kubespawner/utils.py
|
kubespawner/utils.py
|
"""
Misc. general utility functions, not tied to Kubespawner directly
"""
from concurrent.futures import ThreadPoolExecutor
from jupyterhub.utils import DT_MIN, DT_MAX, DT_SCALE
from tornado import gen, ioloop
from traitlets.config import SingletonConfigurable
class SingletonExecutor(SingletonConfigurable, ThreadPoolExecutor):
"""
Simple wrapper to ThreadPoolExecutor that is also a singleton.
We want one ThreadPool that is used by all the spawners, rather
than one ThreadPool per spawner!
"""
pass
@gen.coroutine
def exponential_backoff(func, fail_message, timeout=10, *args, **kwargs):
loop = ioloop.IOLoop.current()
tic = loop.time()
dt = DT_MIN
while dt > 0:
if func(*args, **kwargs):
return
else:
yield gen.sleep(dt)
dt = min(dt * DT_SCALE, DT_MAX, timeout - (loop.time() - tic))
raise TimeoutError(fail_message)
|
"""
Misc. general utility functions, not tied to Kubespawner directly
"""
from concurrent.futures import ThreadPoolExecutor
import random
from jupyterhub.utils import DT_MIN, DT_MAX, DT_SCALE
from tornado import gen, ioloop
from traitlets.config import SingletonConfigurable
class SingletonExecutor(SingletonConfigurable, ThreadPoolExecutor):
"""
Simple wrapper to ThreadPoolExecutor that is also a singleton.
We want one ThreadPool that is used by all the spawners, rather
than one ThreadPool per spawner!
"""
pass
@gen.coroutine
def exponential_backoff(func, fail_message, timeout=10, *args, **kwargs):
loop = ioloop.IOLoop.current()
start_tic = loop.time()
dt = DT_MIN
while True:
if (loop.time() - start_tic) > timeout:
# We time out!
break
if func(*args, **kwargs):
return
else:
yield gen.sleep(dt)
# Add some random jitter to improve performance
# This makes sure that we don't overload any single iteration
# of the tornado loop with too many things
# See https://www.awsarchitectureblog.com/2015/03/backoff.html
# for a good example of why and how this helps
dt = min(DT_MAX, (1 + random.random()) * (dt * DT_SCALE))
raise TimeoutError(fail_message)
|
Add random jitter to the exponential backoff function
|
Add random jitter to the exponential backoff function
|
Python
|
bsd-3-clause
|
yuvipanda/jupyterhub-kubernetes-spawner,jupyterhub/kubespawner
|
9f6d4d9e82ef575164535a8fb9ea80417458dd6b
|
website/files/models/dataverse.py
|
website/files/models/dataverse.py
|
import requests
from framework.auth.core import _get_current_user
from website.files.models.base import File, Folder, FileNode, FileVersion
__all__ = ('DataverseFile', 'DataverseFolder', 'DataverseFileNode')
class DataverseFileNode(FileNode):
provider = 'dataverse'
class DataverseFolder(DataverseFileNode, Folder):
pass
class DataverseFile(DataverseFileNode, File):
def touch(self, version=None, revision=None, **kwargs):
"""Note: Dataverse only has psuedo versions, don't save them"""
version = revision or version # Use revision or version
resp = requests.get(self.generate_waterbutler_url(meta=True, version=version, **kwargs))
if resp.status_code != 200:
return None
data = resp.json()
self.name = data['data']['name']
self.materialized_path = data['data']['materialized']
version = FileVersion(identifier=version)
version.update_metadata(data['data'], save=False)
user = _get_current_user()
if not user or not self.node.can_edit(user=user):
try:
# Users without edit permission can only see published files
if not data['data']['extra']['hasPublishedVersion']:
# Blank out name and path for the render
# Dont save because there's no reason to persist the change
self.name = ''
self.materialized_path = ''
return (version, '<div class="alert alert-info" role="alert">This file does not exist.</div>')
except (KeyError, IndexError):
pass
|
from framework.auth.core import _get_current_user
from website.files.models.base import File, Folder, FileNode, FileVersion
__all__ = ('DataverseFile', 'DataverseFolder', 'DataverseFileNode')
class DataverseFileNode(FileNode):
provider = 'dataverse'
class DataverseFolder(DataverseFileNode, Folder):
pass
class DataverseFile(DataverseFileNode, File):
version_identifier = 'version'
def update(self, revision, data):
"""Note: Dataverse only has psuedo versions, don't save them"""
self.name = data['name']
self.materialized_path = data['materialized']
version = FileVersion(identifier=revision)
version.update_metadata(data, save=False)
user = _get_current_user()
if not user or not self.node.can_edit(user=user):
try:
# Users without edit permission can only see published files
if not data['extra']['hasPublishedVersion']:
# Blank out name and path for the render
# Dont save because there's no reason to persist the change
self.name = ''
self.materialized_path = ''
return (version, '<div class="alert alert-info" role="alert">This file does not exist.</div>')
except (KeyError, IndexError):
pass
return version
|
Move override logic into update rather than touch
|
Move override logic into update rather than touch
|
Python
|
apache-2.0
|
Johnetordoff/osf.io,mluke93/osf.io,SSJohns/osf.io,chrisseto/osf.io,hmoco/osf.io,caseyrygt/osf.io,GageGaskins/osf.io,acshi/osf.io,alexschiller/osf.io,caseyrollins/osf.io,ZobairAlijan/osf.io,wearpants/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,CenterForOpenScience/osf.io,SSJohns/osf.io,alexschiller/osf.io,adlius/osf.io,samchrisinger/osf.io,sloria/osf.io,hmoco/osf.io,erinspace/osf.io,DanielSBrown/osf.io,samanehsan/osf.io,haoyuchen1992/osf.io,Ghalko/osf.io,crcresearch/osf.io,brandonPurvis/osf.io,CenterForOpenScience/osf.io,danielneis/osf.io,doublebits/osf.io,zamattiac/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,KAsante95/osf.io,wearpants/osf.io,caneruguz/osf.io,petermalcolm/osf.io,pattisdr/osf.io,mattclark/osf.io,brianjgeiger/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,rdhyee/osf.io,abought/osf.io,RomanZWang/osf.io,mluke93/osf.io,monikagrabowska/osf.io,TomHeatwole/osf.io,mfraezz/osf.io,cosenal/osf.io,saradbowman/osf.io,felliott/osf.io,aaxelb/osf.io,jnayak1/osf.io,cwisecarver/osf.io,danielneis/osf.io,kch8qx/osf.io,GageGaskins/osf.io,aaxelb/osf.io,aaxelb/osf.io,TomHeatwole/osf.io,crcresearch/osf.io,ZobairAlijan/osf.io,zamattiac/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,zachjanicki/osf.io,rdhyee/osf.io,samchrisinger/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,chennan47/osf.io,TomBaxter/osf.io,jnayak1/osf.io,felliott/osf.io,binoculars/osf.io,acshi/osf.io,Nesiehr/osf.io,kch8qx/osf.io,Nesiehr/osf.io,Ghalko/osf.io,mfraezz/osf.io,GageGaskins/osf.io,petermalcolm/osf.io,arpitar/osf.io,samanehsan/osf.io,abought/osf.io,laurenrevere/osf.io,emetsger/osf.io,petermalcolm/osf.io,mattclark/osf.io,RomanZWang/osf.io,jnayak1/osf.io,kwierman/osf.io,leb2dg/osf.io,doublebits/osf.io,cslzchen/osf.io,zachjanicki/osf.io,njantrania/osf.io,chennan47/osf.io,kwierman/osf.io,brianjgeiger/osf.io,hmoco/osf.io,GageGaskins/osf.io,zamattiac/osf.io,emetsger/osf.io,caseyrollins/osf.io,binoculars/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,acshi/osf.io,rdhyee/osf.io,petermalcolm/osf.io,cosenal/osf.io,asanfilippo7/osf.io,adlius/osf.io,mluke93/osf.io,adlius/osf.io,mluo613/osf.io,kch8qx/osf.io,Ghalko/osf.io,haoyuchen1992/osf.io,HalcyonChimera/osf.io,abought/osf.io,pattisdr/osf.io,haoyuchen1992/osf.io,brandonPurvis/osf.io,mluo613/osf.io,saradbowman/osf.io,mattclark/osf.io,jnayak1/osf.io,felliott/osf.io,arpitar/osf.io,caseyrollins/osf.io,amyshi188/osf.io,acshi/osf.io,asanfilippo7/osf.io,emetsger/osf.io,RomanZWang/osf.io,njantrania/osf.io,sloria/osf.io,SSJohns/osf.io,cwisecarver/osf.io,leb2dg/osf.io,leb2dg/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,billyhunt/osf.io,adlius/osf.io,ticklemepierce/osf.io,chrisseto/osf.io,haoyuchen1992/osf.io,erinspace/osf.io,Ghalko/osf.io,acshi/osf.io,doublebits/osf.io,wearpants/osf.io,alexschiller/osf.io,icereval/osf.io,amyshi188/osf.io,doublebits/osf.io,KAsante95/osf.io,alexschiller/osf.io,wearpants/osf.io,samchrisinger/osf.io,njantrania/osf.io,njantrania/osf.io,cwisecarver/osf.io,KAsante95/osf.io,zamattiac/osf.io,kch8qx/osf.io,TomBaxter/osf.io,ZobairAlijan/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,DanielSBrown/osf.io,aaxelb/osf.io,rdhyee/osf.io,mluke93/osf.io,mfraezz/osf.io,zachjanicki/osf.io,cslzchen/osf.io,hmoco/osf.io,cosenal/osf.io,zachjanicki/osf.io,chennan47/osf.io,doublebits/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,billyhunt/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,brianjgeiger/osf.io,samanehsan/osf.io,binoculars/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,danielneis/osf.io,laurenrevere/osf.io,arpitar/osf.io,kwierman/osf.io,billyhunt/osf.io,kwierman/osf.io,danielneis/osf.io,ticklemepierce/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,ZobairAlijan/osf.io,cslzchen/osf.io,caseyrygt/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,mluo613/osf.io,TomHeatwole/osf.io,amyshi188/osf.io,caseyrygt/osf.io,mfraezz/osf.io,SSJohns/osf.io,erinspace/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,arpitar/osf.io,baylee-d/osf.io,RomanZWang/osf.io,billyhunt/osf.io,mluo613/osf.io,mluo613/osf.io,amyshi188/osf.io,abought/osf.io,pattisdr/osf.io,ticklemepierce/osf.io,chrisseto/osf.io,emetsger/osf.io,caseyrygt/osf.io,cosenal/osf.io,RomanZWang/osf.io,HalcyonChimera/osf.io,sloria/osf.io,icereval/osf.io,alexschiller/osf.io,samanehsan/osf.io,icereval/osf.io,TomHeatwole/osf.io,ticklemepierce/osf.io,DanielSBrown/osf.io,asanfilippo7/osf.io,CenterForOpenScience/osf.io
|
06d210cdc811f0051a489f335cc94a604e99a35d
|
werobot/session/mongodbstorage.py
|
werobot/session/mongodbstorage.py
|
# -*- coding: utf-8 -*-
from werobot.session import SessionStorage
from werobot.utils import json_loads, json_dumps
class MongoDBStorage(SessionStorage):
"""
MongoDBStorage 会把你的 Session 数据储存在一个 MongoDB Collection 中 ::
import pymongo
import werobot
from werobot.session.mongodbstorage import MongoDBStorage
collection = pymongo.MongoClient()["wechat"]["session"]
session_storage = MongoDBStorage(collection)
robot = werobot.WeRoBot(token="token", enable_session=True,
session_storage=session_storage)
你需要安装 ``pymongo`` 才能使用 MongoDBStorage 。
:param collection: 一个 MongoDB Collection。
"""
def __init__(self, collection):
import pymongo
assert isinstance(collection,
pymongo.collection.Collection)
self.collection = collection
collection.create_index("wechat_id")
def _get_document(self, id):
return self.collection.find_one({"wechat_id": id})
def get(self, id):
document = self._get_document(id)
if document:
session_json = document["session"]
return json_loads(session_json)
return {}
def set(self, id, value):
document = self._get_document(id)
session = json_dumps(value)
if document:
document["session"] = session
self.collection.save(document)
else:
self.collection.insert({
"wechat_id": id,
"session": session
})
def delete(self, id):
document = self._get_document(id)
if document:
self.collection.remove(document["_id"])
|
# -*- coding: utf-8 -*-
from werobot.session import SessionStorage
from werobot.utils import json_loads, json_dumps
class MongoDBStorage(SessionStorage):
"""
MongoDBStorage 会把你的 Session 数据储存在一个 MongoDB Collection 中 ::
import pymongo
import werobot
from werobot.session.mongodbstorage import MongoDBStorage
collection = pymongo.MongoClient()["wechat"]["session"]
session_storage = MongoDBStorage(collection)
robot = werobot.WeRoBot(token="token", enable_session=True,
session_storage=session_storage)
你需要安装 ``pymongo`` 才能使用 MongoDBStorage 。
:param collection: 一个 MongoDB Collection。
"""
def __init__(self, collection):
self.collection = collection
collection.create_index("wechat_id")
def _get_document(self, id):
return self.collection.find_one({"wechat_id": id})
def get(self, id):
document = self._get_document(id)
if document:
session_json = document["session"]
return json_loads(session_json)
return {}
def set(self, id, value):
session = json_dumps(value)
self.collection.replace_one({
"wechat_id": id
}, {
"wechat_id": id,
"session": session
}, upsert=True)
def delete(self, id):
self.collection.delete_one({
"wechat_id": id
})
|
Use new pymongo API in MongoDBStorage
|
Use new pymongo API in MongoDBStorage
|
Python
|
mit
|
whtsky/WeRoBot,whtsky/WeRoBot,adam139/WeRobot,adam139/WeRobot,whtsky/WeRoBot,weberwang/WeRoBot,weberwang/WeRoBot
|
841ca9cfbdb8faac9d8deb47b65717b5fb7c8eb4
|
mfh.py
|
mfh.py
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
if args.client is not None:
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive() or server_process.is_alive():
time.sleep(5)
else:
if args.updater:
# update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
main()
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
client = create_process("client", mfhclient.main, args, update_event)
serv = create_process("server", server.main, args, update_event)
if args.client is not None:
client.start()
if args.client is not None:
serv.start()
if args.updater:
trigger = create_process("trigger", update.trigger, update_event)
trigger.start()
trigger.join()
while client.is_alive() or serv.is_alive():
time.sleep(5)
else:
if args.updater:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
def create_process(name, function, *arguments):
process = Process(
args=arguments,
name="{0}_process".format(name),
target=function,
)
return process
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
processes = {}
main()
|
Move all the process creation in a new function
|
Move all the process creation in a new function
This reduces the size of code.
|
Python
|
mit
|
Zloool/manyfaced-honeypot
|
5f128bbfc61169ac6b5f0e9f4dc6bcd05092382c
|
requests_cache/serializers/pipeline.py
|
requests_cache/serializers/pipeline.py
|
"""
.. automodsumm:: requests_cache.serializers.pipeline
:classes-only:
:nosignatures:
"""
from typing import Any, List, Union
from ..models import CachedResponse
class Stage:
"""Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods"""
def __init__(self, obj: Any, dumps: str = "dumps", loads: str = "loads"):
self.obj = obj
self.dumps = getattr(obj, dumps)
self.loads = getattr(obj, loads)
class SerializerPipeline:
"""A sequence of steps used to serialize and deserialize response objects"""
def __init__(self, steps: List):
self.steps = steps
self.dump_steps = [step.dumps for step in steps]
self.load_steps = [step.loads for step in reversed(steps)]
def dumps(self, value) -> Union[str, bytes]:
for step in self.dump_steps:
value = step(value)
return value
def loads(self, value) -> CachedResponse:
for step in self.load_steps:
value = step(value)
return value
|
"""
.. automodsumm:: requests_cache.serializers.pipeline
:classes-only:
:nosignatures:
"""
from typing import Any, Callable, List, Union
from ..models import CachedResponse
class Stage:
"""Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods
Args:
obj: Serializer object or module, if applicable
dumps: Serialization function, or name of method on ``obj``
loads: Deserialization function, or name of method on ``obj``
"""
def __init__(
self,
obj: Any = None,
dumps: Union[str, Callable] = 'dumps',
loads: Union[str, Callable] = 'loads',
):
self.obj = obj
self.dumps = getattr(obj, dumps) if isinstance(dumps, str) else dumps
self.loads = getattr(obj, loads) if isinstance(loads, str) else loads
class SerializerPipeline:
"""A sequence of steps used to serialize and deserialize response objects.
This can be initialized with :py:class:`Stage` objects, or any objects with ``dumps()`` and
``loads()`` methods
"""
def __init__(self, stages: List):
self.steps = stages
self.dump_steps = [step.dumps for step in stages]
self.load_steps = [step.loads for step in reversed(stages)]
def dumps(self, value) -> Union[str, bytes]:
for step in self.dump_steps:
value = step(value)
return value
def loads(self, value) -> CachedResponse:
for step in self.load_steps:
value = step(value)
return value
|
Allow Stage objects to take functions instead of object + method names
|
Allow Stage objects to take functions instead of object + method names
|
Python
|
bsd-2-clause
|
reclosedev/requests-cache
|
657741f3d4df734afef228e707005dc21d540e34
|
post-refunds-back.py
|
post-refunds-back.py
|
#!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from gratipay import wireup
from gratipay.models.exchange_route import ExchangeRoute
from gratipay.models.participant import Participant
from gratipay.billing.exchanges import record_exchange
db = wireup.db(wireup.env())
inp = csv.reader(open('balanced/refund/refunds.completed.csv'))
note = 'refund of advance payment; see https://medium.com/gratipay-blog/charging-in-arrears-18cacf779bee'
for ts, id, amount, username, route_id, status_code, content in inp:
if status_code != '201': continue
amount = '-' + amount[:-2] + '.' + amount[-2:]
print('posting {} back for {}'.format(amount, username))
route = ExchangeRoute.from_id(route_id)
rp = route.participant
participant = Participant.from_id(rp) if type(rp) is long else rp # Such a hack. :(
route.set_attributes(participant=participant)
record_exchange(db, route, amount, 0, participant, 'pending', note)
|
#!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from decimal import Decimal as D
from gratipay import wireup
from gratipay.models.exchange_route import ExchangeRoute
from gratipay.models.participant import Participant
from gratipay.billing.exchanges import record_exchange
db = wireup.db(wireup.env())
inp = csv.reader(open('refunds.completed.csv'))
note = 'refund of advance payment; see https://medium.com/gratipay-blog/18cacf779bee'
total = N = 0
for ts, id, amount, username, route_id, success, ref in inp:
print('posting {} back for {}'.format(amount, username))
assert success == 'True'
total += D(amount)
N += 1
amount = D('-' + amount)
route = ExchangeRoute.from_id(route_id)
# Such a hack. :(
rp = route.participant
participant = Participant.from_id(rp) if type(rp) is long else rp
route.set_attributes(participant=participant)
exchange_id = record_exchange(db, route, amount, 0, participant, 'pending', note)
db.run("update exchanges set ref=%s where id=%s", (ref, exchange_id))
print('posted {} back for {}'.format(total, N))
|
Update post-back script for Braintree
|
Update post-back script for Braintree
|
Python
|
mit
|
gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com
|
022062c409ee06a719b5687ea1feb989c5cad627
|
app/grandchallenge/pages/sitemaps.py
|
app/grandchallenge/pages/sitemaps.py
|
from grandchallenge.core.sitemaps import SubdomainSitemap
from grandchallenge.pages.models import Page
class PagesSitemap(SubdomainSitemap):
priority = 0.8
def items(self):
return Page.objects.filter(
permission_level=Page.ALL, challenge__hidden=False
)
|
from grandchallenge.core.sitemaps import SubdomainSitemap
from grandchallenge.pages.models import Page
class PagesSitemap(SubdomainSitemap):
priority = 0.8
def items(self):
return Page.objects.filter(
permission_level=Page.ALL, challenge__hidden=False, hidden=False,
)
|
Remove hidden public pages from sitemap
|
Remove hidden public pages from sitemap
|
Python
|
apache-2.0
|
comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django
|
c5239c6bbb40ede4279b33b965c5ded26a78b2ae
|
app/tests/manual/test_twitter_api.py
|
app/tests/manual/test_twitter_api.py
|
# -*- coding: utf-8 -*-
"""
Twitter API test module.
Local test to check that Twitter credentials are valid connect to Twitter
API and that the auth functions can be used to do this.
s"""
from __future__ import absolute_import
from unittest import TestCase
from lib.twitter_api import authentication
class TestAuth(TestCase):
def test_generateAppAccessToken(self):
auth = authentication._generateAppAccessToken()
def test_getTweepyConnection(self):
auth = authentication._generateAppAccessToken()
api = authentication._getTweepyConnection(auth)
def test_getAPIConnection(self):
"""
Test that App Access token can be used to connect to Twitter API.
"""
api = authentication.getAPIConnection(userFlow=False)
def test_getAppOnlyConnection(self):
"""
Test App-only token.
"""
api = authentication.getAppOnlyConnection()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Twitter API test module.
Local test to check that Twitter credentials are valid connect to Twitter
API and that the auth functions can be used to do this.
"""
from __future__ import absolute_import
import os
import sys
import unittest
from unittest import TestCase
# Allow imports to be done when executing this file directly.
sys.path.insert(0, os.path.abspath(os.path.join(
os.path.dirname(__file__), os.path.pardir, os.path.pardir)
))
from lib.twitter_api import authentication
class TestAuth(TestCase):
def test_generateAppAccessToken(self):
auth = authentication._generateAppAccessToken()
def test_getTweepyConnection(self):
auth = authentication._generateAppAccessToken()
api = authentication._getTweepyConnection(auth)
def test_getAPIConnection(self):
"""
Test that App Access token can be used to connect to Twitter API.
"""
api = authentication.getAPIConnection(userFlow=False)
def test_getAppOnlyConnection(self):
"""
Test App-only token.
"""
api = authentication.getAppOnlyConnection()
if __name__ == '__main__':
unittest.main()
|
Update Twitter auth test to run directly
|
test: Update Twitter auth test to run directly
|
Python
|
mit
|
MichaelCurrin/twitterverse,MichaelCurrin/twitterverse
|
c6862c5f864db4e77dd835f074efdd284667e6fd
|
util/ldjpp.py
|
util/ldjpp.py
|
#! /usr/bin/env python
from __future__ import print_function
import argparse
import json
parser = argparse.ArgumentParser(description='Pretty-print LDJSON.')
parser.add_argument('--indent', metavar='N', type=int, default=2,
dest='indent', help='indentation for pretty-printing')
parser.add_argument('--file', metavar='FILE', required=True, dest='file',
type=argparse.FileType('r'), help='input LDJSON file')
parser.add_argument('--sort', action='store_true', dest='sortkeys',
help='sort object keys')
args = parser.parse_args()
for line in args.file:
record = json.loads(line)
print(json.dumps(record, indent=args.indent, sort_keys=args.sortkeys))
|
#! /usr/bin/env python
from __future__ import print_function
import click
import json
from collections import OrderedDict
def json_loader(sortkeys):
def _loader(line):
if sortkeys:
return json.loads(line)
else:
# if --no-sortkeys, let's preserve file order
return json.JSONDecoder(object_pairs_hook=OrderedDict).decode(line)
return _loader
@click.command()
@click.option('indent', '-i', '--indent', default=2,
help='indentation for pretty-printing')
@click.option('--sortkeys/--no-sortkeys', default=False,
help='sort object keys')
@click.argument('infile', type=click.File())
def cli(indent, sortkeys, infile):
"""Pretty-print LDJSON."""
loader = json_loader(sortkeys)
for line in infile:
record = loader(line)
print(json.dumps(record, indent=indent, sort_keys=sortkeys))
if __name__ == '__main__':
cli()
|
Use click instead of argparse
|
Use click instead of argparse
|
Python
|
mit
|
mhyfritz/goontools,mhyfritz/goontools,mhyfritz/goontools
|
b7decb588f5b6e4d15fb04fa59aa571e5570cbfe
|
djangae/contrib/contenttypes/apps.py
|
djangae/contrib/contenttypes/apps.py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.management import update_contenttypes as django_update_contenttypes
from django.db.models.signals import post_migrate
from .management import update_contenttypes
from .models import SimulatedContentTypeManager
class ContentTypesConfig(AppConfig):
name = 'djangae.contrib.contenttypes'
verbose_name = _("Djangae Content Types")
label = "djangae_contenttypes"
def ready(self):
if django_update_contenttypes != update_contenttypes:
post_migrate.disconnect(django_update_contenttypes)
from django.db import models
from django.contrib.contenttypes import models as django_models
if not isinstance(django_models.ContentType.objects, SimulatedContentTypeManager):
django_models.ContentType.objects = SimulatedContentTypeManager()
django_models.ContentType.objects.auto_created = True
# Really force the default manager to use the Simulated one
meta = django_models.ContentType._meta
meta.local_managers[0] = SimulatedContentTypeManager()
meta._expire_cache()
# Our generated IDs take up a 64 bit range (signed) but aren't auto
# incrementing so update the field to reflect that (for validation)
meta.pk.__class__ = models.BigIntegerField
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.management import update_contenttypes as django_update_contenttypes
from django.db.models.signals import post_migrate
from .management import update_contenttypes
from .models import SimulatedContentTypeManager
class ContentTypesConfig(AppConfig):
name = 'djangae.contrib.contenttypes'
verbose_name = _("Djangae Content Types")
label = "djangae_contenttypes"
def ready(self):
if django_update_contenttypes != update_contenttypes:
post_migrate.disconnect(django_update_contenttypes)
from django.db import models
from django.contrib.contenttypes import models as django_models
if not isinstance(django_models.ContentType.objects, SimulatedContentTypeManager):
django_models.ContentType.objects = SimulatedContentTypeManager(django_models.ContentType)
django_models.ContentType.objects.auto_created = True
# Really force the default manager to use the Simulated one
meta = django_models.ContentType._meta
if hasattr(meta, "local_managers"):
# Django >= 1.10
meta.local_managers[0] = SimulatedContentTypeManager()
else:
django_models.ContentType._default_manager = SimulatedContentTypeManager(django_models.ContentType)
meta._expire_cache()
# Our generated IDs take up a 64 bit range (signed) but aren't auto
# incrementing so update the field to reflect that (for validation)
meta.pk.__class__ = models.BigIntegerField
|
Fix up for Django 1.9
|
Fix up for Django 1.9
|
Python
|
bsd-3-clause
|
grzes/djangae,potatolondon/djangae,grzes/djangae,potatolondon/djangae,grzes/djangae
|
dfd3bff4560d1711624b8508795eb3debbaafa40
|
changes/api/snapshotimage_details.py
|
changes/api/snapshotimage_details.py
|
from __future__ import absolute_import
from flask.ext.restful import reqparse
from changes.api.base import APIView
from changes.config import db
from changes.models import SnapshotImage, SnapshotStatus
class SnapshotImageDetailsAPIView(APIView):
parser = reqparse.RequestParser()
parser.add_argument('status', choices=SnapshotStatus._member_names_)
def get(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
return self.respond(image)
def post(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
args = self.parser.parse_args()
if args.status:
image.status = SnapshotStatus[args.status]
db.session.add(image)
db.session.flush()
if image.status == SnapshotStatus.active:
snapshot = image.snapshot
inactive_image_query = SnapshotImage.query.filter(
SnapshotImage.status != SnapshotStatus.active,
SnapshotImage.snapshot_id == snapshot.id,
).exists()
if not db.session.query(inactive_image_query).scalar():
snapshot.status = SnapshotStatus.active
db.session.add(snapshot)
db.session.commit()
return self.respond(image)
|
from __future__ import absolute_import
from flask.ext.restful import reqparse
from changes.api.base import APIView
from changes.config import db
from changes.models import SnapshotImage, SnapshotStatus
class SnapshotImageDetailsAPIView(APIView):
parser = reqparse.RequestParser()
parser.add_argument('status', choices=SnapshotStatus._member_names_)
def get(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
return self.respond(image)
def post(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
args = self.parser.parse_args()
if args.status:
image.status = SnapshotStatus[args.status]
db.session.add(image)
db.session.flush()
if image.status == SnapshotStatus.active:
snapshot = image.snapshot
inactive_image_query = SnapshotImage.query.filter(
SnapshotImage.status != SnapshotStatus.active,
SnapshotImage.snapshot_id == snapshot.id,
).exists()
if not db.session.query(inactive_image_query).scalar():
snapshot.status = SnapshotStatus.active
db.session.add(snapshot)
elif snapshot.status == SnapshotStatus.active:
snapshot.status = SnapshotStatus.inactive
db.session.add(snapshot)
db.session.commit()
return self.respond(image)
|
Mark snapshots as inactive if any are not valid
|
Mark snapshots as inactive if any are not valid
|
Python
|
apache-2.0
|
dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes
|
f8b4b1a860b5c0a3ff16dbb8bbf83010bd9a1009
|
feincms3/plugins/__init__.py
|
feincms3/plugins/__init__.py
|
# flake8: noqa
from . import html
from . import snippet
try:
from . import external
except ImportError: # pragma: no cover
pass
try:
from . import image
except ImportError: # pragma: no cover
pass
try:
from . import richtext
except ImportError: # pragma: no cover
pass
try:
from . import versatileimage
except ImportError: # pragma: no cover
pass
|
# flake8: noqa
from . import html
from . import snippet
try:
import requests
except ImportError: # pragma: no cover
pass
else:
from . import external
try:
import imagefield
except ImportError: # pragma: no cover
pass
else:
from . import image
try:
import feincms3.cleanse
except ImportError: # pragma: no cover
pass
else:
from . import richtext
try:
import versatileimagefield
except ImportError: # pragma: no cover
pass
else:
from . import versatileimage
|
Stop hiding local import errors
|
feincms3.plugins: Stop hiding local import errors
|
Python
|
bsd-3-clause
|
matthiask/feincms3,matthiask/feincms3,matthiask/feincms3
|
b2eebbdcc14dd47d6ad8bb385966f13ed13890c1
|
superdesk/coverages.py
|
superdesk/coverages.py
|
from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
'delivery': rel('archive'),
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
'planning_item': rel('planning'),
}
|
from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
'delivery': {'type': 'string'},
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
'planning_item': {'type': 'string'},
}
|
Fix data relation not working for custom Guids
|
Fix data relation not working for custom Guids
|
Python
|
agpl-3.0
|
plamut/superdesk,sivakuna-aap/superdesk,mdhaman/superdesk-aap,sivakuna-aap/superdesk,liveblog/superdesk,pavlovicnemanja/superdesk,petrjasek/superdesk,mugurrus/superdesk,ioanpocol/superdesk,pavlovicnemanja/superdesk,Aca-jov/superdesk,akintolga/superdesk,vied12/superdesk,gbbr/superdesk,fritzSF/superdesk,ancafarcas/superdesk,ioanpocol/superdesk-ntb,mdhaman/superdesk-aap,marwoodandrew/superdesk-aap,darconny/superdesk,akintolga/superdesk-aap,amagdas/superdesk,sivakuna-aap/superdesk,thnkloud9/superdesk,fritzSF/superdesk,akintolga/superdesk-aap,ancafarcas/superdesk,akintolga/superdesk,pavlovicnemanja92/superdesk,amagdas/superdesk,vied12/superdesk,verifiedpixel/superdesk,superdesk/superdesk-ntb,Aca-jov/superdesk,superdesk/superdesk,akintolga/superdesk,marwoodandrew/superdesk-aap,hlmnrmr/superdesk,verifiedpixel/superdesk,pavlovicnemanja/superdesk,petrjasek/superdesk-server,pavlovicnemanja/superdesk,liveblog/superdesk,thnkloud9/superdesk,superdesk/superdesk-aap,plamut/superdesk,vied12/superdesk,ioanpocol/superdesk-ntb,plamut/superdesk,darconny/superdesk,sjunaid/superdesk,superdesk/superdesk-aap,amagdas/superdesk,verifiedpixel/superdesk,vied12/superdesk,darconny/superdesk,fritzSF/superdesk,sivakuna-aap/superdesk,amagdas/superdesk,superdesk/superdesk-aap,gbbr/superdesk,marwoodandrew/superdesk-aap,mdhaman/superdesk,petrjasek/superdesk-ntb,sivakuna-aap/superdesk,akintolga/superdesk-aap,akintolga/superdesk,superdesk/superdesk-ntb,fritzSF/superdesk,marwoodandrew/superdesk,marwoodandrew/superdesk,verifiedpixel/superdesk,amagdas/superdesk,marwoodandrew/superdesk-aap,sjunaid/superdesk,petrjasek/superdesk-ntb,sjunaid/superdesk,Aca-jov/superdesk,ioanpocol/superdesk-ntb,petrjasek/superdesk-server,pavlovicnemanja92/superdesk,superdesk/superdesk,ancafarcas/superdesk,superdesk/superdesk,mdhaman/superdesk,fritzSF/superdesk,vied12/superdesk,pavlovicnemanja92/superdesk,petrjasek/superdesk-ntb,hlmnrmr/superdesk,marwoodandrew/superdesk,superdesk/superdesk-ntb,petrjasek/superdesk,hlmnrmr/superdesk,petrjasek/superdesk,mugurrus/superdesk,gbbr/superdesk,pavlovicnemanja92/superdesk,plamut/superdesk,liveblog/superdesk,verifiedpixel/superdesk,petrjasek/superdesk,mdhaman/superdesk,petrjasek/superdesk-ntb,mugurrus/superdesk,mdhaman/superdesk-aap,superdesk/superdesk-aap,superdesk/superdesk-ntb,marwoodandrew/superdesk,pavlovicnemanja92/superdesk,ioanpocol/superdesk,thnkloud9/superdesk,marwoodandrew/superdesk,liveblog/superdesk,mdhaman/superdesk-aap,ioanpocol/superdesk,akintolga/superdesk,plamut/superdesk,liveblog/superdesk,superdesk/superdesk,akintolga/superdesk-aap
|
4147e6f560889c75abbfd9c8e85ea38ffe408550
|
suelta/mechanisms/facebook_platform.py
|
suelta/mechanisms/facebook_platform.py
|
from suelta.util import bytes
from suelta.sasl import Mechanism, register_mechanism
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
class X_FACEBOOK_PLATFORM(Mechanism):
def __init__(self, sasl, name):
super(X_FACEBOOK_PLATFORM, self).__init__(sasl, name)
self.check_values(['access_token', 'api_key'])
def process(self, challenge=None):
if challenge is not None:
values = {}
for kv in challenge.split('&'):
key, value = kv.split('=')
values[key] = value
resp_data = {
'method': values['method'],
'v': '1.0',
'call_id': '1.0',
'nonce': values['nonce'],
'access_token': self.values['access_token'],
'api_key': self.values['api_key']
}
resp = '&'.join(['%s=%s' % (k, v) for k, v in resp_data.items()])
return bytes(resp)
return bytes('')
def okay(self):
return True
register_mechanism('X-FACEBOOK-PLATFORM', 40, X_FACEBOOK_PLATFORM, use_hashes=False)
|
from suelta.util import bytes
from suelta.sasl import Mechanism, register_mechanism
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
class X_FACEBOOK_PLATFORM(Mechanism):
def __init__(self, sasl, name):
super(X_FACEBOOK_PLATFORM, self).__init__(sasl, name)
self.check_values(['access_token', 'api_key'])
def process(self, challenge=None):
if challenge is not None:
values = {}
for kv in challenge.split(b'&'):
key, value = kv.split(b'=')
values[key] = value
resp_data = {
b'method': values[b'method'],
b'v': b'1.0',
b'call_id': b'1.0',
b'nonce': values[b'nonce'],
b'access_token': self.values['access_token'],
b'api_key': self.values['api_key']
}
resp = '&'.join(['%s=%s' % (k, v) for k, v in resp_data.items()])
return bytes(resp)
return b''
def okay(self):
return True
register_mechanism('X-FACEBOOK-PLATFORM', 40, X_FACEBOOK_PLATFORM, use_hashes=False)
|
Work around Python3's byte semantics.
|
Work around Python3's byte semantics.
|
Python
|
mit
|
dwd/Suelta
|
1dbe7acc945a545d3b18ec5025c19b26d1ed110f
|
test/test_sparql_construct_bindings.py
|
test/test_sparql_construct_bindings.py
|
from rdflib import Graph, URIRef, Literal, BNode
from rdflib.plugins.sparql import prepareQuery
from rdflib.compare import isomorphic
import unittest
class TestConstructInitBindings(unittest.TestCase):
def test_construct_init_bindings(self):
"""
This is issue https://github.com/RDFLib/rdflib/issues/1001
"""
g1 = Graph()
q_str = ("""
PREFIX : <urn:ns1:>
CONSTRUCT {
?uri :prop1 ?val1;
:prop2 ?c .
}
WHERE {
bind(uri(concat("urn:ns1:", ?a)) as ?uri)
bind(?b as ?val1)
}
""")
q_prepared = prepareQuery(q_str)
expected = [
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop1'), Literal('B')),
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop2'), Literal('C'))
]
results = g1.query(q_prepared, initBindings={
'a': Literal('A'),
'b': Literal('B'),
'c': Literal('C')
})
self.assertCountEqual(list(results), expected)
|
from rdflib import Graph, URIRef, Literal, BNode
from rdflib.plugins.sparql import prepareQuery
from rdflib.compare import isomorphic
import unittest
from nose.tools import eq_
class TestConstructInitBindings(unittest.TestCase):
def test_construct_init_bindings(self):
"""
This is issue https://github.com/RDFLib/rdflib/issues/1001
"""
g1 = Graph()
q_str = ("""
PREFIX : <urn:ns1:>
CONSTRUCT {
?uri :prop1 ?val1;
:prop2 ?c .
}
WHERE {
bind(uri(concat("urn:ns1:", ?a)) as ?uri)
bind(?b as ?val1)
}
""")
q_prepared = prepareQuery(q_str)
expected = [
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop1'), Literal('B')),
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop2'), Literal('C'))
]
results = g1.query(q_prepared, initBindings={
'a': Literal('A'),
'b': Literal('B'),
'c': Literal('C')
})
eq_(sorted(results, key=lambda x: str(x[1])), expected)
|
Fix unit tests for python2
|
Fix unit tests for python2
|
Python
|
bsd-3-clause
|
RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib
|
2ebbe2f9f23621d10a70d0817d83da33b002299e
|
rest_surveys/urls.py
|
rest_surveys/urls.py
|
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
include(router.urls)),
url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
include(slashless_router.urls)),
]
|
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(router.urls)),
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(slashless_router.urls)),
]
|
Set a default api path
|
Set a default api path
|
Python
|
mit
|
danxshap/django-rest-surveys
|
1cbd56988478320268838f77e8cc6237d95346fd
|
test/dunya/conn_test.py
|
test/dunya/conn_test.py
|
import unittest
from compmusic.dunya.conn import _make_url
class ConnTest(unittest.TestCase):
def test_make_url(self):
params = {"first": "%^grtà"}
url = _make_url("path", **params)
self.assertEqual(url, 'http://dunya.compmusic.upf.edu/path?first=%25%5Egrt%C3%A0')
|
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import unittest
from compmusic.dunya.conn import _make_url
class ConnTest(unittest.TestCase):
def test_make_url(self):
params = {"first": "%^grtà"}
url = _make_url("path", **params)
self.assertEqual(url, 'http://dunya.compmusic.upf.edu/path?first=%25%5Egrt%C3%A0')
|
Declare the encoding of conn.py as utf-8
|
Declare the encoding of conn.py as utf-8
|
Python
|
agpl-3.0
|
MTG/pycompmusic
|
a7437e657f55cd708baba83421941e67d474daf7
|
tests/test_utilities.py
|
tests/test_utilities.py
|
from __future__ import (absolute_import, division, print_function)
from folium.utilities import camelize
def test_camelize():
assert camelize('variable_name') == 'variableName'
assert camelize('variableName') == 'variableName'
assert camelize('name') == 'name'
assert camelize('very_long_variable_name') == 'veryLongVariableName'
|
from __future__ import (absolute_import, division, print_function)
from folium.utilities import camelize, deep_copy
from folium import Map, FeatureGroup, Marker
def test_camelize():
assert camelize('variable_name') == 'variableName'
assert camelize('variableName') == 'variableName'
assert camelize('name') == 'name'
assert camelize('very_long_variable_name') == 'veryLongVariableName'
def test_deep_copy():
m = Map()
fg = FeatureGroup().add_to(m)
Marker(location=(0, 0)).add_to(fg)
m_copy = deep_copy(m)
def check(item, item_copy):
assert type(item) is type(item_copy)
assert item._name == item_copy._name
for attr in item.__dict__.keys():
if not attr.startswith('_'):
assert getattr(item, attr) == getattr(item_copy, attr)
assert item is not item_copy
assert item._id != item_copy._id
for child, child_copy in zip(item._children.values(),
item_copy._children.values()):
check(child, child_copy)
check(m, m_copy)
|
Add test for deep_copy function
|
Add test for deep_copy function
|
Python
|
mit
|
python-visualization/folium,ocefpaf/folium,ocefpaf/folium,python-visualization/folium
|
fe05b5f694671a46dd3391b9cb6561923345c4b7
|
rpi_gpio_http/app.py
|
rpi_gpio_http/app.py
|
from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
|
from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
|
Disable warnings in GPIO lib
|
Disable warnings in GPIO lib
|
Python
|
mit
|
voidpp/rpi-gpio-http
|
378f55687131324bb5c43e3b50f9db5fe3b39662
|
zaqar_ui/__init__.py
|
zaqar_ui/__init__.py
|
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'neutron_lbaas_dashboard').version_string()
|
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pbr.version
__version__ = pbr.version.VersionInfo('zaqar_ui').version_string()
|
Fix Zaqar-ui with wrong reference pbr version
|
Fix Zaqar-ui with wrong reference pbr version
Change-Id: I84cdb865478a232886ba1059febf56735a0d91ba
|
Python
|
apache-2.0
|
openstack/zaqar-ui,openstack/zaqar-ui,openstack/zaqar-ui,openstack/zaqar-ui
|
d659c685f40de7eb7b2ccd007888177fb158e139
|
tests/integration/players.py
|
tests/integration/players.py
|
#!/usr/bin/env python
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
|
#!/usr/bin/env python
import requests
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
r = requests.post(url, params=values, verify=False)
r.raise_for_status()
if (r.status_code == 201):
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username,
password,
email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
|
Switch to requests library instead of urllib
|
Switch to requests library instead of urllib
|
Python
|
mit
|
dropshot/dropshot-server
|
eeeba609afe732b8e95aa535e70d4cdd2ae1aac7
|
tests/unit/test_cufflinks.py
|
tests/unit/test_cufflinks.py
|
import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
# fixed_fn = os.path.join(self.out_dir, "fixed.gtf")
# fixed = cufflinks.fix_cufflinks_attributes(self.ref_gtf, clean, fixed_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
# assert(file_exists(fixed))
def tearDown(self):
shutil.rmtree(self.out_dir)
|
import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
def tearDown(self):
shutil.rmtree(self.out_dir)
|
Remove some cruft from the cufflinks test.
|
Remove some cruft from the cufflinks test.
|
Python
|
mit
|
vladsaveliev/bcbio-nextgen,biocyberman/bcbio-nextgen,verdurin/bcbio-nextgen,fw1121/bcbio-nextgen,gifford-lab/bcbio-nextgen,chapmanb/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,hjanime/bcbio-nextgen,verdurin/bcbio-nextgen,lbeltrame/bcbio-nextgen,verdurin/bcbio-nextgen,SciLifeLab/bcbio-nextgen,chapmanb/bcbio-nextgen,lpantano/bcbio-nextgen,vladsaveliev/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,mjafin/bcbio-nextgen,brainstorm/bcbio-nextgen,lbeltrame/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,fw1121/bcbio-nextgen,a113n/bcbio-nextgen,brainstorm/bcbio-nextgen,SciLifeLab/bcbio-nextgen,mjafin/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,mjafin/bcbio-nextgen,lbeltrame/bcbio-nextgen,biocyberman/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,chapmanb/bcbio-nextgen,gifford-lab/bcbio-nextgen,lpantano/bcbio-nextgen,lpantano/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,gifford-lab/bcbio-nextgen,fw1121/bcbio-nextgen,vladsaveliev/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,a113n/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,hjanime/bcbio-nextgen,SciLifeLab/bcbio-nextgen,brainstorm/bcbio-nextgen,biocyberman/bcbio-nextgen,hjanime/bcbio-nextgen,a113n/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen
|
c956fbbbc6e4dbd713728c1feda6bce2956a0894
|
runtime/Python3/src/antlr4/__init__.py
|
runtime/Python3/src/antlr4/__init__.py
|
from antlr4.Token import Token
from antlr4.InputStream import InputStream
from antlr4.FileStream import FileStream
from antlr4.BufferedTokenStream import TokenStream
from antlr4.CommonTokenStream import CommonTokenStream
from antlr4.Lexer import Lexer
from antlr4.Parser import Parser
from antlr4.dfa.DFA import DFA
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNDeserializer import ATNDeserializer
from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.PredictionContext import PredictionContextCache
from antlr4.ParserRuleContext import RuleContext, ParserRuleContext
from antlr4.tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
from antlr4.error.Errors import RecognitionException, IllegalStateException, NoViableAltException
from antlr4.error.ErrorStrategy import BailErrorStrategy
from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener
from antlr4.Utils import str_list
|
from antlr4.Token import Token
from antlr4.InputStream import InputStream
from antlr4.FileStream import FileStream
from antlr4.StdinStream import StdinStream
from antlr4.BufferedTokenStream import TokenStream
from antlr4.CommonTokenStream import CommonTokenStream
from antlr4.Lexer import Lexer
from antlr4.Parser import Parser
from antlr4.dfa.DFA import DFA
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNDeserializer import ATNDeserializer
from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.PredictionContext import PredictionContextCache
from antlr4.ParserRuleContext import RuleContext, ParserRuleContext
from antlr4.tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
from antlr4.error.Errors import RecognitionException, IllegalStateException, NoViableAltException
from antlr4.error.ErrorStrategy import BailErrorStrategy
from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener
from antlr4.Utils import str_list
|
Allow importing StdinStream from antlr4 package
|
Allow importing StdinStream from antlr4 package
|
Python
|
bsd-3-clause
|
parrt/antlr4,ericvergnaud/antlr4,antlr/antlr4,antlr/antlr4,ericvergnaud/antlr4,parrt/antlr4,ericvergnaud/antlr4,parrt/antlr4,antlr/antlr4,parrt/antlr4,parrt/antlr4,antlr/antlr4,antlr/antlr4,antlr/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,parrt/antlr4,antlr/antlr4,antlr/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,antlr/antlr4,ericvergnaud/antlr4,antlr/antlr4,parrt/antlr4,parrt/antlr4,ericvergnaud/antlr4,parrt/antlr4,parrt/antlr4
|
14c22be85b9c9b3d13cad1130bb8d8d83d69d68a
|
selenium_testcase/testcases/content.py
|
selenium_testcase/testcases/content.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .utils import dom_contains, wait_for
class ContentTestMixin:
def should_see_immediately(self, text):
""" Assert that DOM contains the given text. """
self.assertTrue(dom_contains(self.browser, text))
@wait_for
def should_see(self, text):
""" Wait for text to appear before testing assertion. """
return self.should_see_immediately(text)
def should_not_see(self, text):
""" Wait for text to not appear before testing assertion. """
self.assertRaises(AssertionError, self.should_see, text)
@wait_for
def has_title(self, title):
""" Assert that page title matches. """
self.assertEqual(self.browser.title, title)
def has_not_title(self, title):
""" Assert when page title does not match. """
self.assertRaises(AssertionError, self.has_title, title)
@wait_for
def title_contains(self, text):
""" Assert that page title contains text. """
self.assertIn(text, self.browser.title)
def title_does_not_contain(self, text):
""" Assert that page title does not contain text. """
self.assertRaises(AssertionError, self.title_contains, text)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .utils import wait_for
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
class ContentTestMixin:
content_search_list = (
(By.XPATH,
'//*[contains(normalize-space(.), "{}") '
'and not(./*[contains(normalize-space(.), "{}")])]',),
)
def should_see_immediately(self, text, **kwargs):
""" Assert that DOM contains the given text. """
self.find_element(
self.content_search_list, text, text, **kwargs)
@wait_for
def should_see(self, text):
""" Wait for text to appear before testing assertion. """
return self.should_see_immediately(text)
def should_not_see(self, text):
""" Wait for text to not appear before testing assertion. """
self.assertRaises(NoSuchElementException, self.should_see, text)
@wait_for
def has_title(self, title):
""" Assert that page title matches. """
self.assertEqual(self.browser.title, title)
def has_not_title(self, title):
""" Assert when page title does not match. """
self.assertRaises(AssertionError, self.has_title, title)
@wait_for
def title_contains(self, text):
""" Assert that page title contains text. """
self.assertIn(text, self.browser.title)
def title_does_not_contain(self, text):
""" Assert that page title does not contain text. """
self.assertRaises(AssertionError, self.title_contains, text)
|
Update should_see_immediately to use local find_element method.
|
Update should_see_immediately to use local find_element method.
This commit adds a content_search_list and replaces dom_contains
with our local version of find_element. It adds an attribute
called content_search_list that can be overridden by the derived
TestCase class as necessary for corner cases.
|
Python
|
bsd-3-clause
|
nimbis/django-selenium-testcase,nimbis/django-selenium-testcase
|
7947d474da8bb086493890d81a6788d76e00b108
|
numba/cuda/tests/__init__.py
|
numba/cuda/tests/__init__.py
|
from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
if cuda.is_available():
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
|
from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
if cuda.is_available():
suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
|
Fix tests on machine without CUDA
|
Fix tests on machine without CUDA
|
Python
|
bsd-2-clause
|
sklam/numba,numba/numba,seibert/numba,IntelLabs/numba,jriehl/numba,stonebig/numba,gmarkall/numba,cpcloud/numba,IntelLabs/numba,gmarkall/numba,jriehl/numba,cpcloud/numba,sklam/numba,cpcloud/numba,numba/numba,stonebig/numba,stefanseefeld/numba,sklam/numba,cpcloud/numba,seibert/numba,sklam/numba,gmarkall/numba,stefanseefeld/numba,jriehl/numba,numba/numba,cpcloud/numba,stefanseefeld/numba,IntelLabs/numba,numba/numba,IntelLabs/numba,stuartarchibald/numba,jriehl/numba,sklam/numba,IntelLabs/numba,numba/numba,stonebig/numba,stuartarchibald/numba,stonebig/numba,jriehl/numba,gmarkall/numba,stefanseefeld/numba,stuartarchibald/numba,stuartarchibald/numba,stonebig/numba,stefanseefeld/numba,seibert/numba,seibert/numba,gmarkall/numba,stuartarchibald/numba,seibert/numba
|
910d1288adddd0c8dd500c1be5e488502c1ed335
|
localflavor/nl/forms.py
|
localflavor/nl/forms.py
|
# -*- coding: utf-8 -*-
"""NL-specific Form helpers."""
from __future__ import unicode_literals
from django import forms
from django.utils import six
from .nl_provinces import PROVINCE_CHOICES
from .validators import NLBSNFieldValidator, NLZipCodeFieldValidator
class NLZipCodeField(forms.CharField):
"""A Dutch zip code field."""
default_validators = [NLZipCodeFieldValidator()]
def clean(self, value):
if isinstance(value, six.string_types):
value = value.upper().replace(' ', '')
if len(value) == 6:
value = '%s %s' % (value[:4], value[4:])
return super(NLZipCodeField, self).clean(value)
class NLProvinceSelect(forms.Select):
"""A Select widget that uses a list of provinces of the Netherlands as it's choices."""
def __init__(self, attrs=None):
super(NLProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class NLBSNFormField(forms.CharField):
"""
A Dutch social security number (BSN) field.
http://nl.wikipedia.org/wiki/Sofinummer
.. versionadded:: 1.6
"""
default_validators = [NLBSNFieldValidator()]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 9
super(NLBSNFormField, self).__init__(*args, **kwargs)
|
# -*- coding: utf-8 -*-
"""NL-specific Form helpers."""
from __future__ import unicode_literals
from django import forms
from django.utils import six
from .nl_provinces import PROVINCE_CHOICES
from .validators import NLBSNFieldValidator, NLZipCodeFieldValidator
class NLZipCodeField(forms.CharField):
"""A Dutch zip code field."""
default_validators = [NLZipCodeFieldValidator()]
def clean(self, value):
if isinstance(value, six.string_types):
value = value.upper().replace(' ', '')
if len(value) == 6:
value = '%s %s' % (value[:4], value[4:])
return super(NLZipCodeField, self).clean(value)
class NLProvinceSelect(forms.Select):
"""A Select widget that uses a list of provinces of the Netherlands as it's choices."""
def __init__(self, attrs=None):
super(NLProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class NLBSNFormField(forms.CharField):
"""
A Dutch social security number (BSN) field.
https://nl.wikipedia.org/wiki/Burgerservicenummer
Note that you may only process the BSN if you have a legal basis to do so!
.. versionadded:: 1.6
"""
default_validators = [NLBSNFieldValidator()]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 9
super(NLBSNFormField, self).__init__(*args, **kwargs)
|
Fix the wikipedia link and include a warning
|
Fix the wikipedia link and include a warning
|
Python
|
bsd-3-clause
|
django/django-localflavor,rsalmaso/django-localflavor
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.