commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
96fac3babb22386fd94eccc86abb5bd15c917c53
|
rpyc/core/__init__.py
|
rpyc/core/__init__.py
|
from rpyc.core.stream import SocketStream, PipeStream
from rpyc.core.channel import Channel
from rpyc.core.protocol import Connection
from rpyc.core.netref import BaseNetref
from rpyc.core.async import AsyncResult, AsyncResultTimeout
from rpyc.core.service import Service, VoidService, SlaveService
from rpyc.core.vinegar import GenericException, install_rpyc_excepthook
# for .NET
import platform
if platform.system() == "cli":
import clr
# Add Reference to IronPython zlib (required for channel compression)
# grab it from http://bitbucket.org/jdhardy/ironpythonzlib
clr.AddReference("IronPython.Zlib")
install_rpyc_excepthook()
|
from rpyc.core.stream import SocketStream, PipeStream
from rpyc.core.channel import Channel
from rpyc.core.protocol import Connection
from rpyc.core.netref import BaseNetref
from rpyc.core.async import AsyncResult, AsyncResultTimeout
from rpyc.core.service import Service, VoidService, SlaveService
from rpyc.core.vinegar import GenericException, install_rpyc_excepthook
install_rpyc_excepthook()
|
Remove Code specific for IronPython - useless with 2.7
|
Remove Code specific for IronPython - useless with 2.7
|
Python
|
mit
|
glpatcern/rpyc,sponce/rpyc,pyq881120/rpyc,pombredanne/rpyc,geromueller/rpyc,kwlzn/rpyc,siemens/rpyc,eplaut/rpyc,gleon99/rpyc
|
82cb6d190ce1e805914cc791518c97e063ecdc96
|
tests/test_individual.py
|
tests/test_individual.py
|
import sys, os
myPath = os.path.dirname(os.path.abspath(__file__))
print(myPath)
sys.path.insert(0, myPath + '/../SATSolver')
from unittest import TestCase
from individual import Individual
from BitVector import BitVector
from bitarray import bitarray
class TestIndividual(TestCase):
"""
Testing class for Individual.
"""
def test_get(self):
ind = Individual(9)
ind.data = bitarray("011010100")
self.assertEqual(ind.get(5), 1)
self.assertEqual(ind.get(1), 0)
self.assertEqual(ind.get(10), None)
def test_set(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.set(2, 1)
self.assertEqual(ind.get(2), 1)
ind.set(7, 0)
self.assertEqual(ind.get(7), 0)
ind.set(6, 1)
self.assertEqual(ind.get(6), 1)
def test_flip(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.flip(1)
self.assertEqual(ind.get(1), 1)
ind.flip(8)
self.assertEqual(ind.get(8), 1)
ind.flip(4)
self.assertEqual(ind.get(4), 1)
|
import sys, os
myPath = os.path.dirname(os.path.abspath(__file__))
print(myPath)
sys.path.insert(0, myPath + '/../SATSolver')
from unittest import TestCase
from individual import Individual
from bitarray import bitarray
class TestIndividual(TestCase):
"""
Testing class for Individual.
"""
def test_get(self):
ind = Individual(9)
ind.data = bitarray("011010100")
self.assertEqual(ind.get(5), 1)
self.assertEqual(ind.get(1), 0)
self.assertEqual(ind.get(10), None)
def test_set(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.set(2, 1)
self.assertEqual(ind.get(2), 1)
ind.set(7, 0)
self.assertEqual(ind.get(7), 0)
ind.set(6, 1)
self.assertEqual(ind.get(6), 1)
def test_flip(self):
ind = Individual(9)
ind.data = bitarray("011010100")
ind.flip(1)
self.assertEqual(ind.get(1), 1)
ind.flip(8)
self.assertEqual(ind.get(8), 1)
ind.flip(4)
self.assertEqual(ind.get(4), 1)
|
Remove BitVector import - Build fails
|
Remove BitVector import - Build fails
|
Python
|
mit
|
Imperium-Software/resolver,Imperium-Software/resolver,Imperium-Software/resolver,Imperium-Software/resolver
|
b2764b9ada2ca3bec548ceb82e71697f7515f14f
|
citrination_client/__init__.py
|
citrination_client/__init__.py
|
import os
import re
from citrination_client.base import *
from citrination_client.search import *
from citrination_client.data import *
from citrination_client.models import *
from citrination_client.views.descriptors import *
from .client import CitrinationClient
from pkg_resources import get_distribution, DistributionNotFound
def __get_version():
"""
Returns the version of this package, whether running from source or install
:return: The version of this package
"""
try:
# Try local first, if missing setup.py, then use pkg info
here = os.path.abspath(os.path.dirname(__file__))
print("here:"+here)
with open(os.path.join(here, "../setup.py")) as fp:
version_file = fp.read()
version_match = re.search(r"version=['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
except IOError:
pass
try:
_dist = get_distribution('citrination_client')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'citrination_client')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
raise RuntimeError("Unable to find version string.")
else:
return _dist.version
__version__ = __get_version()
|
import os
import re
from citrination_client.base import *
from citrination_client.search import *
from citrination_client.data import *
from citrination_client.models import *
from citrination_client.views.descriptors import *
from .client import CitrinationClient
from pkg_resources import get_distribution, DistributionNotFound
def __get_version():
"""
Returns the version of this package, whether running from source or install
:return: The version of this package
"""
try:
# Try local first, if missing setup.py, then use pkg info
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "../setup.py")) as fp:
version_file = fp.read()
version_match = re.search(r"version=['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
except IOError:
pass
try:
_dist = get_distribution('citrination_client')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'citrination_client')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
raise RuntimeError("Unable to find version string.")
else:
return _dist.version
__version__ = __get_version()
|
Remove debug print on getVersion
|
Remove debug print on getVersion
|
Python
|
apache-2.0
|
CitrineInformatics/python-citrination-client
|
2f140327c24a8efab5482a975793dddedd0ebfc4
|
nucleus/wsgi.py
|
nucleus/wsgi.py
|
# flake8: noqa
"""
WSGI config for nucleus project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
# newrelic.agent must be imported and initialized first
# https://docs.newrelic.com/docs/agents/python-agent/installation/python-agent-advanced-integration#manual-integration
import newrelic.agent
newrelic.agent.initialize('newrelic.ini')
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'nucleus.settings') # NOQA
from django.core.handlers.wsgi import WSGIRequest
from django.core.wsgi import get_wsgi_application
from decouple import config
IS_HTTPS = config('HTTPS', default='off', cast=bool)
class WSGIHTTPSRequest(WSGIRequest):
def _get_scheme(self):
if IS_HTTPS:
return 'https'
return super(WSGIHTTPSRequest, self)._get_scheme()
application = get_wsgi_application()
application.request_class = WSGIHTTPSRequest
if config('SENTRY_DSN', None):
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
application = Sentry(application)
newrelic_license_key = config('NEW_RELIC_LICENSE_KEY', default=None)
if newrelic_license_key:
application = newrelic.agent.WSGIApplicationWrapper(application)
|
# flake8: noqa
# newrelic.agent must be imported and initialized first
# https://docs.newrelic.com/docs/agents/python-agent/installation/python-agent-advanced-integration#manual-integration
import newrelic.agent
newrelic.agent.initialize('newrelic.ini')
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'nucleus.settings') # NOQA
from django.core.handlers.wsgi import WSGIRequest
from django.core.wsgi import get_wsgi_application
from decouple import config
IS_HTTPS = config('HTTPS', default='off', cast=bool)
class WSGIHTTPSRequest(WSGIRequest):
def _get_scheme(self):
if IS_HTTPS:
return 'https'
return super(WSGIHTTPSRequest, self)._get_scheme()
application = get_wsgi_application()
application.request_class = WSGIHTTPSRequest
if config('SENTRY_DSN', None):
from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
application = Sentry(application)
newrelic_license_key = config('NEW_RELIC_LICENSE_KEY', default=None)
if newrelic_license_key:
application = newrelic.agent.WSGIApplicationWrapper(application)
|
Remove old docstring with link to old django docs
|
Remove old docstring with link to old django docs
|
Python
|
mpl-2.0
|
mozilla/nucleus,mozilla/nucleus,mozilla/nucleus,mozilla/nucleus
|
18bf9dd5e1e054d0c260959a8379f331940e167f
|
online_status/__init__.py
|
online_status/__init__.py
|
VERSION = (0, 1, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = "%s %s" % (version, VERSION[3])
if VERSION[4] != 0:
version = '%s %s' % (version, VERSION[4])
return version
|
VERSION = (0, 1, 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
return version
|
Fix 'index out of bound' issue
|
Fix 'index out of bound' issue
|
Python
|
unlicense
|
hovel/django-online-status,hovel/django-online-status
|
6d8f79d32194f5b956785a021ee6fad6093a03f1
|
insanity/layers.py
|
insanity/layers.py
|
import numpy as np
import theano
import theano.tensor as T
from theano.tensor.nnet import conv
from theano.tensor.nnet import softmax
from theano.tensor import shared_randomstreams
from theano.tensor.signal import downsample
class Layer(object):
def __init__(self, numInputs, numNeurons, activation):
self.numInputs = numInputs
self.numNeurons = numNeurons
self.activation = activation
#Initialize weights.
self.weights = theano.shared(
np.asarray(
np.random.normal(
loc=0.0, scale=np.sqrt(1.0/self.numNeurons), size=(self.numInputs, self.numNeurons)),
dtype=theano.config.floatX),
name='weights', borrow=True)
#Initialize biases.
self.biases = theano.shared(
np.asarray(
np.random.normal(
loc=0.0, scale=1.0, size=(self.numNeurons,)),
dtype=theano.config.floatX),
name='biases', borrow=True)
@property
def input(value):
#Configure the layer output.
self.output = something
class FullyConnectedLayer(Layer):
|
import numpy as np
import theano
import theano.tensor as T
from theano.tensor.nnet import conv
from theano.tensor.nnet import softmax
from theano.tensor import shared_randomstreams
from theano.tensor.signal import downsample
class Layer(object):
def __init__(self, numInputs, numNeurons, activation, miniBatchSize):
self.numInputs = numInputs
self.numNeurons = numNeurons
self.activation = activation
self.miniBatchSize = miniBatchSize
#Initialize weights.
self.weights = theano.shared(
np.asarray(
np.random.normal(
loc=0.0, scale=np.sqrt(1.0/self.numNeurons), size=(self.numInputs, self.numNeurons)),
dtype=theano.config.floatX),
name='weights', borrow=True)
#Initialize biases.
self.biases = theano.shared(
np.asarray(
np.random.normal(
loc=0.0, scale=1.0, size=(self.numNeurons,)),
dtype=theano.config.floatX),
name='biases', borrow=True)
@input.setter
def input(self, value):
self.input = value
#Configure the layer output.
self.output = something
class FullyConnectedLayer(Layer):
@Layer.input.setter
def input(self, value):
self.input = value
#Configure the layer output.
self.output = something
|
Add proper use of property setter.
|
Add proper use of property setter.
|
Python
|
cc0-1.0
|
cn04/insanity
|
b064d8dbc4be13c12c1c87491ebcb484ab71ac52
|
geopy/__init__.py
|
geopy/__init__.py
|
"""
geopy is a Python 2 and 3 client for several popular geocoding web services.
geopy makes it easy for Python developers to locate the coordinates of
addresses, cities, countries, and landmarks across the globe using third-party
geocoders and other data sources.
geopy is tested against CPython (versions 2.7, 3.4, 3.5, 3.6), PyPy, and
PyPy3. geopy does not and will not support CPython 2.6.
"""
from geopy.point import Point
from geopy.location import Location
from geopy.geocoders import * # pylint: disable=W0401
from geopy.util import __version__
|
"""
geopy is a Python 2 and 3 client for several popular geocoding web services.
geopy makes it easy for Python developers to locate the coordinates of
addresses, cities, countries, and landmarks across the globe using third-party
geocoders and other data sources.
geopy is tested against CPython (versions 2.7, 3.4, 3.5, 3.6), PyPy, and
PyPy3. geopy does not and will not support CPython 2.6.
"""
from geopy.location import Location
from geopy.point import Point
from geopy.util import __version__
from geopy.geocoders import * # noqa
# geopy.geocoders.options must not be importable as `geopy.options`,
# because that is ambiguous (which options are that).
del options # noqa
|
Fix geocoder.options being also exported as `geopy.options`
|
Fix geocoder.options being also exported as `geopy.options`
|
Python
|
mit
|
geopy/geopy,jmb/geopy
|
1a3fb78b32fbb95e3efc0f06ef62690834e820e3
|
libraries/vytree/__init__.py
|
libraries/vytree/__init__.py
|
# vytree.__init__: package init file.
#
# Copyright (C) 2014 VyOS Development Group <maintainers@vyos.net>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
from vytree.node import (
Node,
ChildNotFoundError,
ChildAlreadyExistsError,
)
from vytree.config_node import ConfigNode
from vytree.reference_node import ReferenceNode
from vytree.reference_tree_loader import ReferenceTreeLoader
|
# vytree.__init__: package init file.
#
# Copyright (C) 2014 VyOS Development Group <maintainers@vyos.net>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
from vytree.node import (
Node,
ChildNotFoundError,
ChildAlreadyExistsError,
)
from vytree.config_node import ConfigNode
|
Remove referencetree-related imports from the top level vytree package.
|
Remove referencetree-related imports from the top level vytree package.
|
Python
|
lgpl-2.1
|
vyos-legacy/vyconfd,vyos-legacy/vyconfd
|
75fd4aadedb4bcdcfe41f9ae61bf62282ffdadea
|
test/__init__.py
|
test/__init__.py
|
import glob, os.path, sys
version = sys.version.split(" ")[0]
majorminor = version[0:3]
# Add path to hiredis.so load path
path = glob.glob("build/lib*-%s/hiredis/*.so" % majorminor)[0]
sys.path.insert(0, os.path.dirname(path))
from unittest import *
from . import reader
def tests():
suite = TestSuite()
suite.addTest(makeSuite(reader.ReaderTest))
return suite
|
import glob, os.path, sys
version = sys.version.split(" ")[0]
majorminor = version[0:3]
# Add path to hiredis.so load path
path = glob.glob("build/lib*-%s/hiredis" % majorminor)[0]
sys.path.insert(0, path)
from unittest import *
from . import reader
def tests():
suite = TestSuite()
suite.addTest(makeSuite(reader.ReaderTest))
return suite
|
Fix build path detection on SunOS
|
Fix build path detection on SunOS
Inside the hiredis directory there is another directory that contains the
shared object. This is specific to the platform so we shouldn't care where the
shared object itself is placed.
|
Python
|
bsd-3-clause
|
redis/hiredis-py,charsyam/hiredis-py,badboy/hiredis-py-win,badboy/hiredis-py-win,badboy/hiredis-py-win,redis/hiredis-py,charsyam/hiredis-py
|
a6e6e6bf18c48638d4c6c7d97f894edd3fc3c1ad
|
ipython_config.py
|
ipython_config.py
|
c.InteractiveShellApp.exec_lines = []
# ipython-autoimport - Automatically import modules
c.InteractiveShellApp.exec_lines.append(
"try:\n %load_ext ipython_autoimport\nexcept ImportError: pass")
# Automatically reload modules
c.InteractiveShellApp.exec_lines.append('%load_ext autoreload')
c.InteractiveShellApp.exec_lines.append('%autoreload 2')
|
c.InteractiveShellApp.exec_lines = []
# ipython-autoimport - Automatically import modules
c.InteractiveShellApp.exec_lines.append(
"try:\n %load_ext ipython_autoimport\nexcept ImportError: pass")
# Automatically reload modules
c.InteractiveShellApp.exec_lines.append('%load_ext autoreload')
c.InteractiveShellApp.exec_lines.append('%autoreload 2')
c.TerminalInteractiveShell.editor = 'gvim'
|
Set default shell editor for ipython to gvim
|
Set default shell editor for ipython to gvim
|
Python
|
mit
|
brycepg/dotfiles,brycepg/dotfiles
|
bf694ffdf1fd61f6e108f3076ed975d538af5224
|
wlauto/common/android/resources.py
|
wlauto/common/android/resources.py
|
# Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from wlauto.common.resources import FileResource
class ReventFile(FileResource):
name = 'revent'
def __init__(self, owner, stage):
super(ReventFile, self).__init__(owner)
self.stage = stage
class JarFile(FileResource):
name = 'jar'
class ApkFile(FileResource):
name = 'apk'
def __init__(self, owner, platform=None, uiauto=False):
super(ApkFile, self).__init__(owner)
self.platform = platform
self.uiauto = uiauto
def __str__(self):
apk_type = 'uiautomator ' if self.uiauto else ''
return '<{}\'s {} {}APK>'.format(self.owner, self.platform, apk_type)
class uiautoApkFile(FileResource):
name = 'uiautoapk'
def __init__(self, owner, platform=None):
super(uiautoApkFile, self).__init__(owner)
self.platform = platform
def __str__(self):
return '<{}\'s {} UiAuto APK>'.format(self.owner, self.platform)
|
# Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from wlauto.common.resources import FileResource
class ReventFile(FileResource):
name = 'revent'
def __init__(self, owner, stage):
super(ReventFile, self).__init__(owner)
self.stage = stage
class JarFile(FileResource):
name = 'jar'
class ApkFile(FileResource):
name = 'apk'
def __init__(self, owner, platform=None, uiauto=False):
super(ApkFile, self).__init__(owner)
self.platform = platform
self.uiauto = uiauto
def __str__(self):
apk_type = 'uiautomator ' if self.uiauto else ''
return '<{}\'s {} {}APK>'.format(self.owner, self.platform, apk_type)
|
Revert "AndroidResource: Add a UiautoApk resource type."
|
Revert "AndroidResource: Add a UiautoApk resource type."
This reverts commit bc6af25366aacf394f96b5a93008109904a89e93.
|
Python
|
apache-2.0
|
bjackman/workload-automation,bjackman/workload-automation,jimboatarm/workload-automation,jimboatarm/workload-automation,jimboatarm/workload-automation,jimboatarm/workload-automation,bjackman/workload-automation,bjackman/workload-automation,jimboatarm/workload-automation,bjackman/workload-automation,jimboatarm/workload-automation,bjackman/workload-automation
|
78ca9c6b8393b1b4f4bddf41febc87696796d28a
|
openpassword/openssl_utils.py
|
openpassword/openssl_utils.py
|
from Crypto.Hash import MD5
def derive_openssl_key(key, salt, hash=MD5):
key = key[0:-16]
openssl_key = bytes()
prev = bytes()
while len(openssl_key) < 32:
prev = hash.new(prev + key + salt).digest()
openssl_key += prev
return openssl_key
|
from Crypto.Hash import MD5
def derive_openssl_key(key, salt, hashing_function=MD5):
key = key[0:-16]
openssl_key = bytes()
prev = bytes()
while len(openssl_key) < 32:
prev = hashing_function.new(prev + key + salt).digest()
openssl_key += prev
return openssl_key
|
Rename hash variable to prevent colision with native method
|
Rename hash variable to prevent colision with native method
|
Python
|
mit
|
openpassword/blimey,openpassword/blimey
|
2c6dd79d419699e61970719dbb369aefe359ea6e
|
tests/test_db.py
|
tests/test_db.py
|
from pypinfo import db
CREDS_FILE = '/path/to/creds_file.json'
def test_get_credentials(tmp_path):
# Arrange
db.DB_FILE = str(tmp_path / 'db.json') # Mock
# Assert
assert db.get_credentials() is None
def test_set_credentials(tmp_path):
# Arrange
db.DB_FILE = str(tmp_path / 'db.json') # Mock
# Act
db.set_credentials(CREDS_FILE)
def test_set_credentials_twice(tmp_path):
# Arrange
db.DB_FILE = str(tmp_path / 'db.json') # Mock
# Act
db.set_credentials(CREDS_FILE)
db.set_credentials(CREDS_FILE)
def test_round_trip(tmp_path):
# Arrange
db.DB_FILE = str(tmp_path / 'db.json') # Mock
# Act
db.set_credentials(CREDS_FILE)
# Assert
assert db.get_credentials() == CREDS_FILE
def test_get_credentials_table(tmp_path):
db.DB_FILE = str(tmp_path / 'db.json')
with db.get_credentials_table() as table:
assert not table._storage._storage._handle.closed
with db.get_credentials_table(table) as table2:
assert table2 is table
assert not table._storage._storage._handle.closed
assert table._storage._storage._handle.closed
|
from pypinfo import db
CREDS_FILE = '/path/to/creds_file.json'
def test_get_credentials(tmp_path):
# Arrange
db.DB_FILE = str(tmp_path / 'db.json') # Mock
# Assert
assert db.get_credentials() is None
def test_set_credentials(tmp_path):
# Arrange
db.DB_FILE = str(tmp_path / 'db.json') # Mock
# Act
db.set_credentials(CREDS_FILE)
def test_set_credentials_twice(tmp_path):
# Arrange
db.DB_FILE = str(tmp_path / 'db.json') # Mock
# Act
db.set_credentials(CREDS_FILE)
db.set_credentials(CREDS_FILE)
def test_round_trip(tmp_path):
# Arrange
db.DB_FILE = str(tmp_path / 'db.json') # Mock
# Act
db.set_credentials(CREDS_FILE)
# Assert
assert db.get_credentials() == CREDS_FILE
def test_get_credentials_table(tmp_path):
db.DB_FILE = str(tmp_path / 'db.json')
with db.get_credentials_table() as table:
assert not table._storage._handle.closed
with db.get_credentials_table(table) as table2:
assert table2 is table
assert not table._storage._handle.closed
assert table._storage._handle.closed
|
Fix tests for updated TinyDB/Tinyrecord
|
Fix tests for updated TinyDB/Tinyrecord
|
Python
|
mit
|
ofek/pypinfo
|
3f909cdfba61719dfa0a860aeba1e418fe740f33
|
indra/__init__.py
|
indra/__init__.py
|
from __future__ import print_function, unicode_literals
import logging
import os
import sys
__version__ = '1.10.0'
__all__ = ['assemblers', 'belief', 'databases', 'explanation', 'literature',
'mechlinker', 'preassembler', 'sources', 'tools', 'util']
logging.basicConfig(format='%(levelname)s: [%(asctime)s] indra/%(name)s - %(message)s',
level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S')
# Suppress INFO-level logging from some dependencies
logging.getLogger('requests').setLevel(logging.ERROR)
logging.getLogger('urllib3').setLevel(logging.ERROR)
logging.getLogger('rdflib').setLevel(logging.ERROR)
logging.getLogger('boto3').setLevel(logging.CRITICAL)
logging.getLogger('botocore').setLevel(logging.CRITICAL)
# This is specifically to suppress lib2to3 logging from networkx
import lib2to3.pgen2.driver
class Lib2to3LoggingModuleShim(object):
def getLogger(self):
return logging.getLogger('lib2to3')
lib2to3.pgen2.driver.logging = Lib2to3LoggingModuleShim()
logging.getLogger('lib2to3').setLevel(logging.ERROR)
logger = logging.getLogger('indra')
from .config import get_config, has_config
|
from __future__ import print_function, unicode_literals
import logging
import os
import sys
__version__ = '1.10.0'
__all__ = ['assemblers', 'belief', 'databases', 'explanation', 'literature',
'mechlinker', 'preassembler', 'sources', 'tools', 'util']
logging.basicConfig(format=('%(levelname)s: [%(asctime)s] %(name)s'
' - %(message)s'),
level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S')
# Suppress INFO-level logging from some dependencies
logging.getLogger('requests').setLevel(logging.ERROR)
logging.getLogger('urllib3').setLevel(logging.ERROR)
logging.getLogger('rdflib').setLevel(logging.ERROR)
logging.getLogger('boto3').setLevel(logging.CRITICAL)
logging.getLogger('botocore').setLevel(logging.CRITICAL)
# This is specifically to suppress lib2to3 logging from networkx
import lib2to3.pgen2.driver
class Lib2to3LoggingModuleShim(object):
def getLogger(self):
return logging.getLogger('lib2to3')
lib2to3.pgen2.driver.logging = Lib2to3LoggingModuleShim()
logging.getLogger('lib2to3').setLevel(logging.ERROR)
logger = logging.getLogger('indra')
from .config import get_config, has_config
|
Remove indra prefix from logger
|
Remove indra prefix from logger
|
Python
|
bsd-2-clause
|
bgyori/indra,bgyori/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/indra,bgyori/indra,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/indra,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra
|
a4dc298a487fcf6f1975346ab421cca705c025dc
|
storage/test_driver.py
|
storage/test_driver.py
|
#!/usr/bin/env python
from storage import Storage
NEW_REPORT = {'foo': 'bar', 'boo': 'baz'}
def main():
db_store = Storage.get_storage()
for key, value in db_store.__dict__.iteritems():
print '%s: %s' % (key, value)
print '\n'
# report_id = db_store.store(NEW_REPORT)
report_id = 'AVM0dGOF6iQbRONBw9yB'
print db_store.get_report(report_id)
print db_store.get_report(3)
# db_store.delete(report_id)
# print db_store.delete(2)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from storage import Storage
NEW_REPORT = {'foo': 'bar', 'boo': 'baz'}
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/other_file": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
def populate_es():
db_store = Storage.get_storage()
for report in REPORTS:
db_store.store(report)
def main():
db_store = Storage.get_storage()
for key, value in db_store.__dict__.iteritems():
print '%s: %s' % (key, value)
print '\n'
# report_id = db_store.store(NEW_REPORT)
report_id = 'AVM0dGOF6iQbRONBw9yB'
print db_store.get_report(report_id)
print db_store.get_report(3)
# db_store.delete(report_id)
# print db_store.delete(2)
if __name__ == '__main__':
main()
|
Add populate es function to test driver
|
Add populate es function to test driver
|
Python
|
mpl-2.0
|
awest1339/multiscanner,mitre/multiscanner,awest1339/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,MITRECND/multiscanner,jmlong1027/multiscanner,mitre/multiscanner,mitre/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,MITRECND/multiscanner,jmlong1027/multiscanner
|
64c2a9ec42fba89225af07a3d0cf84dd9de98e4b
|
legislators/urls.py
|
legislators/urls.py
|
from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^find_legislator/', views.find_legislator),
url(r'^get_latlon/', views.get_latlon)
]
|
from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^find_legislator/', views.find_legislator),
url(r'^get_latlon/', views.get_latlon, name="get_latlon"),
url(r'^latest_latlon/', views.latest_latlon, name="latest_latlon")
]
|
Add url for latest latlon url
|
Add url for latest latlon url
|
Python
|
mit
|
jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
|
70b7090a438f7962f28acc23fa78cdb6f5d084a0
|
docs/sphinxext/configtraits.py
|
docs/sphinxext/configtraits.py
|
"""Directives and roles for documenting traitlets config options.
::
.. configtrait:: Application.log_datefmt
Description goes here.
Cross reference like this: :configtrait:`Application.log_datefmt`.
"""
from sphinx.locale import l_
from sphinx.util.docfields import Field
def setup(app):
app.add_object_type('configtrait', 'configtrait', objname='Config option')
metadata = {'parallel_read_safe': True, 'parallel_write_safe': True}
return metadata
|
"""Directives and roles for documenting traitlets config options.
::
.. configtrait:: Application.log_datefmt
Description goes here.
Cross reference like this: :configtrait:`Application.log_datefmt`.
"""
def setup(app):
app.add_object_type('configtrait', 'configtrait', objname='Config option')
metadata = {'parallel_read_safe': True, 'parallel_write_safe': True}
return metadata
|
Fix compatibility with the latest release of Sphinx
|
Fix compatibility with the latest release of Sphinx
`l_` from sphinx.locale has been deprecated for a long time.
`_` is the new name for the same function but it seems that the
imports there are useless.
https://github.com/sphinx-doc/sphinx/commit/8d653a406dc0dc6c2632176ab4757ca15474b10f
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
02c2551b2760fc225bb4851c560f1881c7d674a4
|
txircd/modules/extra/listmodules.py
|
txircd/modules/extra/listmodules.py
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ModulesCommand(ModuleData):
implements(IPlugin, IModuleData)
name = "ModulesCommand"
def actions(self):
return [ ("statsruntype-modules", 1, self.listModules) ]
def listModules(self):
return sorted(self.ircd.loadedModules.keys())
modulesCommand = ModulesCommand()
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ModulesCommand(ModuleData):
implements(IPlugin, IModuleData)
name = "ModulesCommand"
def actions(self):
return [ ("statsruntype-modules", 1, self.listModules) ]
def listModules(self):
modules = {}
for modName in sorted(self.ircd.loadedModules.keys()):
modules[modName] = "*"
return modules
modulesCommand = ModulesCommand()
|
Return the correct thing for modules
|
Return the correct thing for modules
|
Python
|
bsd-3-clause
|
Heufneutje/txircd
|
ea39c4ebba3d5ab42dfa202f88f7d76386e505fe
|
plugins/MeshView/MeshView.py
|
plugins/MeshView/MeshView.py
|
from Cura.View.View import View
class MeshView(View):
def __init__(self):
super(MeshView, self).__init__()
def render(self):
scene = self.getController().getScene()
renderer = self.getRenderer()
self._renderObject(scene.getRoot(), renderer)
def _renderObject(self, object, renderer):
if object.getMeshData():
renderer.renderMesh(object.getGlobalTransformation(), object.getMeshData())
for child in object.getChildren():
self._renderObject(child, renderer)
|
from Cura.View.View import View
class MeshView(View):
def __init__(self):
super(MeshView, self).__init__()
def render(self):
scene = self.getController().getScene()
renderer = self.getRenderer()
self._renderObject(scene.getRoot(), renderer)
def _renderObject(self, object, renderer):
if not object.render():
if object.getMeshData():
renderer.renderMesh(object.getGlobalTransformation(), object.getMeshData())
for child in object.getChildren():
self._renderObject(child, renderer)
|
Allow SceneObjects to render themselves
|
Allow SceneObjects to render themselves
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
e733b0d5192437a95c4eafd1babc02385fb4fcf7
|
cms/sitemaps/cms_sitemap.py
|
cms/sitemaps/cms_sitemap.py
|
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import Sitemap
from django.utils import translation
from cms.models import Title
def from_iterable(iterables):
"""
Backport of itertools.chain.from_iterable
"""
for it in iterables:
for element in it:
yield element
class CMSSitemap(Sitemap):
changefreq = "monthly"
priority = 0.5
def items(self):
all_titles = Title.objects.public().filter(page__login_required=False)
return all_titles
def lastmod(self, title):
modification_dates = [title.page.changed_date, title.page.publication_date]
plugins_for_placeholder = lambda placeholder: placeholder.get_plugins()
plugins = from_iterable(map(plugins_for_placeholder, title.page.placeholders.all()))
plugin_modification_dates = map(lambda plugin: plugin.changed_date, plugins)
modification_dates.extend(plugin_modification_dates)
return max(modification_dates)
def location(self, title):
translation.activate(title.language)
url = title.page.get_absolute_url(title.language)
translation.deactivate()
return url
|
# -*- coding: utf-8 -*-
from django.contrib.sitemaps import Sitemap
from django.db.models import Q
from django.utils import translation
from cms.models import Title
def from_iterable(iterables):
"""
Backport of itertools.chain.from_iterable
"""
for it in iterables:
for element in it:
yield element
class CMSSitemap(Sitemap):
changefreq = "monthly"
priority = 0.5
def items(self):
#
# It is counter-productive to provide entries for:
# > Pages which redirect:
# - If the page redirects to another page on this site, the
# destination page will already be in the sitemap, and
# - If the page redirects externally, then it shouldn't be
# part of our sitemap anyway.
# > Pages which cannot be accessed by anonymous users (like
# search engines are).
#
all_titles = Title.objects.public().filter(
Q(redirect='') | Q(redirect__isnull=True),
page__login_required=False
)
return all_titles
def lastmod(self, title):
modification_dates = [title.page.changed_date, title.page.publication_date]
plugins_for_placeholder = lambda placeholder: placeholder.get_plugins()
plugins = from_iterable(map(plugins_for_placeholder, title.page.placeholders.all()))
plugin_modification_dates = map(lambda plugin: plugin.changed_date, plugins)
modification_dates.extend(plugin_modification_dates)
return max(modification_dates)
def location(self, title):
translation.activate(title.language)
url = title.page.get_absolute_url(title.language)
translation.deactivate()
return url
|
Remove redirected pages from the sitemap
|
Remove redirected pages from the sitemap
|
Python
|
bsd-3-clause
|
ScholzVolkmer/django-cms,wyg3958/django-cms,donce/django-cms,robmagee/django-cms,DylannCordel/django-cms,frnhr/django-cms,jrief/django-cms,wuzhihui1123/django-cms,Livefyre/django-cms,dhorelik/django-cms,netzkolchose/django-cms,intip/django-cms,chkir/django-cms,jproffitt/django-cms,selecsosi/django-cms,czpython/django-cms,liuyisiyisi/django-cms,takeshineshiro/django-cms,saintbird/django-cms,czpython/django-cms,memnonila/django-cms,FinalAngel/django-cms,farhaadila/django-cms,webu/django-cms,divio/django-cms,SmithsonianEnterprises/django-cms,sephii/django-cms,jproffitt/django-cms,czpython/django-cms,astagi/django-cms,bittner/django-cms,AlexProfi/django-cms,dhorelik/django-cms,nostalgiaz/django-cms,netzkolchose/django-cms,iddqd1/django-cms,jeffreylu9/django-cms,astagi/django-cms,SachaMPS/django-cms,chmberl/django-cms,qnub/django-cms,chkir/django-cms,nostalgiaz/django-cms,SachaMPS/django-cms,rscnt/django-cms,benzkji/django-cms,360youlun/django-cms,cyberintruder/django-cms,jproffitt/django-cms,nostalgiaz/django-cms,Vegasvikk/django-cms,FinalAngel/django-cms,wuzhihui1123/django-cms,SachaMPS/django-cms,datakortet/django-cms,farhaadila/django-cms,nimbis/django-cms,owers19856/django-cms,nimbis/django-cms,cyberintruder/django-cms,frnhr/django-cms,intip/django-cms,rsalmaso/django-cms,qnub/django-cms,liuyisiyisi/django-cms,SofiaReis/django-cms,jsma/django-cms,Jaccorot/django-cms,sznekol/django-cms,frnhr/django-cms,nimbis/django-cms,jrief/django-cms,FinalAngel/django-cms,leture/django-cms,philippze/django-cms,nimbis/django-cms,FinalAngel/django-cms,jproffitt/django-cms,frnhr/django-cms,netzkolchose/django-cms,360youlun/django-cms,stefanw/django-cms,owers19856/django-cms,intip/django-cms,takeshineshiro/django-cms,robmagee/django-cms,MagicSolutions/django-cms,benzkji/django-cms,bittner/django-cms,nostalgiaz/django-cms,bittner/django-cms,isotoma/django-cms,vxsx/django-cms,SofiaReis/django-cms,Vegasvikk/django-cms,vxsx/django-cms,philippze/django-cms,vxsx/django-cms,saintbird/django-
cms,SmithsonianEnterprises/django-cms,chkir/django-cms,Vegasvikk/django-cms,astagi/django-cms,Jaccorot/django-cms,stefanfoulis/django-cms,divio/django-cms,irudayarajisawa/django-cms,petecummings/django-cms,petecummings/django-cms,vad/django-cms,vstoykov/django-cms,mkoistinen/django-cms,jrclaramunt/django-cms,webu/django-cms,vad/django-cms,sznekol/django-cms,evildmp/django-cms,AlexProfi/django-cms,rsalmaso/django-cms,rryan/django-cms,josjevv/django-cms,yakky/django-cms,rsalmaso/django-cms,takeshineshiro/django-cms,DylannCordel/django-cms,jrief/django-cms,jeffreylu9/django-cms,stefanw/django-cms,intip/django-cms,stefanw/django-cms,mkoistinen/django-cms,chmberl/django-cms,isotoma/django-cms,donce/django-cms,chmberl/django-cms,saintbird/django-cms,leture/django-cms,datakortet/django-cms,dhorelik/django-cms,Livefyre/django-cms,keimlink/django-cms,divio/django-cms,jsma/django-cms,keimlink/django-cms,ScholzVolkmer/django-cms,MagicSolutions/django-cms,selecsosi/django-cms,qnub/django-cms,jsma/django-cms,kk9599/django-cms,andyzsf/django-cms,selecsosi/django-cms,vstoykov/django-cms,wyg3958/django-cms,jeffreylu9/django-cms,bittner/django-cms,jrclaramunt/django-cms,mkoistinen/django-cms,rsalmaso/django-cms,timgraham/django-cms,yakky/django-cms,rscnt/django-cms,vad/django-cms,kk9599/django-cms,benzkji/django-cms,stefanfoulis/django-cms,memnonila/django-cms,donce/django-cms,petecummings/django-cms,isotoma/django-cms,datakortet/django-cms,Livefyre/django-cms,josjevv/django-cms,wuzhihui1123/django-cms,evildmp/django-cms,josjevv/django-cms,stefanw/django-cms,jeffreylu9/django-cms,ScholzVolkmer/django-cms,robmagee/django-cms,MagicSolutions/django-cms,yakky/django-cms,irudayarajisawa/django-cms,czpython/django-cms,leture/django-cms,timgraham/django-cms,evildmp/django-cms,youprofit/django-cms,mkoistinen/django-cms,webu/django-cms,datakortet/django-cms,sephii/django-cms,vad/django-cms,jsma/django-cms,keimlink/django-cms,evildmp/django-cms,vxsx/django-cms,kk9599/django-cms,iddqd1/django-
cms,Livefyre/django-cms,vstoykov/django-cms,liuyisiyisi/django-cms,AlexProfi/django-cms,wyg3958/django-cms,farhaadila/django-cms,netzkolchose/django-cms,360youlun/django-cms,Jaccorot/django-cms,iddqd1/django-cms,andyzsf/django-cms,sephii/django-cms,yakky/django-cms,sznekol/django-cms,stefanfoulis/django-cms,andyzsf/django-cms,andyzsf/django-cms,SmithsonianEnterprises/django-cms,benzkji/django-cms,irudayarajisawa/django-cms,youprofit/django-cms,selecsosi/django-cms,philippze/django-cms,timgraham/django-cms,cyberintruder/django-cms,stefanfoulis/django-cms,jrclaramunt/django-cms,memnonila/django-cms,SofiaReis/django-cms,rscnt/django-cms,rryan/django-cms,rryan/django-cms,youprofit/django-cms,divio/django-cms,wuzhihui1123/django-cms,isotoma/django-cms,jrief/django-cms,sephii/django-cms,rryan/django-cms,DylannCordel/django-cms,owers19856/django-cms
|
f48eb543c3ae2222a71080592ae8932c227dc605
|
roche/scripts/xml-load.py
|
roche/scripts/xml-load.py
|
# coding=utf-8
import sys
sys.path.append('../../')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
with open('../../../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
|
# coding=utf-8
#
# Must be called in roche root dir
#
import sys
sys.path.append('.')
import roche.settings
from eulexistdb.db import ExistDB
from roche.settings import EXISTDB_SERVER_URL
#
# Timeout higher?
#
xmldb = ExistDB(timeout=30)
xmldb.createCollection('docker', True)
xmldb.createCollection(u'docker/浙江大學圖書館', True)
with open('../dublin-store/db/test_001.xml') as f:
xmldb.load(f, '/docker/001.xml', True)
|
Fix relative path in relation to app root dir
|
Fix relative path in relation to app root dir
|
Python
|
mit
|
beijingren/roche-website,beijingren/roche-website,beijingren/roche-website,beijingren/roche-website
|
eccedb9f938bd74574e4dcdd9ea63f71ac269f20
|
nydus/db/routers/__init__.py
|
nydus/db/routers/__init__.py
|
"""
nydus.db.routers
~~~~~~~~~~~~~~~~
:copyright: (c) 2011 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from .base import BaseRouter, RoundRobinRouter
|
"""
nydus.db.routers
~~~~~~~~~~~~~~~~
:copyright: (c) 2011 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from .base import BaseRouter, RoundRobinRouter, PartitionRouter
|
Add partition router to base
|
Add partition router to base
|
Python
|
apache-2.0
|
disqus/nydus
|
d4dd408e671d14518b3fabb964027cd006366fca
|
testfixtures/compat.py
|
testfixtures/compat.py
|
# compatibility module for different python versions
import sys
if sys.version_info[:2] > (3, 0):
PY2 = False
PY3 = True
Bytes = bytes
Unicode = str
basestring = str
class_type_name = 'class'
ClassType = type
exception_module = 'builtins'
new_class = type
self_name = '__self__'
from io import StringIO
xrange = range
else:
PY2 = True
PY3 = False
Bytes = str
Unicode = unicode
basestring = basestring
class_type_name = 'type'
from types import ClassType
exception_module = 'exceptions'
from new import classobj as new_class
self_name = 'im_self'
from cStringIO import StringIO
xrange = xrange
try:
from mock import call as mock_call
except ImportError: # pragma: no cover
class MockCall: pass
mock_call = MockCall()
try:
from unittest.mock import call as unittest_mock_call
except ImportError:
class UnittestMockCall: pass
unittest_mock_call = UnittestMockCall()
|
# compatibility module for different python versions
import sys
if sys.version_info[:2] > (3, 0):
PY2 = False
PY3 = True
Bytes = bytes
Unicode = str
basestring = str
BytesLiteral = lambda x: x.encode('latin1')
UnicodeLiteral = lambda x: x
class_type_name = 'class'
ClassType = type
exception_module = 'builtins'
new_class = type
self_name = '__self__'
from io import StringIO
xrange = range
else:
PY2 = True
PY3 = False
Bytes = str
Unicode = unicode
basestring = basestring
BytesLiteral = lambda x: x
UnicodeLiteral = lambda x: x.decode('latin1')
class_type_name = 'type'
from types import ClassType
exception_module = 'exceptions'
from new import classobj as new_class
self_name = 'im_self'
from cStringIO import StringIO
xrange = xrange
try:
from mock import call as mock_call
except ImportError: # pragma: no cover
class MockCall: pass
mock_call = MockCall()
try:
from unittest.mock import call as unittest_mock_call
except ImportError:
class UnittestMockCall: pass
unittest_mock_call = UnittestMockCall()
|
Add Python version agnostic helpers for creating byte and unicode literals.
|
Add Python version agnostic helpers for creating byte and unicode literals.
|
Python
|
mit
|
Simplistix/testfixtures,nebulans/testfixtures
|
bb3d9ec2d9932da2abb50f5cb6bceffae5112abb
|
mrbelvedereci/trigger/admin.py
|
mrbelvedereci/trigger/admin.py
|
from django.contrib import admin
from mrbelvedereci.trigger.models import Trigger
class TriggerAdmin(admin.ModelAdmin):
list_display = ('repo', 'type', 'flows', 'org', 'regex', 'active', 'public')
list_filter = ('active', 'public', 'repo', 'org', 'type')
admin.site.register(Trigger, TriggerAdmin)
|
from django.contrib import admin
from mrbelvedereci.trigger.models import Trigger
class TriggerAdmin(admin.ModelAdmin):
list_display = ('name', 'repo', 'type', 'flows', 'org', 'regex', 'active', 'public')
list_filter = ('active', 'public', 'type', 'org', 'repo')
admin.site.register(Trigger, TriggerAdmin)
|
Add name to trigger list view
|
Add name to trigger list view
|
Python
|
bsd-3-clause
|
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
|
437eb8432fe91865d3cb24109e1b99818de8ce4e
|
pysc2/bin/battle_net_maps.py
|
pysc2/bin/battle_net_maps.py
|
#!/usr/bin/python
# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Print the list of available maps according to the game."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from pysc2 import run_configs
def main(unused_argv):
with run_configs.get().start(want_rgb=False) as controller:
available_maps = controller.available_maps()
print("\n")
print("Local map paths:")
for m in available_maps.local_map_paths:
print(m)
print()
print("Battle.net maps:")
for m in available_maps.battlenet_map_names:
print(m)
if __name__ == "__main__":
app.run(main)
|
#!/usr/bin/python
# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Print the list of available maps according to the game."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from pysc2 import run_configs
def main(unused_argv):
with run_configs.get().start(want_rgb=False) as controller:
available_maps = controller.available_maps()
print("\n")
print("Local map paths:")
for m in sorted(available_maps.local_map_paths):
print(" ", m)
print()
print("Battle.net maps:")
for m in sorted(available_maps.battlenet_map_names):
print(" ", m)
if __name__ == "__main__":
app.run(main)
|
Sort and indent the map lists.
|
Sort and indent the map lists.
PiperOrigin-RevId: 249276696
|
Python
|
apache-2.0
|
deepmind/pysc2
|
73eb3c7c52c2a5c58cad0e1d4dbe09c1e713beeb
|
conductor/vendor/_stripe.py
|
conductor/vendor/_stripe.py
|
from django.conf import settings
import stripe
stripe.api_key = settings.STRIPE_API_KEY
stripe.api_version = "2018-10-31"
class StripeGateway:
"""A gateway to Stripe
This insulates the rest of the system from Stripe errors
and configures the Stripe module with the API key.
"""
def create_customer(self, user: settings.AUTH_USER_MODEL, stripe_token: str) -> str:
"""Add a user to Stripe and join them to the plan."""
# Let this fail on purpose. If it fails, the error monitoring system
# will log it and I'll learn how to harden it for the conductor env.
customer = stripe.Customer.create(email=user.email, source=stripe_token)
stripe.Subscription.create(
customer=customer.id,
items=[{"plan": settings.STRIPE_PLAN}],
trial_from_plan=True,
)
return customer.id
stripe_gateway = StripeGateway()
|
from django.conf import settings
import stripe
stripe.api_key = settings.STRIPE_API_KEY
class StripeGateway:
"""A gateway to Stripe
This insulates the rest of the system from Stripe errors
and configures the Stripe module with the API key.
"""
def create_customer(self, user: settings.AUTH_USER_MODEL, stripe_token: str) -> str:
"""Add a user to Stripe and join them to the plan."""
# Let this fail on purpose. If it fails, the error monitoring system
# will log it and I'll learn how to harden it for the conductor env.
customer = stripe.Customer.create(email=user.email, source=stripe_token)
stripe.Subscription.create(
customer=customer.id,
items=[{"plan": settings.STRIPE_PLAN}],
trial_from_plan=True,
)
return customer.id
stripe_gateway = StripeGateway()
|
Remove pinned Stripe API version.
|
Remove pinned Stripe API version.
|
Python
|
bsd-2-clause
|
mblayman/lcp,mblayman/lcp,mblayman/lcp
|
9d7f2626294fbf25934e7dda4892b7ac13bd5555
|
fireplace/cards/tgt/warlock.py
|
fireplace/cards/tgt/warlock.py
|
from ..utils import *
##
# Minions
# Dreadsteed
class AT_019:
deathrattle = Summon(CONTROLLER, "AT_019")
# Tiny Knight of Evil
class AT_021:
events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e"))
# Wrathguard
class AT_026:
events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT))
# Wilfred Fizzlebang
class AT_027:
events = Draw(CONTROLLER).on(
lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e")
)
class AT_027e:
cost = lambda self, i: 0
##
# Spells
# Fist of Jaraxxus
class AT_022:
play = Hit(RANDOM_ENEMY_CHARACTER, 4)
in_hand = Discard(SELF).on(play)
# Demonfuse
class AT_024:
play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1)
# Dark Bargain
class AT_025:
play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
|
from ..utils import *
##
# Minions
# Dreadsteed
class AT_019:
deathrattle = Summon(CONTROLLER, "AT_019")
# Tiny Knight of Evil
class AT_021:
events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e"))
# Void Crusher
class AT_023:
inspire = Destroy(RANDOM_ENEMY_MINION | RANDOM_FRIENDLY_MINION)
# Wrathguard
class AT_026:
events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT))
# Wilfred Fizzlebang
class AT_027:
events = Draw(CONTROLLER).on(
lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e")
)
class AT_027e:
cost = lambda self, i: 0
##
# Spells
# Fist of Jaraxxus
class AT_022:
play = Hit(RANDOM_ENEMY_CHARACTER, 4)
in_hand = Discard(SELF).on(play)
# Demonfuse
class AT_024:
play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1)
# Dark Bargain
class AT_025:
play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
|
Implement more TGT Warlock cards
|
Implement more TGT Warlock cards
|
Python
|
agpl-3.0
|
liujimj/fireplace,beheh/fireplace,Ragowit/fireplace,Ragowit/fireplace,amw2104/fireplace,amw2104/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,oftc-ftw/fireplace,liujimj/fireplace,oftc-ftw/fireplace,Meerkov/fireplace,jleclanche/fireplace,Meerkov/fireplace,NightKev/fireplace
|
27c115ba875136ced13023999fe13fcf0d798f0e
|
lux_udp_bridge.py
|
lux_udp_bridge.py
|
#!/usr/bin/env python
import select
import serial
import socket
def run_lux_udp(host, port, dev):
with serial.Serial(dev, baudrate=3000000, xonxoff=False) as ser:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((host, port))
last_addr = None
serial_buffer = ""
while True:
inputs, outputs, errors = select.select([sock.fileno(), ser.fileno()], [], [])
if sock.fileno() in inputs:
packet, last_addr = sock.recvfrom(1100)
#print ">", repr(packet)
if len(packet) == 0: # Ping, respond back
sock.sendto("", 0, last_addr)
else:
ser.write(packet)
if ser.fileno() in inputs:
serial_buffer += ser.read()
while "\0" in serial_buffer:
packet, null, serial_buffer = serial_buffer.partition("\0")
sock.sendto(packet + null, 0, last_addr)
#print "<", repr(packet)
if __name__ == "__main__":
while True:
try:
run_lux_udp(host="0.0.0.0", port=1365, dev="/dev/ttyACM0")
except Exception as e:
print e
select.select([], [], [], 5)
|
#!/usr/bin/env python
import select
import serial
import socket
def run_lux_udp(host, port, dev):
with serial.Serial(dev, baudrate=3000000, xonxoff=False) as ser:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((host, port))
last_addr = None
serial_buffer = ""
while True:
inputs, outputs, errors = select.select([sock.fileno(), ser.fileno()], [], [])
while sock.fileno() in inputs:
try:
packet, last_addr = sock.recvfrom(1100, socket.MSG_DONTWAIT)
except socket.error:
break
#print ">", repr(packet)
if len(packet) == 0: # Ping, respond back
sock.sendto("", 0, last_addr)
else:
ser.write(packet)
if ser.fileno() in inputs:
serial_buffer += ser.read()
while "\0" in serial_buffer:
packet, null, serial_buffer = serial_buffer.partition("\0")
sock.sendto(packet + null, 0, last_addr)
#print "<", repr(packet)
if __name__ == "__main__":
while True:
try:
run_lux_udp(host="0.0.0.0", port=1365, dev="/dev/ttyACM0")
except Exception as e:
print e
select.select([], [], [], 5)
|
Fix buffering issue in udp bridge
|
Fix buffering issue in udp bridge
|
Python
|
mit
|
zbanks/radiance,zbanks/radiance,zbanks/radiance,zbanks/radiance
|
4c3e92c8847b35c4afa53a90dd823c89d1d534d1
|
mamba/__init__.py
|
mamba/__init__.py
|
__version__ = '0.8.6'
|
__version__ = '0.8.6'
def description(message):
pass
def _description(message):
pass
def it(message):
pass
def _it(message):
pass
def context(message):
pass
def _context(message):
pass
def before():
pass
def after():
pass
|
Make mamba more friendly to linters and IDE's
|
Make mamba more friendly to linters and IDE's
Some empty functions are added and we are able to optionally import them
|
Python
|
mit
|
nestorsalceda/mamba
|
21bf18a03c485304aa00dc2af86aa91930e4b1ac
|
tests/test_grammar.py
|
tests/test_grammar.py
|
import pytest
from parglare import Grammar
from parglare.exceptions import GrammarError
def test_terminal_nonterminal_conflict():
# Production A is a terminal ("a") and non-terminal at the same time.
g = """
A = "a" | B;
B = "b";
"""
try:
Grammar.from_string(g)
assert False
except GrammarError as e:
assert 'Multiple definition' in str(e)
def test_multiple_terminal_definition():
g = """
S = A A;
A = "a";
A = "b";
"""
try:
Grammar.from_string(g)
assert False
except GrammarError as e:
assert 'Multiple definition' in str(e)
|
import pytest
from parglare import Grammar
def test_terminal_nonterminal():
# Production A is a terminal ("a") and non-terminal at the same time.
# Thus, it must be recognized as non-terminal.
g = """
S = A B;
A = "a" | B;
B = "b";
"""
Grammar.from_string(g)
# Here A shoud be non-terminal while B will be terminal.
g = """
S = A B;
A = B;
B = "b";
"""
Grammar.from_string(g)
def test_multiple_terminal_definition():
# A is defined multiple times as terminal thus it must be recognized
# as non-terminal with alternative expansions.
g = """
S = A A;
A = "a";
A = "b";
"""
Grammar.from_string(g)
|
Fix in tests for terminal definitions.
|
Fix in tests for terminal definitions.
|
Python
|
mit
|
igordejanovic/parglare,igordejanovic/parglare
|
eacbc67cdaa7016d1098e9f63a50ae7ca6b4924a
|
app/auth/views.py
|
app/auth/views.py
|
# Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from . import auth
from ..decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return "All good. You only get this message if you're authenticated."
|
# Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from flask import jsonify
from . import auth
from ..decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return jsonify({'code': 'authorization_success',
'description': "All good. You only get this message if you're authenticated."})
|
Use JSON for API GET /auth/test response
|
Use JSON for API GET /auth/test response
|
Python
|
agpl-3.0
|
uzh/msregistry
|
e8b1d0aff6333d6f8cfb4c81262550c670ea7e86
|
factory/tools/cat_StartdLog.py
|
factory/tools/cat_StartdLog.py
|
#!/bin/env python
#
# cat_StartdLog.py
#
# Print out the StartdLog for a glidein output file
#
# Usage: cat_StartdLog.py logname
#
import sys
STARTUP_DIR=sys.path[0]
sys.path.append(os.path.join(STARTUP_DIR,"lib"))
import gWftLogParser
USAGE="Usage: cat_StartdLog.py <logname>"
def main():
try:
print gWftLogParser.get_CondorLog(sys.argv[1],"StartdLog")
except:
sys.stderr.write("%s\n"%USAGE)
sys.exit(1)
if __name__ == '__main__':
main()
|
#!/bin/env python
#
# cat_StartdLog.py
#
# Print out the StartdLog for a glidein output file
#
# Usage: cat_StartdLog.py logname
#
import os.path
import sys
STARTUP_DIR=sys.path[0]
sys.path.append(os.path.join(STARTUP_DIR,"lib"))
import gWftLogParser
USAGE="Usage: cat_StartdLog.py <logname>"
def main():
try:
print gWftLogParser.get_CondorLog(sys.argv[1],"StartdLog")
except:
sys.stderr.write("%s\n"%USAGE)
sys.exit(1)
if __name__ == '__main__':
main()
|
Allow for startup in a different dir
|
Allow for startup in a different dir
|
Python
|
bsd-3-clause
|
bbockelm/glideinWMS,bbockelm/glideinWMS,holzman/glideinwms-old,holzman/glideinwms-old,bbockelm/glideinWMS,holzman/glideinwms-old,bbockelm/glideinWMS
|
7bf4083ef44585116f0eff86753080612a26b374
|
src/__init__.py
|
src/__init__.py
|
from bayeslite.api import barplot
from bayeslite.api import cardinality
from bayeslite.api import draw_crosscat
from bayeslite.api import estimate_log_likelihood
from bayeslite.api import heatmap
from bayeslite.api import histogram
from bayeslite.api import mi_hist
from bayeslite.api import nullify
from bayeslite.api import pairplot
from bayeslite.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
The bdbcontrib module servers a sandbox for experimental and semi-stable
features that are not yet ready for integreation to the bayeslite repository.
"""
__all__ = [
'barplot',
'cardinality',
'draw_crosscat',
'estimate_log_likelihood',
'heatmap',
'histogram',
'mi_hist',
'nullify',
'pairplot',
'plot_crosscat_chain_diagnostics'
]
|
from bdbcontrib.api import barplot
from bdbcontrib.api import cardinality
from bdbcontrib.api import draw_crosscat
from bdbcontrib.api import estimate_log_likelihood
from bdbcontrib.api import heatmap
from bdbcontrib.api import histogram
from bdbcontrib.api import mi_hist
from bdbcontrib.api import nullify
from bdbcontrib.api import pairplot
from bdbcontrib.api import plot_crosscat_chain_diagnostics
"""Main bdbcontrib API.
The bdbcontrib module servers a sandbox for experimental and semi-stable
features that are not yet ready for integreation to the bayeslite repository.
"""
__all__ = [
'barplot',
'cardinality',
'draw_crosscat',
'estimate_log_likelihood',
'heatmap',
'histogram',
'mi_hist',
'nullify',
'pairplot',
'plot_crosscat_chain_diagnostics'
]
|
Fix big from bayeslite to bdbcontrib.
|
Fix big from bayeslite to bdbcontrib.
|
Python
|
apache-2.0
|
probcomp/bdbcontrib,probcomp/bdbcontrib
|
27f503ef57a1582f7cc792d61f537bec71a4b02c
|
dhcp2nest/util.py
|
dhcp2nest/util.py
|
"""
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
def _follow_file_thread(fn, fq):
"""
Queue lines from the given file (fn) continuously, even as the file
grows or is replaced
WARNING: This generator will block forever on the tail subprocess--no
timeouts are enforced.
"""
# Use system tail with name-based following and retry
p = Popen(["tail", "-n0", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq)).start()
# Return the queue
return fq
|
"""
Utility functions for dhcp2nest
"""
from queue import Queue
from subprocess import Popen, PIPE
from threading import Thread
def follow_file(fn, max_lines=100):
"""
Return a Queue that is fed lines (up to max_lines) from the given file (fn)
continuously
The implementation given here was inspired by
http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python
"""
fq = Queue(maxsize=max_lines)
def _follow_file_thread(fn, fq):
"""
Queue lines from the given file (fn) continuously, even as the file
grows or is replaced
WARNING: This generator will block forever on the tail subprocess--no
timeouts are enforced.
"""
# Use system tail with name-based following and retry
p = Popen(["tail", "-n0", "-F", fn], stdout=PIPE)
# Loop forever on pulling data from tail
line = True
while line:
line = p.stdout.readline().decode('utf-8')
fq.put(line)
# Spawn a thread to read data from tail
Thread(target=_follow_file_thread, args=(fn, fq), daemon=True).start()
# Return the queue
return fq
|
Use daemon threads for follow_file()
|
Use daemon threads for follow_file()
Signed-off-by: Jason Bernardino Alonso <f71c42a1353bbcdbe07e24c2a1c893f8ea1d05ee@hackorp.com>
|
Python
|
mit
|
jbalonso/dhcp2nest
|
af0ec29ce0c830f096ab809fd2d69affd887ce16
|
feincms/module/page/admin.py
|
feincms/module/page/admin.py
|
# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.contrib import admin
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
|
# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db.models import FieldDoesNotExist
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
try:
Page._meta.get_field('template_key')
except FieldDoesNotExist:
raise ImproperlyConfigured(
'The page module requires a \'Page.register_templates()\' call somewhere'
' (\'Page.register_regions()\' is not sufficient).')
admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
|
Abort when the page model does not have a template_key field
|
Abort when the page model does not have a template_key field
|
Python
|
bsd-3-clause
|
joshuajonah/feincms,mjl/feincms,matthiask/django-content-editor,matthiask/feincms2-content,feincms/feincms,feincms/feincms,pjdelport/feincms,pjdelport/feincms,mjl/feincms,michaelkuty/feincms,matthiask/django-content-editor,joshuajonah/feincms,nickburlett/feincms,matthiask/feincms2-content,matthiask/django-content-editor,michaelkuty/feincms,michaelkuty/feincms,joshuajonah/feincms,nickburlett/feincms,matthiask/feincms2-content,nickburlett/feincms,michaelkuty/feincms,pjdelport/feincms,matthiask/django-content-editor,joshuajonah/feincms,mjl/feincms,feincms/feincms,nickburlett/feincms
|
2c5650ef41aaf8c116f3922be02e7c5e7a79524b
|
pychecker/pychecker2/File.py
|
pychecker/pychecker2/File.py
|
from pychecker2.util import type_filter
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
try:
line = line.lineno
except AttributeError:
pass
self.warnings.append( (line, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
|
from pychecker2.util import parents
from compiler import ast
class File:
def __init__(self, name):
self.name = name
self.parseTree = None
self.scopes = {}
self.root_scope = None
self.warnings = []
def __cmp__(self, other):
return cmp(self.name, other.name)
def warning(self, line, warn, *args):
lineno = line
try:
lineno = line.lineno
except AttributeError:
pass
if not lineno:
try:
for p in parents(line):
if p.lineno:
lineno = p.lineno
break
except AttributeError:
pass
self.warnings.append( (lineno, warn, args) )
def scope_filter(self, type):
return [(n, s)
for n, s in self.scopes.iteritems() if isinstance(n, type)
]
def function_scopes(self):
return self.scope_filter(ast.Function)
def class_scopes(self):
return self.scope_filter(ast.Class)
|
Add more ways to suck line numbers from nodes
|
Add more ways to suck line numbers from nodes
|
Python
|
bsd-3-clause
|
smspillaz/pychecker,smspillaz/pychecker,smspillaz/pychecker
|
2fb27cf8f4399ec6aba36b86d2993e6c3b81d0ee
|
coalib/bearlib/languages/__init__.py
|
coalib/bearlib/languages/__init__.py
|
"""
This directory holds means to get generic information for specific languages.
"""
# Start ignoring PyUnusedCodeBear
from .Language import Language
from .Language import Languages
from .definitions.Unknown import Unknown
from .definitions.C import C
from .definitions.CPP import CPP
from .definitions.CSharp import CSharp
from .definitions.CSS import CSS
from .definitions.Java import Java
from .definitions.JavaScript import JavaScript
from .definitions.Python import Python
from .definitions.Vala import Vala
from .definitions.html import HTML
# Stop ignoring PyUnusedCodeBear
|
"""
This directory holds means to get generic information for specific languages.
"""
# Start ignoring PyUnusedCodeBear
from .Language import Language
from .Language import Languages
from .definitions.Unknown import Unknown
from .definitions.C import C
from .definitions.CPP import CPP
from .definitions.CSharp import CSharp
from .definitions.CSS import CSS
from .definitions.Fortran import Fortran
from .definitions.Golang import Golang
from .definitions.html import HTML
from .definitions.Java import Java
from .definitions.JavaScript import JavaScript
from .definitions.JSP import JSP
from .definitions.Matlab import Matlab
from .definitions.ObjectiveC import ObjectiveC
from .definitions.PHP import PHP
from .definitions.PLSQL import PLSQL
from .definitions.Python import Python
from .definitions.Ruby import Ruby
from .definitions.Scala import Scala
from .definitions.Swift import Swift
from .definitions.Vala import Vala
# Stop ignoring PyUnusedCodeBear
|
Add definition into default import
|
Language: Add definition into default import
Fixes https://github.com/coala/coala/issues/4688
|
Python
|
agpl-3.0
|
coala/coala,SanketDG/coala,shreyans800755/coala,karansingh1559/coala,kartikeys98/coala,kartikeys98/coala,jayvdb/coala,CruiseDevice/coala,Nosferatul/coala,shreyans800755/coala,aptrishu/coala,nemaniarjun/coala,aptrishu/coala,karansingh1559/coala,jayvdb/coala,rimacone/testing2,Asalle/coala,CruiseDevice/coala,shreyans800755/coala,coala-analyzer/coala,coala-analyzer/coala,nemaniarjun/coala,karansingh1559/coala,Asalle/coala,coala/coala,SanketDG/coala,coala-analyzer/coala,SanketDG/coala,rimacone/testing2,CruiseDevice/coala,coala/coala,aptrishu/coala,Nosferatul/coala,kartikeys98/coala,jayvdb/coala,Nosferatul/coala,rimacone/testing2,Asalle/coala,nemaniarjun/coala
|
ac25dd0b2bf3188e1f4325ccdab78e79e7f0a937
|
spiceminer/kernel/__init__.py
|
spiceminer/kernel/__init__.py
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
from .highlevel import Kernel
# Legacy support (DEPRECATED)
from .legacy_support import *
from ..bodies import get
def load(path='.', recursive=True, followlinks=False):
return Kernel.load(**locals())
def unload(path='.', recursive=True, followlinks=False):
return Kernel.unload(**locals())
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
from .highlevel import Kernel
def load(path='.', recursive=True, followlinks=False, force_reload=False):
return Kernel.load(**locals())
def load_single(cls, path, extension=None, force_reload=False):
return Kernel.load_single(**locals())
def unload(path='.', recursive=True, followlinks=False):
return Kernel.unload(**locals())
|
Change the interface of the kernel submodule to prepare for the API change. * Remove legacy support * Add load_single() * Fix missing keyword argument in load()
|
Change the interface of the kernel submodule to prepare for the API change.
* Remove legacy support
* Add load_single()
* Fix missing keyword argument in load()
|
Python
|
mit
|
DaRasch/spiceminer,DaRasch/spiceminer
|
7ea0e2d8387b622f671638613a476dcbff6438e1
|
rest_framework_swagger/urls.py
|
rest_framework_swagger/urls.py
|
from django.conf.urls import patterns
from django.conf.urls import url
from rest_framework_swagger.views import SwaggerResourcesView, SwaggerApiView, SwaggerUIView
urlpatterns = patterns(
'',
url(r'^$', SwaggerUIView.as_view(), name="django.swagger.base.view"),
url(r'^api-docs/$', SwaggerResourcesView.as_view(), name="django.swagger.resources.view"),
url(r'^api-docs/(?P<path>.*)/?$', SwaggerApiView.as_view(), name='django.swagger.api.view'),
)
|
from django.conf.urls import url
from rest_framework_swagger.views import SwaggerResourcesView, SwaggerApiView, SwaggerUIView
urlpatterns = [
url(r'^$', SwaggerUIView.as_view(), name="django.swagger.base.view"),
url(r'^api-docs/$', SwaggerResourcesView.as_view(), name="django.swagger.resources.view"),
url(r'^api-docs/(?P<path>.*)/?$', SwaggerApiView.as_view(), name='django.swagger.api.view'),
]
|
Use the new style urlpatterns syntax to fix Django deprecation warnings
|
Use the new style urlpatterns syntax to fix Django deprecation warnings
The `patterns()` syntax is now deprecated:
https://docs.djangoproject.com/en/1.8/releases/1.8/#django-conf-urls-patterns
And so under Django 1.8 results in warnings:
rest_framework_swagger/urls.py:10: RemovedInDjango110Warning:
django.conf.urls.patterns() is deprecated and will be removed in
Django 1.10. Update your urlpatterns to be a list of
django.conf.urls.url() instances instead.
Fixes #380.
|
Python
|
bsd-2-clause
|
pombredanne/django-rest-swagger,aioTV/django-rest-swagger,cancan101/django-rest-swagger,visasq/django-rest-swagger,aioTV/django-rest-swagger,marcgibbons/django-rest-swagger,marcgibbons/django-rest-swagger,aioTV/django-rest-swagger,cancan101/django-rest-swagger,pombredanne/django-rest-swagger,arc6373/django-rest-swagger,cancan101/django-rest-swagger,visasq/django-rest-swagger,arc6373/django-rest-swagger,marcgibbons/django-rest-swagger,pombredanne/django-rest-swagger,marcgibbons/django-rest-swagger,visasq/django-rest-swagger,arc6373/django-rest-swagger,pombredanne/django-rest-swagger
|
3724f2895c704df595b083ecc56c56c351b6e32f
|
runbot_pylint/__openerp__.py
|
runbot_pylint/__openerp__.py
|
{
'name': 'Runbot Pylint',
'category': 'Website',
'summary': 'Runbot',
'version': '1.0',
'description': "Runbot",
'author': 'OpenERP SA',
'depends': ['runbot'],
'external_dependencies': {
},
'data': [
"view/runbot_pylint_view.xml"
],
'installable': True,
}
|
{
'name': 'Runbot Pylint',
'category': 'Website',
'summary': 'Runbot',
'version': '1.0',
'description': "Runbot",
'author': 'OpenERP SA',
'depends': ['runbot'],
'external_dependencies': {
'bin': ['pylint'],
},
'data': [
"view/runbot_pylint_view.xml"
],
'installable': True,
}
|
Add external depedencies to pylint bin
|
Add external depedencies to pylint bin
|
Python
|
agpl-3.0
|
amoya-dx/runbot-addons
|
d4722298a0fc03fca6ef17e246e3ffd74efc4d60
|
src/isomorphic_strings.py
|
src/isomorphic_strings.py
|
class Solution:
# @param {string} s
# @param {string} t
# @return {boolean}
def isIsomorphic(self, s, t):
if len(s) != len(t):
return False
charDict = {}
for i, c in enumerate(s):
if c not in charDict.keys() and t[i] not in charDict.values():
charDict[c] = t[i]
elif t[i] in charDict.values() or charDict[c] != t[i]:
return False
return True
if __name__ == '__main__':
test_list = [["ab","aa"],["aa", "bb"]
result_list = [False, True]
success = True
solution = Solution()
for i, s in enumerate(test_list):
result = solution.isIsomorphic(s[0], s[1])
if result != result_list[i]:
success = False
print s
print 'Expected value', result_list[i]
print 'Actual value', result
if success:
print 'All the tests passed.'
else:
print 'Please fix the failed test'
|
class Solution:
# @param {string} s
# @param {string} t
# @return {boolean}
def isIsomorphic(self, s, t):
if len(s) != len(t):
return False
charDict = {}
for i, c in enumerate(s):
if c not in charDict.keys() and t[i] not in charDict.values():
charDict[c] = t[i]
elif c in charDict.keys() and charDict[c] != t[i]:
return False
elif t[i] in charDict.values():
if c not in charDict.keys():
return False
elif charDict[c] != t[i]:
return False
return True
if __name__ == '__main__':
test_list = [["ab","aa"],["aa", "bb"], ["egg", "add"],["foo","bar"],["paper","title"]]
result_list = [False, True, True, False, True]
success = True
solution = Solution()
for i, s in enumerate(test_list):
result = solution.isIsomorphic(s[0], s[1])
if result != result_list[i]:
success = False
print s
print 'Expected value', result_list[i]
print 'Actual value', result
if success:
print 'All the tests passed.'
else:
print 'Please fix the failed test'
|
Add solution for the isomorphic strings
|
Add solution for the isomorphic strings
|
Python
|
mit
|
chancyWu/leetcode
|
b19746badd83190b4e908144d6bc830178445dc2
|
cc/license/tests/test_cc_license.py
|
cc/license/tests/test_cc_license.py
|
"""Tests for functionality within the cc.license module.
This file is a catch-all for tests with no place else to go."""
import cc.license
def test_locales():
locales = cc.license.locales()
for l in locales:
assert type(l) == unicode
for c in ('en', 'de', 'he', 'ja', 'fr'):
assert c in locales
|
"""Tests for functionality within the cc.license module.
This file is a catch-all for tests with no place else to go."""
import cc.license
def test_locales():
locales = cc.license.locales()
for l in locales:
assert type(l) == unicode
for c in ('en', 'de', 'he', 'ja', 'fr'):
assert c in locales
def test_cc_license_classes():
cc_dir = dir(cc.license)
assert 'Jurisdiction' in cc_dir
assert 'License' in cc_dir
assert 'Question' in cc_dir
assert 'LicenseSelector' in cc_dir
|
Add test to make sure certain classes are always found in cc.license, no matter where they are internally.
|
Add test to make sure certain classes are always found in cc.license,
no matter where they are internally.
|
Python
|
mit
|
creativecommons/cc.license,creativecommons/cc.license
|
0ed7e87a6eeaab56d5c59a7e6874b5a5b0bab314
|
tests/test_pointcloud.py
|
tests/test_pointcloud.py
|
from simulocloud import PointCloud
import json
import numpy as np
_TEST_XYZ = """[[10.0, 12.2, 14.4, 16.6, 18.8],
[11.1, 13.3, 15.5, 17.7, 19.9],
[0.1, 2.1, 4.5, 6.7, 8.9]]"""
_EXPECTED_POINTS = np.array([( 10. , 11.1, 0.1),
( 12.2, 13.3, 2.1),
( 14.4, 15.5, 4.5),
( 16.6, 17.7, 6.7),
( 18.8, 19.9, 8.9)],
dtype=[('x', '<f8'), ('y', '<f8'), ('z', '<f8')])
def test_PointCloud_from_lists():
""" Can PointCloud initialisable directly from `[[xs], [ys], [zs]]` ?"""
assert np.all(PointCloud(json.loads(_TEST_XYZ)).points == _EXPECTED_POINTS)
|
from simulocloud import PointCloud
import json
import numpy as np
_TEST_XYZ = [[10.0, 12.2, 14.4, 16.6, 18.8],
[11.1, 13.3, 15.5, 17.7, 19.9],
[0.1, 2.1, 4.5, 6.7, 8.9]]
_EXPECTED_POINTS = np.array([( 10. , 11.1, 0.1),
( 12.2, 13.3, 2.1),
( 14.4, 15.5, 4.5),
( 16.6, 17.7, 6.7),
( 18.8, 19.9, 8.9)],
dtype=[('x', '<f8'), ('y', '<f8'), ('z', '<f8')])
def test_PointCloud_from_lists():
""" Can PointCloud initialisable directly from `[[xs], [ys], [zs]]` ?"""
assert np.all(PointCloud(_TEST_XYZ).points == _EXPECTED_POINTS)
|
Write test data as list unless otherwise needed
|
Write test data as list unless otherwise needed
|
Python
|
mit
|
stainbank/simulocloud
|
8ce14cfb0044d90f2503a7bd940a7f6401c15db2
|
wagtail/admin/rich_text/editors/draftail.py
|
wagtail/admin/rich_text/editors/draftail.py
|
from django.forms import widgets
from wagtail.admin.edit_handlers import RichTextFieldPanel
from wagtail.admin.rich_text.converters.contentstate import ContentstateConverter
from wagtail.core.rich_text import features
class DraftailRichTextArea(widgets.Textarea):
# this class's constructor accepts a 'features' kwarg
accepts_features = True
def get_panel(self):
return RichTextFieldPanel
def __init__(self, *args, **kwargs):
self.options = kwargs.pop('options', None)
self.features = kwargs.pop('features', None)
if self.features is None:
self.features = features.get_default_features()
self.converter = ContentstateConverter(self.features)
super().__init__(*args, **kwargs)
def render(self, name, value, attrs=None):
if value is None:
translated_value = None
else:
translated_value = self.converter.from_database_format(value)
return super().render(name, translated_value, attrs)
def value_from_datadict(self, data, files, name):
original_value = super().value_from_datadict(data, files, name)
if original_value is None:
return None
return self.converter.to_database_format(original_value)
|
import json
from django.forms import Media, widgets
from wagtail.admin.edit_handlers import RichTextFieldPanel
from wagtail.admin.rich_text.converters.contentstate import ContentstateConverter
from wagtail.core.rich_text import features
class DraftailRichTextArea(widgets.Textarea):
# this class's constructor accepts a 'features' kwarg
accepts_features = True
def get_panel(self):
return RichTextFieldPanel
def __init__(self, *args, **kwargs):
self.options = kwargs.pop('options', None)
self.features = kwargs.pop('features', None)
if self.features is None:
self.features = features.get_default_features()
self.converter = ContentstateConverter(self.features)
super().__init__(*args, **kwargs)
def render(self, name, value, attrs=None):
if value is None:
translated_value = None
else:
translated_value = self.converter.from_database_format(value)
return super().render(name, translated_value, attrs)
def render_js_init(self, id_, name, value):
return "window.draftail.initEditor('{name}', {opts})".format(
name=name, opts=json.dumps(self.options))
def value_from_datadict(self, data, files, name):
original_value = super().value_from_datadict(data, files, name)
if original_value is None:
return None
return self.converter.to_database_format(original_value)
@property
def media(self):
return Media(js=[
'wagtailadmin/js/draftail.js',
], css={
'all': ['wagtailadmin/css/panels/dratail.css']
})
|
Integrate Draftail-related assets with Django widget
|
Integrate Draftail-related assets with Django widget
|
Python
|
bsd-3-clause
|
mikedingjan/wagtail,kaedroho/wagtail,timorieber/wagtail,mixxorz/wagtail,torchbox/wagtail,gasman/wagtail,gasman/wagtail,wagtail/wagtail,timorieber/wagtail,mixxorz/wagtail,nealtodd/wagtail,nimasmi/wagtail,kaedroho/wagtail,mikedingjan/wagtail,takeflight/wagtail,thenewguy/wagtail,zerolab/wagtail,timorieber/wagtail,thenewguy/wagtail,mixxorz/wagtail,FlipperPA/wagtail,zerolab/wagtail,takeflight/wagtail,nealtodd/wagtail,nimasmi/wagtail,zerolab/wagtail,takeflight/wagtail,zerolab/wagtail,mikedingjan/wagtail,mixxorz/wagtail,kaedroho/wagtail,torchbox/wagtail,thenewguy/wagtail,wagtail/wagtail,torchbox/wagtail,rsalmaso/wagtail,gasman/wagtail,rsalmaso/wagtail,zerolab/wagtail,nimasmi/wagtail,thenewguy/wagtail,wagtail/wagtail,rsalmaso/wagtail,nealtodd/wagtail,thenewguy/wagtail,timorieber/wagtail,rsalmaso/wagtail,torchbox/wagtail,nimasmi/wagtail,rsalmaso/wagtail,jnns/wagtail,kaedroho/wagtail,FlipperPA/wagtail,kaedroho/wagtail,takeflight/wagtail,gasman/wagtail,nealtodd/wagtail,wagtail/wagtail,FlipperPA/wagtail,wagtail/wagtail,jnns/wagtail,jnns/wagtail,FlipperPA/wagtail,mikedingjan/wagtail,jnns/wagtail,mixxorz/wagtail,gasman/wagtail
|
3db4d306c779ef3a84133dbbfc5614d514d72411
|
pi_gpio/handlers.py
|
pi_gpio/handlers.py
|
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinHttpManager
from pi_gpio import app
HTTP_MANAGER = PinHttpManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"value": fields.Integer
}
def pin_not_found(self):
return {'message': 'Pin not found'}, 404
class PinList(Pin):
def get(self):
result = HTTP_MANAGER.read_all()
return self.response(result, 200)
class PinDetail(Pin):
def get(self, pin_num):
result = HTTP_MANAGER.read_one(pin_num)
if not result:
return self.pin_not_found()
return self.response(result, 200)
def patch(self, pin_num):
self.parser.add_argument('value', type=int)
args = self.parser.parse_args()
result = HTTP_MANAGER.update_value(pin_num, args['value'])
if not result:
return self.pin_not_found()
return self.response(HTTP_MANAGER.read_one(pin_num), 200)
|
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinHttpManager
from pi_gpio import app
HTTP_MANAGER = PinHttpManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"value": fields.Integer,
"resistor": fields.String,
"initial": fields.String,
"event": fields.String,
"bounce": fields.Integer
}
def pin_not_found(self):
return {'message': 'Pin not found'}, 404
class PinList(Pin):
def get(self):
result = HTTP_MANAGER.read_all()
return self.response(result, 200)
class PinDetail(Pin):
def get(self, pin_num):
result = HTTP_MANAGER.read_one(pin_num)
if not result:
return self.pin_not_found()
return self.response(result, 200)
def patch(self, pin_num):
self.parser.add_argument('value', type=int)
args = self.parser.parse_args()
result = HTTP_MANAGER.update_value(pin_num, args['value'])
if not result:
return self.pin_not_found()
return self.response(HTTP_MANAGER.read_one(pin_num), 200)
|
Add new fields to response
|
Add new fields to response
|
Python
|
mit
|
projectweekend/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server
|
23bbb5737602408ba553b77810103d7b32140c89
|
test.py
|
test.py
|
#!/usr/bin/env python
import neukrill_net.utils as utils
import neukrill_net.image_processing as image_processing
import csv
import pickle
from sklearn.externals import joblib
import numpy as np
import glob
import os
def main():
settings = utils.Settings('settings.json')
image_fname_dict = settings.image_fnames
processing = lambda image: image_processing.resize_image(image, (48,48))
X, names = utils.load_data(image_fname_dict, processing=processing,
verbose=True)
clf = joblib.load('model.pkl')
p = clf.predict_proba(X)
with open('submission.csv', 'w') as csv_out:
out_writer = csv.writer(csv_out, delimiter=',')
out_writer.writerow(['image'] + list(settings.classes))
for index in range(len(names)):
out_writer.writerow([names[index]] + list(p[index,]))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import neukrill_net.utils as utils
import neukrill_net.image_processing as image_processing
import csv
import pickle
from sklearn.externals import joblib
import numpy as np
import glob
import os
def main():
settings = utils.Settings('settings.json')
image_fname_dict = settings.image_fnames
processing = lambda image: image_processing.resize_image(image, (48,48))
X, names = utils.load_data(image_fname_dict, processing=processing,
verbose=True)
clf = joblib.load('model.pkl')
p = clf.predict_proba(X)
utils.write_predictions('submission.csv', p, names, settings)
if __name__ == '__main__':
main()
|
Swap to using submission prediction writer function
|
Swap to using submission prediction writer function
|
Python
|
mit
|
Neuroglycerin/neukrill-net-work,Neuroglycerin/neukrill-net-work,Neuroglycerin/neukrill-net-work
|
9a19da30a933bc2872b9fc5b5966823c43e1982f
|
website/pages/tests.py
|
website/pages/tests.py
|
# -*- coding: utf-8 -*-
"""
File: tests.py
Creator: MazeFX
Date: 12-7-2016
Tests written for testing main website pages (home, about, contact, etc)
"""
from django.core.urlresolvers import resolve
from django.test import TestCase
from django.http import HttpRequest
from django.template.loader import render_to_string
from website.pages.views import home_page, send_email
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page_view(self):
found = resolve('/')
self.assertEqual(found.func, home_page)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = home_page(request)
expected_html = render_to_string('pages/home.html')
self.assertEqual(response.content.decode(), expected_html)
class SendEmailTest(TestCase):
def test_send_email_url_resolves_to_send_email_view(self):
found = resolve('/send-email/')
self.assertEqual(found.func, send_email)
def test_send_email_returns_correct_html(self):
request = HttpRequest()
response = send_email(request)
expected_html = render_to_string('pages/send_email.html')
self.assertEqual(response.content.decode(), expected_html)
|
# -*- coding: utf-8 -*-
"""
File: tests.py
Creator: MazeFX
Date: 12-7-2016
Tests written for testing main website pages (home, about, contact, etc)
Contact page has the ability to send emails through anymail/mailgun.
"""
from django.core.urlresolvers import resolve
from django.test import TestCase
from django.http import HttpRequest
from django.template.loader import render_to_string
from website.pages.views import home_page, contact
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page_view(self):
found = resolve('/')
self.assertEqual(found.func, home_page)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = home_page(request)
expected_html = render_to_string('pages/home.html')
self.assertEqual(response.content.decode(), expected_html)
class ContactTest(TestCase):
def test_contact_url_resolves_to_contact_view(self):
found = resolve('/contact/')
self.assertEqual(found.func, contact)
def test_contact_returns_correct_html(self):
request = HttpRequest()
response = contact(request)
expected_html = render_to_string('pages/contact.html')
self.assertEqual(response.content.decode(), expected_html)
|
Change send email to contact namespace
|
Change send email to contact namespace
|
Python
|
mit
|
MazeFX/cookiecutter_website_project,MazeFX/cookiecutter_website_project,MazeFX/cookiecutter_website_project,MazeFX/cookiecutter_website_project
|
f185f04f6efdabe161ae29ba72f7208b8adccc41
|
bulletin/tools/plugins/models.py
|
bulletin/tools/plugins/models.py
|
from django.db import models
from bulletin.models import Post
class Event(Post):
start_date = models.DateTimeField()
end_date = models.DateTimeField(null=True, blank=True)
time = models.CharField(max_length=255,
null=True, blank=True)
organization = models.CharField(max_length=255,
null=True, blank=True)
location = models.CharField(max_length=255)
class Job(Post):
organization = models.CharField(max_length=255)
class NewResource(Post):
blurb = models.TextField()
class Opportunity(Post):
blurb = models.TextField()
class Meta:
verbose_name_plural = 'opportunities'
class Story(Post):
blurb = models.TextField()
date = models.DateTimeField()
class Meta:
verbose_name_plural = 'stories'
|
from django.db import models
from bulletin.models import Post
class Event(Post):
start_date = models.DateTimeField()
end_date = models.DateTimeField(null=True, blank=True)
time = models.CharField(max_length=255,
null=True, blank=True)
organization = models.CharField(max_length=255,
null=True, blank=True)
location = models.CharField(max_length=255)
class Job(Post):
organization = models.CharField(max_length=255)
class NewResource(Post):
blurb = models.TextField()
verbose_name = 'newresource'
class Opportunity(Post):
blurb = models.TextField()
class Meta:
verbose_name_plural = 'opportunities'
class Story(Post):
blurb = models.TextField()
date = models.DateTimeField()
class Meta:
verbose_name_plural = 'stories'
|
Set verbose name of NewResource.
|
Set verbose name of NewResource.
|
Python
|
mit
|
AASHE/django-bulletin,AASHE/django-bulletin,AASHE/django-bulletin
|
4641b9a1b9a79fdeb0aaa3264de7bd1703b1d1fa
|
alexandria/web.py
|
alexandria/web.py
|
from alexandria import app, mongo
from decorators import *
from flask import render_template, request, jsonify, g, send_from_directory, redirect, url_for, session, flash
import os
import shutil
import requests
from pymongo import MongoClient
from functools import wraps
import bcrypt
from bson.objectid import ObjectId
@app.route('/', methods=['GET'])
@authenticated
def index():
return render_template('app.html')
@app.route('/portal')
def portal():
if not session.get('username'):
return render_template('portal.html')
else:
return render_template('index.html')
@app.route('/logout')
def logout():
session.pop('username', None)
session.pop('role', None)
session.pop('realname', None)
return redirect(url_for('index'))
@app.route('/download/<id>/<format>')
@authenticated
def download(id, format):
book = mongo.Books.find({'id':id})[0]
response = send_from_directory(app.config['LIB_DIR'], id+'.'+format)
response.headers.add('Content-Disposition', 'attachment; filename="' + book['title'] + '.' + format + '"')
return response
@app.route('/upload')
@authenticated
@administrator
def upload():
return render_template('upload.html')
if __name__ == "__main__":
app.run()
|
from alexandria import app, mongo
from decorators import *
from flask import render_template, request, jsonify, g, send_from_directory, redirect, url_for, session, flash
import os
import shutil
import requests
from pymongo import MongoClient
from functools import wraps
import bcrypt
from bson.objectid import ObjectId
@app.route('/', methods=['GET'])
@authenticated
def index():
return render_template('app.html')
@app.route('/portal')
def portal():
if not session.get('username'):
return render_template('portal.html')
else:
return redirect(url_for('index'))
@app.route('/logout')
def logout():
session.pop('username', None)
session.pop('role', None)
session.pop('realname', None)
return redirect(url_for('index'))
@app.route('/download/<id>/<format>')
@authenticated
def download(id, format):
book = mongo.Books.find({'id':id})[0]
response = send_from_directory(app.config['LIB_DIR'], id+'.'+format)
response.headers.add('Content-Disposition', 'attachment; filename="' + book['title'] + '.' + format + '"')
return response
@app.route('/upload')
@authenticated
@administrator
def upload():
return render_template('upload.html')
if __name__ == "__main__":
app.run()
|
Fix return on active user accessing the portal
|
Fix return on active user accessing the portal
|
Python
|
mit
|
citruspi/Alexandria,citruspi/Alexandria
|
e3928f489f481c9e44c634d7ee98afc5425b4432
|
tests/test_yaml_utils.py
|
tests/test_yaml_utils.py
|
import pytest
from apispec import yaml_utils
def test_load_yaml_from_docstring():
def f():
"""
Foo
bar
baz quux
---
herp: 1
derp: 2
"""
result = yaml_utils.load_yaml_from_docstring(f.__doc__)
assert result == {"herp": 1, "derp": 2}
@pytest.mark.parametrize("docstring", (None, "", "---"))
def test_load_yaml_from_docstring_empty_docstring(docstring):
assert yaml_utils.load_yaml_from_docstring(docstring) == {}
@pytest.mark.parametrize("docstring", (None, "", "---"))
def test_load_operations_from_docstring_empty_docstring(docstring):
assert yaml_utils.load_operations_from_docstring(docstring) == {}
|
import pytest
from apispec import yaml_utils
def test_load_yaml_from_docstring():
def f():
"""
Foo
bar
baz quux
---
herp: 1
derp: 2
"""
result = yaml_utils.load_yaml_from_docstring(f.__doc__)
assert result == {"herp": 1, "derp": 2}
@pytest.mark.parametrize("docstring", (None, "", "---"))
def test_load_yaml_from_docstring_empty_docstring(docstring):
assert yaml_utils.load_yaml_from_docstring(docstring) == {}
@pytest.mark.parametrize("docstring", (None, "", "---"))
def test_load_operations_from_docstring_empty_docstring(docstring):
assert yaml_utils.load_operations_from_docstring(docstring) == {}
def test_dict_to_yaml_unicode():
assert yaml_utils.dict_to_yaml({"가": "나"}) == '"\\uAC00": "\\uB098"\n'
assert yaml_utils.dict_to_yaml({"가": "나"}, {"allow_unicode": True}) == "가: 나\n"
|
Add regression test for generating yaml with unicode
|
Add regression test for generating yaml with unicode
|
Python
|
mit
|
marshmallow-code/smore,marshmallow-code/apispec
|
baf09f8b308626abb81431ddca4498409fc9d5ce
|
campaigns/tests/test_views.py
|
campaigns/tests/test_views.py
|
from django.test import TestCase
from django.http import HttpRequest
from campaigns.views import create_campaign
from campaigns.models import Campaign
from campaigns.forms import CampaignForm
class HomePageTest(TestCase):
def test_does_root_url_resolves_the_home_page(self):
called = self.client.get('/')
self.assertTemplateUsed(called, 'home.html')
class CampaignsViewsTest(TestCase):
def test_does_create_campaign_resolves_the_right_url(self):
called = self.client.get('/campaigns/new')
self.assertTemplateUsed(called, 'new_campaign.html')
# Trying to do self.client.post was using GET request for some
# reason so i made it that ugly
def test_does_create_camapign_saves_objects_with_POST_requests(self):
self.assertEqual(Campaign.objects.count(), 0)
request = HttpRequest()
request.method = 'POST'
request.POST['title'] = 'C1'
request.POST['description'] = 'C1Descr'
create_campaign(request)
campaign = Campaign.objects.first()
self.assertEqual(Campaign.objects.count(), 1)
self.assertEqual(campaign.title, 'C1')
self.assertEqual(campaign.description, 'C1Descr')
|
from django.test import TestCase
from django.http import HttpRequest
from campaigns.views import create_campaign
from campaigns.models import Campaign
from campaigns.forms import CampaignForm
def make_POST_request(titleValue, descriptionValue):
request = HttpRequest()
request.method = 'POST'
request.POST['title'] = titleValue
request.POST['description'] = descriptionValue
return request
class HomePageTest(TestCase):
def test_does_root_url_resolves_the_home_page(self):
called = self.client.get('/')
self.assertTemplateUsed(called, 'home.html')
class CampaignsViewsTest(TestCase):
def test_does_create_campaign_resolves_the_right_url(self):
called = self.client.get('/campaigns/new')
self.assertTemplateUsed(called, 'new_campaign.html')
# Trying to do self.client.post was using GET request for some
# reason so i made it that ugly
def test_does_create_campaign_saves_objects_with_POST_requests(self):
self.assertEqual(Campaign.objects.count(), 0)
create_campaign(make_POST_request('C1', 'C1Descr'))
campaign = Campaign.objects.first()
self.assertEqual(Campaign.objects.count(), 1)
self.assertEqual(campaign.title, 'C1')
self.assertEqual(campaign.description, 'C1Descr')
def test_create_campaign_dont_saves_empty_objects(self):
self.assertEqual(Campaign.objects.count(), 0)
create_campaign(make_POST_request('', ''))
self.assertEqual(Campaign.objects.count(), 0)
|
Refactor some redundancy in the views tests
|
Refactor some redundancy in the views tests
|
Python
|
apache-2.0
|
Springsteen/tues_admission,Springsteen/tues_admission,Springsteen/tues_admission,Springsteen/tues_admission
|
2ca6f765a3bd1eca6bd255f9c679c9fbea78484a
|
run_maya_tests.py
|
run_maya_tests.py
|
"""Use Mayapy for testing
Usage:
$ mayapy run_maya_tests.py
"""
import sys
import nose
import warnings
from nose_exclude import NoseExclude
warnings.filterwarnings("ignore", category=DeprecationWarning)
if __name__ == "__main__":
from maya import standalone
standalone.initialize()
argv = sys.argv[:]
argv.extend([
# Sometimes, files from Windows accessed
# from Linux cause the executable flag to be
# set, and Nose has an aversion to these
# per default.
"--exe",
"--verbose",
"--with-doctest",
"--with-coverage",
"--cover-html",
"--cover-tests",
"--cover-erase",
"--exclude-dir=mindbender/nuke",
"--exclude-dir=mindbender/houdini",
"--exclude-dir=mindbender/schema",
"--exclude-dir=mindbender/plugins",
# We can expect any vendors to
# be well tested beforehand.
"--exclude-dir=mindbender/vendor",
])
nose.main(argv=argv,
addplugins=[NoseExclude()])
|
"""Use Mayapy for testing
Usage:
$ mayapy run_maya_tests.py
"""
import sys
import nose
import logging
import warnings
from nose_exclude import NoseExclude
warnings.filterwarnings("ignore", category=DeprecationWarning)
if __name__ == "__main__":
from maya import standalone
standalone.initialize()
log = logging.getLogger()
# Discard default Maya logging handler
log.handlers[:] = []
argv = sys.argv[:]
argv.extend([
# Sometimes, files from Windows accessed
# from Linux cause the executable flag to be
# set, and Nose has an aversion to these
# per default.
"--exe",
"--verbose",
"--with-doctest",
"--with-coverage",
"--cover-html",
"--cover-tests",
"--cover-erase",
"--exclude-dir=mindbender/nuke",
"--exclude-dir=mindbender/houdini",
"--exclude-dir=mindbender/schema",
"--exclude-dir=mindbender/plugins",
# We can expect any vendors to
# be well tested beforehand.
"--exclude-dir=mindbender/vendor",
])
nose.main(argv=argv,
addplugins=[NoseExclude()])
|
Enhance readability of test output
|
Enhance readability of test output
|
Python
|
mit
|
MoonShineVFX/core,mindbender-studio/core,MoonShineVFX/core,mindbender-studio/core,getavalon/core,getavalon/core
|
6578b6d2dfca38940be278d82e4f8d8248ae3c79
|
convert_codecs.py
|
convert_codecs.py
|
#!/usr/env python
# -*- coding: utf-8 -*-
"""Convert file codecs
Usage:
convert_codecs.py <sourceFile> <targetFile> <sourceEncoding> <targetEncoding>
convert_codecs.py (-h | --help)
"""
import codecs
from docopt import docopt
__version__ = '0.1'
__author__ = 'Honghe'
BLOCKSIZE = 1024**2 # size in bytes
def convert(sourceFile, targetFile, sourceEncoding, targetEncoding):
with codecs.open(sourceFile, 'rb', sourceEncoding) as sfile:
with codecs.open(targetFile, 'wb', targetEncoding) as tfile:
while True:
contents = sfile.read(BLOCKSIZE)
if not contents:
break
tfile.write(contents)
if __name__ == '__main__':
arguments = docopt(__doc__)
sourceFile = arguments['<sourceFile>']
targetFile = arguments['<targetFile>']
sourceEncoding = arguments['<sourceEncoding>']
targetEncoding = arguments['<targetEncoding>']
convert(sourceFile, targetFile, sourceEncoding, targetEncoding)
|
#!/usr/env python
# -*- coding: utf-8 -*-
"""Convert file codecs
Usage:
convert_codecs.py <sourceFile> <targetFile> <sourceEncoding> <targetEncoding>
convert_codecs.py (-h | --help)
"""
import codecs
from docopt import docopt
__version__ = '0.1'
__author__ = 'Honghe'
BLOCKSIZE = 1024 # number of characters in corresponding encoding, not bytes
def convert(sourceFile, targetFile, sourceEncoding, targetEncoding):
with codecs.open(sourceFile, 'rb', sourceEncoding) as sfile:
with codecs.open(targetFile, 'wb', targetEncoding) as tfile:
while True:
contents = sfile.read(BLOCKSIZE)
if not contents:
break
tfile.write(contents)
if __name__ == '__main__':
arguments = docopt(__doc__)
sourceFile = arguments['<sourceFile>']
targetFile = arguments['<targetFile>']
sourceEncoding = arguments['<sourceEncoding>']
targetEncoding = arguments['<targetEncoding>']
convert(sourceFile, targetFile, sourceEncoding, targetEncoding)
|
Correct the comment of BLOCKSIZE
|
Correct the comment of BLOCKSIZE
|
Python
|
apache-2.0
|
Honghe/convert_codecs
|
e0b2ac4b71859708f4b35ae8ef7227b630a6e2d9
|
ctypeslib/test/test_toolchain.py
|
ctypeslib/test/test_toolchain.py
|
import unittest
import sys
from ctypeslib import h2xml, xml2py
class ToolchainTest(unittest.TestCase):
if sys.platform == "win32":
def test(self):
h2xml.main(["h2xml", "-q",
"-D WIN32_LEAN_AND_MEAN",
"-D _UNICODE", "-D UNICODE",
"-c", "windows.h",
"-o", "_windows_gen.xml"])
xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"])
import _winapi_gen
if __name__ == "__main__":
import unittest
unittest.main()
|
import unittest
import sys
from ctypeslib import h2xml, xml2py
class ToolchainTest(unittest.TestCase):
if sys.platform == "win32":
def test_windows(self):
h2xml.main(["h2xml", "-q",
"-D WIN32_LEAN_AND_MEAN",
"-D _UNICODE", "-D UNICODE",
"-c", "windows.h",
"-o", "_windows_gen.xml"])
xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"])
import _winapi_gen
def test(self):
h2xml.main(["h2xml", "-q",
"-D WIN32_LEAN_AND_MEAN",
"-D _UNICODE", "-D UNICODE",
"-c", "stdio.h",
"-o", "_stdio_gen.xml"])
xml2py.main(["xml2py", "_stdio_gen.xml", "-o", "_stdio_gen.py"])
import _stdio_gen
if __name__ == "__main__":
import unittest
unittest.main()
|
Add a test for stdio.h.
|
Add a test for stdio.h.
|
Python
|
mit
|
sugarmanz/ctypeslib
|
649bea9ce3ebaf4ba44919097b731ba915703852
|
alembic/versions/30d0a626888_add_username.py
|
alembic/versions/30d0a626888_add_username.py
|
"""Add username
Revision ID: 30d0a626888
Revises: 51375067b45
Create Date: 2015-10-29 10:32:03.077400
"""
# revision identifiers, used by Alembic.
revision = '30d0a626888'
down_revision = '51375067b45'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('_user', sa.Column('username', sa.Unicode(255), unique=True))
op.create_unique_constraint(
'_user_email_key', '_user', ['email'])
def downgrade():
op.drop_column('_user', 'username')
op.drop_constraint(
'_user_email_key', table_name='_user', type_='unique')
|
"""Add username
Revision ID: 30d0a626888
Revises: 51375067b45
Create Date: 2015-10-29 10:32:03.077400
"""
# revision identifiers, used by Alembic.
revision = '30d0a626888'
down_revision = '51375067b45'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
"""
SQL That equal to the following
ALTER TABLE app_name._user ADD COLUMN username varchar(255);
ALTER TABLE app_name._user ADD CONSTRAINT '_user_email_key' UNIQUE('email');
UPDATE app_name._version set version_num = '30d0a626888;
"""
op.add_column('_user', sa.Column('username', sa.Unicode(255), unique=True))
op.create_unique_constraint(
'_user_email_key', '_user', ['email'])
def downgrade():
op.drop_column('_user', 'username')
op.drop_constraint(
'_user_email_key', table_name='_user', type_='unique')
|
Add generate sql example as comment
|
Add generate sql example as comment
|
Python
|
apache-2.0
|
SkygearIO/skygear-server,rickmak/skygear-server,rickmak/skygear-server,SkygearIO/skygear-server,rickmak/skygear-server,SkygearIO/skygear-server,SkygearIO/skygear-server
|
281a096cea735845bdb74d60abf14f1422f2c624
|
test_runner/executable.py
|
test_runner/executable.py
|
import argh
from .environments import Environment
from .frameworks import Tempest
from .utils import cleanup, Reporter
LOG = Reporter(__name__).setup()
def main(endpoint, username='admin', password='secrete', test_path='api'):
environment = Environment(username, password, endpoint)
with cleanup(environment):
environment.build()
framework = Tempest(environment, repo_dir='/opt/tempest',
test_path=test_path)
results = framework.run_tests()
LOG.info('Results: {0}'.format(results))
if __name__ == '__main__':
argh.dispatch_command(main)
|
import argh
from .environments import Environment
from .frameworks import Tempest
from .utils import cleanup, Reporter
LOG = Reporter(__name__).setup()
def main(endpoint, username='admin', password='secrete', test_path='api'):
environment = Environment(username, password, endpoint)
with cleanup(environment):
environment.build()
framework = Tempest(environment, repo_dir='/opt/tempest',
test_path=test_path)
results = framework.run_tests()
LOG.info('Results: {0}'.format(results))
argh.dispatch_command(main)
|
Move command dispatch into full module
|
Move command dispatch into full module
|
Python
|
mit
|
rcbops-qa/test_runner
|
0dd935a383d4b8d066dc091226b61119d245a7f9
|
threeOhOne.py
|
threeOhOne.py
|
#!/usr/bin/env python
# -*- Coding: utf-8 -*-
"""
" In its present form, it simply takes a comma delimited .csv file and outputs a .txt file containing valid 301 redirect statements for an .htaccess file
"
" author: Claude Müller
" wbsite: http://mediarocket.co.za
"
"""
import csv
class ThreeOhOne:
def __ini__(self):
pass
def main():
threeOhOne = ThreeOhOne()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# -*- Coding: utf-8 -*-
"""
" In its present form, it simply takes a comma delimited .csv file and outputs a .txt file containing valid 301 redirect statements for an .htaccess file
"
" author: Claude Müller
" wbsite: http://mediarocket.co.za
"
"""
import sys
import csv
class ThreeOhOne:
outputDir = 'outputs'
def __init__(self, filename):
self._process(sys.argv[1])
def _process(self, filename):
try:
fd = open(filename, 'rt')
except FileNotFoundError:
print('Error: File not found ;/')
def main():
if len(sys.argv) < 2:
print("usage: " + sys.argv[0] + " <the_file.csv>")
exit(1)
else:
threeOhOne = ThreeOhOne(sys.argv[1])
if __name__ == "__main__":
main()
|
Add command line argument capability
|
[py] Add command line argument capability
|
Python
|
mit
|
claudemuller/301csv2htaccess
|
52c7321c78c8a81b6b557d67fe5af44b8b32df4c
|
src/octoprint/__main__.py
|
src/octoprint/__main__.py
|
#!/usr/bin/env python2
# coding=utf-8
from __future__ import absolute_import, division, print_function
if __name__ == "__main__":
import octoprint
octoprint.main()
|
#!/usr/bin/env python2
# coding=utf-8
from __future__ import absolute_import, division, print_function
import sys
if sys.version_info[0] >= 3:
raise Exception("Octoprint does not support Python 3")
if __name__ == "__main__":
import octoprint
octoprint.main()
|
Handle unsupported version at runtime.
|
Handle unsupported version at runtime.
If you have an ancient setuptools, 4a36ddb3aa77b8d1b1a64c197607fa652705856c
won't successfully prevent installing. These changes will at least give a sane error,
rather then just barfing on random syntax errors due to the `unicode` type not
being present in py3k.
Cherry picked from 2f20f2d
|
Python
|
agpl-3.0
|
Jaesin/OctoPrint,foosel/OctoPrint,Jaesin/OctoPrint,Jaesin/OctoPrint,foosel/OctoPrint,Jaesin/OctoPrint,foosel/OctoPrint,foosel/OctoPrint
|
ca7fb88d36b386defab610388761609539e0a9cf
|
conary/build/capsulerecipe.py
|
conary/build/capsulerecipe.py
|
#
# Copyright (c) 2009 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
from conary.build import defaultrecipes
from conary.build.recipe import RECIPE_TYPE_CAPSULE
from conary.build.packagerecipe import BaseRequiresRecipe, AbstractPackageRecipe
class AbstractCapsuleRecipe(AbstractPackageRecipe):
internalAbstractBaseClass = 1
internalPolicyModules = ( 'packagepolicy', 'capsulepolicy' )
_recipeType = RECIPE_TYPE_CAPSULE
def __init__(self, *args, **kwargs):
klass = self._getParentClass('AbstractPackageRecipe')
klass.__init__(self, *args, **kwargs)
from conary.build import source
self._addSourceAction('source.addCapsule', source.addCapsule)
self._addSourceAction('source.addSource', source.addSource)
exec defaultrecipes.CapsuleRecipe
|
#
# Copyright (c) 2009 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
import inspect
from conary.build import action, defaultrecipes
from conary.build.recipe import RECIPE_TYPE_CAPSULE
from conary.build.packagerecipe import BaseRequiresRecipe, AbstractPackageRecipe
class AbstractCapsuleRecipe(AbstractPackageRecipe):
internalAbstractBaseClass = 1
internalPolicyModules = ( 'packagepolicy', 'capsulepolicy' )
_recipeType = RECIPE_TYPE_CAPSULE
def __init__(self, *args, **kwargs):
klass = self._getParentClass('AbstractPackageRecipe')
klass.__init__(self, *args, **kwargs)
from conary.build import build
for name, item in build.__dict__.items():
if inspect.isclass(item) and issubclass(item, action.Action):
self._addBuildAction(name, item)
def loadSourceActions(self):
self._loadSourceActions(lambda item: item._packageAction is True)
exec defaultrecipes.CapsuleRecipe
|
Enable building hybrid capsule/non-capsule packages (CNY-3271)
|
Enable building hybrid capsule/non-capsule packages (CNY-3271)
|
Python
|
apache-2.0
|
fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary
|
d54e5f25601fe2f57a2c6be5524430f0068e05c4
|
image_translate/frames_rendering.py
|
image_translate/frames_rendering.py
|
# need to install python-opencv, pygame, numpy, scipy, PIL
import sys
import pygame
from pygame.locals import *
import opencv
#this is important for capturing/displaying images
from opencv import highgui
def get_image(camera):
img = highgui.cvQueryFrame(camera)
# Add the line below if you need it (Ubuntu 8.04+)
# im = opencv.cvGetMat(im)
# convert Ipl image to PIL image
return opencv.adaptors.Ipl2PIL(img)
def render_flipped_camera():
camera = highgui.cvCreateCameraCapture(0)
fps = 30.0
pygame.init()
pygame.display.set_mode((640, 480))
pygame.display.set_caption("WebCam Demo")
screen = pygame.display.get_surface()
while True:
events = pygame.event.get()
for event in events:
if event.type == QUIT or event.type == KEYDOWN:
sys.exit(0)
im = get_image(camera)
pg_img = pygame.image.frombuffer(im.tostring(), im.size, im.mode)
screen.blit(pg_img, (0, 0))
pygame.display.flip()
pygame.time.delay(int(1000 * 1.0/fps))
if __name__ == "__main__":
render_flipped_camera()
|
# need to install python-opencv, pygame, numpy, scipy, PIL
import sys
import pygame
from pygame.locals import QUIT, KEYDOWN
import opencv
#this is important for capturing/displaying images
from opencv import highgui
def get_image(camera):
img = highgui.cvQueryFrame(camera)
# Add the line below if you need it (Ubuntu 8.04+)
# im = opencv.cvGetMat(im)
# convert Ipl image to PIL image
return opencv.adaptors.Ipl2PIL(img)
def render_flipped_camera():
camera = highgui.cvCreateCameraCapture(0)
fps = 30.0
pygame.init()
pygame.display.set_mode((640, 480))
pygame.display.set_caption("WebCam Demo")
screen = pygame.display.get_surface()
while True:
events = pygame.event.get()
for event in events:
if event.type == QUIT or event.type == KEYDOWN:
sys.exit(0)
im = get_image(camera)
pg_img = pygame.image.frombuffer(im.tostring(), im.size, im.mode)
screen.blit(pg_img, (0, 0))
pygame.display.flip()
pygame.time.delay(int(1000 * 1.0/fps))
if __name__ == "__main__":
render_flipped_camera()
|
Remove brute and inconvinient star import
|
Remove brute and inconvinient star import
|
Python
|
mit
|
duboviy/study_languages
|
1d53f6dc8346a655a86e670d0d4de56f7dc93d04
|
gala/sparselol.py
|
gala/sparselol.py
|
import numpy as np
from scipy import sparse
from .sparselol_cy import extents_count
def extents(labels):
"""Compute the extents of every integer value in ``arr``.
Parameters
----------
labels : array of ints
The array of values to be mapped.
Returns
-------
locs : sparse.csr_matrix
A sparse matrix in which the nonzero elements of row i are the
indices of value i in ``arr``.
"""
labels = labels.ravel()
counts = np.bincount(labels)
indptr = np.concatenate([[0], np.cumsum(counts)])
indices = np.empty(labels.size, int)
extents_count(labels.ravel(), indptr.copy(), out=indices)
locs = sparse.csr_matrix((indices, indices, indptr), dtype=int)
return locs
|
import numpy as np
from scipy import sparse
from .sparselol_cy import extents_count
def extents(labels):
"""Compute the extents of every integer value in ``arr``.
Parameters
----------
labels : array of ints
The array of values to be mapped.
Returns
-------
locs : sparse.csr_matrix
A sparse matrix in which the nonzero elements of row i are the
indices of value i in ``arr``.
"""
labels = labels.ravel()
counts = np.bincount(labels)
indptr = np.concatenate([[0], np.cumsum(counts)])
indices = np.empty(labels.size, int)
extents_count(labels.ravel(), indptr.copy(), out=indices)
one = np.ones((1,), dtype=int)
data = np.lib.as_strided(one, shape=indices.shape, strides=(0,))
locs = sparse.csr_matrix((data, indices, indptr), dtype=int)
return locs
|
Use stride tricks to save data memory
|
Use stride tricks to save data memory
|
Python
|
bsd-3-clause
|
janelia-flyem/gala,jni/gala
|
7ebc9a4511d52707ce88a1b8bc2d3fa638e1fb91
|
c2rst.py
|
c2rst.py
|
import sphinx.parsers
import docutils.parsers.rst as rst
class CStrip(sphinx.parsers.Parser):
def __init__(self):
self.rst_parser = rst.Parser()
def parse(self, inputstring, document):
stripped = []
for line in inputstring.split("\n"):
line = line.strip()
if line == "//|":
stripped.append("")
elif line.startswith("//| "):
stripped.append(line[len("//| "):])
stripped = "\r\n".join(stripped)
self.rst_parser.parse(stripped, document)
|
import docutils.parsers
import docutils.parsers.rst as rst
class CStrip(docutils.parsers.Parser):
def __init__(self):
self.rst_parser = rst.Parser()
def parse(self, inputstring, document):
stripped = []
for line in inputstring.split("\n"):
line = line.strip()
if line == "//|":
stripped.append("")
elif line.startswith("//| "):
stripped.append(line[len("//| "):])
stripped = "\r\n".join(stripped)
self.rst_parser.parse(stripped, document)
|
Switch away from sphinx.parsers which isn't available in sphinx 1.3.5 on Read The Docs.
|
Switch away from sphinx.parsers which isn't available in sphinx 1.3.5 on Read The Docs.
|
Python
|
mit
|
adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/micropython,adafruit/micropython,adafruit/circuitpython,adafruit/micropython,adafruit/circuitpython,adafruit/micropython,adafruit/micropython,adafruit/circuitpython
|
e37c7cace441e837120b820936c6f4ae8de78996
|
sts/controller_manager.py
|
sts/controller_manager.py
|
from sts.util.console import msg
class ControllerManager(object):
''' Encapsulate a list of controllers objects '''
def __init__(self, controllers):
self.uuid2controller = {
controller.uuid : controller
for controller in controllers
}
@property
def controllers(self):
return self.uuid2controller.values()
@property
def live_controllers(self):
alive = [controller for controller in self.controllers if controller.alive]
return set(alive)
@property
def down_controllers(self):
down = [controller for controller in self.controllers if not controller.alive]
return set(down)
def get_controller(self, uuid):
if uuid not in self.uuid2controller:
raise ValueError("unknown uuid %s" % str(uuid))
return self.uuid2controller[uuid]
def kill_all(self):
for c in self.live_controllers:
c.kill()
self.uuid2controller = {}
@staticmethod
def kill_controller(controller):
msg.event("Killing controller %s" % str(controller))
controller.kill()
@staticmethod
def reboot_controller(controller):
msg.event("Restarting controller %s" % str(controller))
controller.start()
def check_controller_processes_alive(self):
controllers_with_problems = []
for c in self.live_controllers:
(rc, msg) = c.check_process_status()
if not rc:
c.alive = False
controllers_with_problems.append ( (c, msg) )
return controllers_with_problems
|
from sts.util.console import msg
class ControllerManager(object):
''' Encapsulate a list of controllers objects '''
def __init__(self, controllers):
self.uuid2controller = {
controller.uuid : controller
for controller in controllers
}
@property
def controllers(self):
cs = self.uuid2controller.values()
cs.sort(key=lambda c: c.uuid)
return cs
@property
def live_controllers(self):
alive = [controller for controller in self.controllers if controller.alive]
return set(alive)
@property
def down_controllers(self):
down = [controller for controller in self.controllers if not controller.alive]
return set(down)
def get_controller(self, uuid):
if uuid not in self.uuid2controller:
raise ValueError("unknown uuid %s" % str(uuid))
return self.uuid2controller[uuid]
def kill_all(self):
for c in self.live_controllers:
c.kill()
self.uuid2controller = {}
@staticmethod
def kill_controller(controller):
msg.event("Killing controller %s" % str(controller))
controller.kill()
@staticmethod
def reboot_controller(controller):
msg.event("Restarting controller %s" % str(controller))
controller.start()
def check_controller_processes_alive(self):
controllers_with_problems = []
live = list(self.live_controllers)
live.sort(key=lambda c: c.uuid)
for c in live:
(rc, msg) = c.check_process_status()
if not rc:
c.alive = False
controllers_with_problems.append ( (c, msg) )
return controllers_with_problems
|
Make .contollers() deterministic (was using hash.values())
|
Make .contollers() deterministic (was using hash.values())
|
Python
|
apache-2.0
|
ucb-sts/sts,jmiserez/sts,ucb-sts/sts,jmiserez/sts
|
34b57742801f888af7597378bd00f9d06c2d3b66
|
packages/Python/lldbsuite/test/repl/quicklookobject/TestREPLQuickLookObject.py
|
packages/Python/lldbsuite/test/repl/quicklookobject/TestREPLQuickLookObject.py
|
# TestREPLQuickLookObject.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that QuickLookObject works correctly in the REPL"""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.lldbtest as lldbtest
class REPLQuickLookTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
def doTest(self):
self.command('true.customPlaygroundQuickLook()', patterns=['Logical = true'])
self.command('1.25.customPlaygroundQuickLook()', patterns=['Double = 1.25'])
self.command('Float(1.25).customPlaygroundQuickLook()', patterns=['Float = 1.25'])
self.command('"Hello".customPlaygroundQuickLook()', patterns=['Text = \"Hello\"'])
|
# TestREPLQuickLookObject.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that QuickLookObject works correctly in the REPL"""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.lldbtest as lldbtest
class REPLQuickLookTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
def doTest(self):
self.command('PlaygroundQuickLook(reflecting: true)', patterns=['Logical = true'])
self.command('PlaygroundQuickLook(reflecting: 1.25)', patterns=['Double = 1.25'])
self.command('PlaygroundQuickLook(reflecting: Float(1.25))', patterns=['Float = 1.25'])
self.command('PlaygroundQuickLook(reflecting: "Hello")', patterns=['Text = \"Hello\"'])
|
Use the PlaygroundQuickLook(reflecting:) constructor in this test case
|
Use the PlaygroundQuickLook(reflecting:) constructor in this test case
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
58c13375f7ea4acaf21ccf1151460d79e59bfdf1
|
spotify/__init__.py
|
spotify/__init__.py
|
from __future__ import unicode_literals
import os
import cffi
__version__ = '2.0.0a1'
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
from spotify.error import * # noqa
|
from __future__ import unicode_literals
import os
import weakref
import cffi
__version__ = '2.0.0a1'
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
|
Add global_weakrefs dict to keep strings in structs, etc alive
|
Add global_weakrefs dict to keep strings in structs, etc alive
|
Python
|
apache-2.0
|
kotamat/pyspotify,felix1m/pyspotify,jodal/pyspotify,mopidy/pyspotify,jodal/pyspotify,kotamat/pyspotify,mopidy/pyspotify,jodal/pyspotify,felix1m/pyspotify,felix1m/pyspotify,kotamat/pyspotify
|
723f59d43cce9d7a09386447389e8df33b5d323e
|
tests/base/base.py
|
tests/base/base.py
|
import steel
import unittest
class NameAwareOrderedDictTests(unittest.TestCase):
def setUp(self):
self.d = steel.NameAwareOrderedDict()
def test_ignore_object(self):
# Objects without a set_name() method should be ignored
self.d['example'] = object()
self.assertFalse(hasattr(self.d['example'], 'name'))
def test_auto_name(self):
# Objects with a set_name() method should be told their name
class NamedObject(object):
def set_name(self, name):
self.name = name
self.d['example'] = NamedObject()
self.assertEqual(self.d['example'].name, 'example')
def test_errors(self):
# Make sure set_name() errors are raised, not swallowed
class ErrorObject(object):
"Just a simple object that errors out while setting its name"
def set_name(self, name):
raise TypeError('Something went wrong')
with self.assertRaises(TypeError):
self.d['example'] = ErrorObject()
|
import steel
import unittest
class NameAwareOrderedDictTests(unittest.TestCase):
def setUp(self):
self.d = steel.NameAwareOrderedDict()
def test_ignore_object(self):
# Objects without a set_name() method should be ignored
self.d['example'] = object()
self.assertFalse(hasattr(self.d['example'], 'name'))
def test_auto_name(self):
# Objects with a set_name() method should be told their name
class NamedObject(object):
def set_name(self, name):
self.name = name
self.d['example'] = NamedObject()
self.assertEqual(self.d['example'].name, 'example')
def test_errors(self):
# Make sure set_name() errors are raised, not swallowed
class ErrorObject(object):
"Just a simple object that errors out while setting its name"
def set_name(self, name):
raise TypeError('Something went wrong')
with self.assertRaises(TypeError):
self.d['example'] = ErrorObject()
class SizeTests(unittest.TestCase):
def test_explicit_sizes(self):
class Test(steel.Structure):
field1 = steel.Bytes(size=2)
field2 = steel.Bytes(size=4)
self.assertEqual(Test.size, 6)
|
Add a simple test for calculating the size of a structure
|
Add a simple test for calculating the size of a structure
|
Python
|
bsd-3-clause
|
gulopine/steel-experiment
|
73af60749ea7b031473bc5f0c3ddd60d39ec6fa6
|
docs/examples/customer_fetch/get_customer.py
|
docs/examples/customer_fetch/get_customer.py
|
from sharpy.product import CheddarProduct
# Get a product instance to work with
product = CheddarProduct(
username = CHEDDAR_USERNAME,
password = CHEDDAR_PASSWORD,
product_code = CHEDDAR_PRODUCT,
)
# Get the customer from Cheddar Getter
customer = product.get_customer(code='1BDI')
|
from sharpy.product import CheddarProduct
from sharpy import exceptions

# Get a product instance to work with.
# CHEDDAR_USERNAME / CHEDDAR_PASSWORD / CHEDDAR_PRODUCT are expected to be
# defined before this example snippet runs (e.g. in settings).
product = CheddarProduct(
    username = CHEDDAR_USERNAME,
    password = CHEDDAR_PASSWORD,
    product_code = CHEDDAR_PRODUCT,
)

try:
    # Get the customer from Cheddar Getter
    customer = product.get_customer(code='1BDI')
except exceptions.NotFound, err:
    # No customer with that code exists on Cheddar Getter yet.
    print 'You do not appear to be a customer yet'
else:
    # Test if the customer's subscription is canceled
    if customer.subscription.canceled:
        # 'paypal-pending' is not a true cancellation: the subscription
        # is waiting on PayPal authorization.
        if customer.subscription.cancel_type == 'paypal-pending':
            print 'Waiting for Paypal authorization'
        else:
            print 'Your subscription appears to have been cancelled'
    else:
        print 'Your subscription appears to be active'
|
Add a bit to the get customer to show handling not-found and testing for canceled status
|
Add a bit to the get customer to show handling not-found and testing for canceled status
|
Python
|
bsd-3-clause
|
SeanOC/sharpy,smartfile/sharpy
|
7f9ea07f2ee55ff36009bc67068c36bc1180c909
|
tests/test_credentials.py
|
tests/test_credentials.py
|
import json
import keyring
from pyutrack import Credentials
from tests import PyutrackTest
class CredentialsTests(PyutrackTest):
def test_empty(self):
c = Credentials('root')
self.assertIsNone(c.password)
self.assertIsNone(c.cookies)
def test_persistence(self):
c = Credentials('root', 'passwd', {"key": "value"})
c.persist()
self.assertEqual(
keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'), 'passwd'
)
self.assertEqual(
json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')),
{"key": "value"}
)
|
import json

import keyring

from pyutrack import Credentials
from tests import PyutrackTest


class CredentialsTests(PyutrackTest):
    """Tests for Credentials persistence through the system keyring."""

    def test_empty(self):
        # Credentials with no stored secrets expose None for both fields.
        c = Credentials('root')
        self.assertIsNone(c.password)
        self.assertIsNone(c.cookies)

    def test_persistence(self):
        # persist() writes the password and the JSON-encoded cookie dict
        # to the keyring, keyed by username.
        c = Credentials('root', 'passwd', {"key": "value"})
        c.persist()
        self.assertEqual(
            keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'), 'passwd'
        )
        self.assertEqual(
            json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')),
            {"key": "value"}
        )

    def test_reload(self):
        # Persist credentials, then construct a fresh Credentials object.
        # NOTE(review): the assertions below only re-check the keyring
        # contents; nothing asserts that `c` actually reloaded the
        # password/cookies — presumably Credentials('root') reads them
        # back. Consider asserting on c.password / c.cookies directly.
        Credentials('root', 'passwd', {"key": "value"}).persist()
        c = Credentials('root')
        self.assertEqual(
            keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'), 'passwd'
        )
        self.assertEqual(
            json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')),
            {"key": "value"}
        )
|
Add test for credentials reload
|
Add test for credentials reload
|
Python
|
mit
|
alisaifee/pyutrack,alisaifee/pyutrack
|
82b7e46ebdeb154963520fec1d41cc624ceb806d
|
tests/test_vendcrawler.py
|
tests/test_vendcrawler.py
|
import unittest
from vendcrawler.scripts.vendcrawler import VendCrawler
class TestVendCrawlerMethods(unittest.TestCase):
def test_get_links(self):
links = VendCrawler().get_links(2)
self.assertEqual(links,
['https://sarahserver.net/?module=vendor&p=1',
'https://sarahserver.net/?module=vendor&p=2'])
def test_get_page_count(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
page_count = VendCrawler().get_page_count(str(data))
self.assertEqual(int(page_count), 84)
if __name__ == '__main__':
unittest.main()
|
import unittest

from vendcrawler.scripts.vendcrawler import VendCrawler


class TestVendCrawlerMethods(unittest.TestCase):
    """Tests for VendCrawler's URL generation and page-count parsing."""

    def test_get_links(self):
        # The constructor arguments are irrelevant to get_links(), so
        # placeholder values are passed.
        links = VendCrawler('a', 'b', 'c').get_links(2)
        self.assertEqual(links,
                         ['https://sarahserver.net/?module=vendor&p=1',
                          'https://sarahserver.net/?module=vendor&p=2'])

    def test_get_page_count(self):
        # Parses the page count out of a saved HTML fixture.
        with open('test_vendcrawler.html', 'r') as f:
            data = f.read()
        page_count = VendCrawler('a', 'b', 'c').get_page_count(str(data))
        self.assertEqual(int(page_count), 84)


if __name__ == '__main__':
    unittest.main()
|
Fix test by passing placeholder variables.
|
Fix test by passing placeholder variables.
|
Python
|
mit
|
josetaas/vendcrawler,josetaas/vendcrawler,josetaas/vendcrawler
|
56fca00d992c84e46e60fa8b9ea66eb9eadc7508
|
mgsv_names.py
|
mgsv_names.py
|
from __future__ import unicode_literals, print_function
import sqlite3, os, random
_select = 'select {0} from {1} limit 1 offset abs(random()) % (select count({0}) from {1});'
_uncommon_select = 'select value from uncommons where key=?;'
def generate_name():
conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db'))
cursor = conn.cursor()
adj = cursor.execute(_select.format('adjective', 'adjectives')).fetchone()[0]
anim = cursor.execute(_select.format('animal', 'animals')).fetchone()[0]
rare = cursor.execute(_select.format('name', 'rares')).fetchone()[0]
uncommon_anim = cursor.execute(_uncommon_select, [adj]).fetchone()
uncommon_adj = cursor.execute(_uncommon_select, [anim]).fetchone()
conn.close()
r = random.random()
if r < 0.001 or r >= 0.999:
return rare
elif r < 0.3 and uncommon_anim is not None:
return ' '.join((adj, uncommon_anim[0]))
elif r >= 0.7 and uncommon_adj is not None:
return ' '.join((uncommon_adj[0], anim))
return ' '.join((adj, anim))
if __name__ == '__main__':
print(generate_name())
|
from __future__ import unicode_literals, print_function
import sqlite3, os, random

# Selects one random row from a table, uniformly, via a random offset.
_select_random = 'select {0} from {1} limit 1 offset abs(random()) % (select count({0}) from {1});'
# Looks up an uncommon pairing keyed by an adjective or animal name.
_select_uncommon = 'select value from uncommons where key=?;'


def generate_name():
    """Return a randomly generated name from the bundled names.db.

    Odds: ~0.2% a rare full name; otherwise up to a 30% chance of an
    uncommon adjective/animal pairing when one exists for the rolled
    word; else a plain "adjective animal" combination.
    """
    conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db'))
    cursor = conn.cursor()
    adj = cursor.execute(_select_random.format('adjective', 'adjectives')).fetchone()[0]
    anim = cursor.execute(_select_random.format('animal', 'animals')).fetchone()[0]
    rare = cursor.execute(_select_random.format('name', 'rares')).fetchone()[0]
    # Uncommon pairings may not exist for a given word; fetchone() is None then.
    uncommon_anim = cursor.execute(_select_uncommon, [adj]).fetchone()
    uncommon_adj = cursor.execute(_select_uncommon, [anim]).fetchone()
    conn.close()
    r = random.random()
    if r < 0.001 or r >= 0.999:
        return rare
    elif r < 0.3 and uncommon_anim is not None:
        return ' '.join((adj, uncommon_anim[0]))
    elif r >= 0.7 and uncommon_adj is not None:
        return ' '.join((uncommon_adj[0], anim))
    return ' '.join((adj, anim))


if __name__ == '__main__':
    # Print a sample of 20 generated names when run as a script.
    for _ in range(20):
        print(generate_name())
|
Rename the SQL module vars for consistency.
|
Rename the SQL module vars for consistency.
|
Python
|
unlicense
|
rotated8/mgsv_names
|
c26fc5da048bb1751bb6401dbdb8839f89d82c1e
|
nova/policies/server_diagnostics.py
|
nova/policies/server_diagnostics.py
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-server-diagnostics'
server_diagnostics_policies = [
policy.DocumentedRuleDefault(
BASE_POLICY_NAME,
base.RULE_ADMIN_API,
"Show the usage data for a server",
[
{
'method': 'GET',
'path': '/servers/{server_id}/diagnostics'
}
]),
]
def list_rules():
return server_diagnostics_policies
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from oslo_policy import policy

from nova.policies import base


# Policy rule name for the server diagnostics API.
BASE_POLICY_NAME = 'os_compute_api:os-server-diagnostics'


server_diagnostics_policies = [
    policy.DocumentedRuleDefault(
        name=BASE_POLICY_NAME,
        # Admin-only by default; scope_types lets both system- and
        # project-scoped tokens be evaluated against this rule.
        check_str=base.RULE_ADMIN_API,
        description="Show the usage data for a server",
        operations=[
            {
                'method': 'GET',
                'path': '/servers/{server_id}/diagnostics'
            }
        ],
        scope_types=['system', 'project']),
]


def list_rules():
    """Return the policy rules defined by this module."""
    return server_diagnostics_policies
|
Introduce scope_types in server diagnostics
|
Introduce scope_types in server diagnostics
oslo.policy introduced the scope_type feature which can
control the access level at system-level and project-level.
- https://docs.openstack.org/oslo.policy/latest/user/usage.html#setting-scope
- http://specs.openstack.org/openstack/keystone-specs/specs/keystone/queens/system-scope.html
Appropriate scope_type for nova case:
- https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope
This commit introduce scope_type for server diagnostics API policies
as 'system' and 'project' but we will keep default as SYSTEM_ADMIN only.
Also adds the test case with scope_type enabled and verify we
pass and fail the policy check with expected context.
Partial implement blueprint policy-defaults-refresh
Change-Id: Ie1749d4f85b8bdc4110f57c9a33e54e9551cb7e3
|
Python
|
apache-2.0
|
mahak/nova,klmitch/nova,openstack/nova,klmitch/nova,klmitch/nova,mahak/nova,mahak/nova,klmitch/nova,openstack/nova,openstack/nova
|
f25dde13d04e9c6eda28ac76444682e53accbdb3
|
src/webapp/tasks.py
|
src/webapp/tasks.py
|
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
try:
from uwsgidecorators import spool
except ImportError as e:
def spool(fn):
def nufun(*args, **kwargs):
raise e
return nufun
@spool
def get_aqua_distance(args):
team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
if team is None:
return
target = MapPoint.from_team(team)
#aqua = MapPoint(51.04485, 13.74011) # real aqua
aqua = MapPoint(51.05299472808838, 13.742453455924988) # hfbk
team.location.center_distance = simple_distance(target, aqua)
db.session.commit()
|
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
from cfg import config

try:
    from uwsgidecorators import spool
except ImportError as e:
    # Outside a uWSGI context the spool decorator is unavailable; replace
    # it with a stub that re-raises the original ImportError when called.
    def spool(fn):
        def nufun(*args, **kwargs):
            raise e
        return nufun


@spool
def get_aqua_distance(args):
    """Spooled task: compute and store a team's distance to the center point.

    `args` is the uWSGI spooler payload dict; a 'team_id' key is expected.
    Returns silently when the team does not exist.
    """
    team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
    if team is None:
        return
    target = MapPoint.from_team(team)
    # The reference point comes from configuration (lat/lon tuple).
    aqua = MapPoint(*config.CENTER_POINT)
    team.location.center_distance = simple_distance(target, aqua)
    db.session.commit()
|
Read center point from config
|
Read center point from config
Signed-off-by: Jan Losinski <577c4104c61edf9f052c616c0c23e67bef4a9955@wh2.tu-dresden.de>
|
Python
|
bsd-3-clause
|
eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system
|
638f52f59135d151a3c7ed4f84fc0716c6c0d69d
|
mcbench/xpath.py
|
mcbench/xpath.py
|
import sys
import lxml.etree
class XPathError(Exception):
pass
def parse_xml_filename(filename):
return lxml.etree.parse(filename)
def compile_xpath(query):
try:
return lxml.etree.XPath(query)
except lxml.etree.XPathSyntaxError as e:
raise XPathError(e.msg), None, sys.exc_info()[2]
def register_extensions():
ns = lxml.etree.FunctionNamespace(None)
ns['is_call'] = lambda c, n: is_call(c.context_node, n)
def is_call(node, name):
return (node.tag == 'ParameterizedExpr' and
node[0].tag == 'NameExpr' and
node[0].get('kind') == 'FUN' and
node[0][0].get('nameId') == name)
|
import sys

import lxml.etree


class XPathError(Exception):
    """Raised when an XPath query fails to compile."""
    pass


def parse_xml_filename(filename):
    # Parse an XML file into an lxml ElementTree.
    return lxml.etree.parse(filename)


def compile_xpath(query):
    """Compile `query` into an lxml XPath object.

    Wraps lxml's XPathSyntaxError in our XPathError while preserving the
    original traceback (Python 2 three-argument raise syntax).
    """
    try:
        return lxml.etree.XPath(query)
    except lxml.etree.XPathSyntaxError as e:
        raise XPathError(e.msg), None, sys.exc_info()[2]


def register_extensions():
    # Expose is_call() as an XPath extension function in the default
    # (null) function namespace so queries can call it directly.
    ns = lxml.etree.FunctionNamespace(None)
    ns['is_call'] = is_call
def is_call(context, *names):
    """XPath extension: is the context node a call to any of `names`?

    A call is a ParameterizedExpr whose first child is a NameExpr of
    kind FUN; the called name is that NameExpr's nameId attribute.

    Can be invoked like
        is_call('eval', 'feval')  -> names is a tuple of strings
        is_call(//some/sequence)  -> names[0] is a list of strings
    In both cases the names are OR'd together.
    """
    node = context.context_node
    if node.tag != 'ParameterizedExpr':
        return False
    if node[0].tag != 'NameExpr' or node[0].get('kind') != 'FUN':
        return False
    called_name = node[0][0].get('nameId')
    for name in names:
        if isinstance(name, basestring):
            # Compare against the whole string. The previous code fell
            # through to the any(...) branch for non-matching strings,
            # which iterated the string's *characters* and could falsely
            # match a single-character call name (e.g. a call to 'f'
            # matched is_call('fg')).
            if called_name == name:
                return True
        elif any(called_name == n for n in name):
            return True
    return False
|
Make is_call handle multiple arguments.
|
Make is_call handle multiple arguments.
Can now be called with a sequence, as in
is_call(ancestor::Function/@name)
or just several literals, as in is_call('eval', 'feval').
In both cases, it does an or.
|
Python
|
mit
|
isbadawi/mcbench,isbadawi/mcbench
|
b482eb5c8ff7dc346a3c7037c2218a4b2f2d61c4
|
setup/create_player_seasons.py
|
setup/create_player_seasons.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import concurrent.futures
from db.common import session_scope
from db.player import Player
from utils.player_data_retriever import PlayerDataRetriever
def create_player_seasons(simulation=False):
data_retriever = PlayerDataRetriever()
with session_scope() as session:
players = session.query(Player).all()[:]
with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads:
future_tasks = {
threads.submit(
data_retriever.retrieve_player_seasons,
player.player_id, simulation
): player for player in players
}
for future in concurrent.futures.as_completed(future_tasks):
try:
plr_seasons = future.result()
except Exception as e:
print("Concurrent task generated an exception: %s" % e)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import logging
import concurrent.futures

from db.common import session_scope
from db.player import Player
from utils.player_data_retriever import PlayerDataRetriever

logger = logging.getLogger(__name__)


def create_player_seasons(simulation=False):
    """Retrieve season statistics for every player using a thread pool.

    `simulation` is passed through to the retriever (no persistence when
    simulating). Logs the total number of retrieved statistics items.
    """
    data_retriever = PlayerDataRetriever()
    plr_season_count = 0

    with session_scope() as session:
        # Materialize the player list up front ([:] forces a copy).
        players = session.query(Player).all()[:]

        with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads:
            # Map each future back to its player for bookkeeping.
            future_tasks = {
                threads.submit(
                    data_retriever.retrieve_player_seasons,
                    player.player_id, simulation
                ): player for player in players
            }
            for future in concurrent.futures.as_completed(future_tasks):
                try:
                    plr_season_count += len(future.result())
                except Exception as e:
                    # Keep processing remaining players on individual failures.
                    print("Concurrent task generated an exception: %s" % e)

    logger.info("+ %d statistics items retrieved overall" % plr_season_count)
|
Add logging to player season creation script
|
Add logging to player season creation script
|
Python
|
mit
|
leaffan/pynhldb
|
36cc1ecc2b64d5c31ea590dddbf9e12c71542c7d
|
sphinxcontrib/openstreetmap.py
|
sphinxcontrib/openstreetmap.py
|
# -*- coding: utf-8 -*-
"""
sphinxcontrib.openstreetmap
===========================
Embed OpenStreetMap on your documentation.
:copyright: Copyright 2015 HAYASHI Kentaro <kenhys@gmail.com>
:license: BSD, see LICENSE for details.
"""
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx.util.compat import Directive
class openstreetmap(nodes.General, nodes.Element):
pass
class OpenStreetMapDirective(Directive):
"""Directive for embedding OpenStreetMap"""
has_content = False
option_spec = {
'id': directives.unchanged,
'label': directives.unchanged,
'marker': directives.unchanged,
}
def run(self):
node = openstreetmap()
if 'id' in self.options:
node['id'] = self.options['id']
else:
msg = ('openstreetmap directive needs uniqueue id for map data')
return [document.reporter.warning(msg, line=self.lineno)]
return [node]
def visit_openstreetmap_node(self, node):
self.body.append("<div id='openstreetmap' style='color:red'>OpenStreetMap directive</div>")
def depart_openstreetmap_node(self, node):
pass
def setup(app):
app.add_node(openstreetmap,
html=(visit_openstreetmap_node, depart_openstreetmap_node))
app.add_directive('openstreetmap', OpenStreetMapDirective)
|
# -*- coding: utf-8 -*-
"""
    sphinxcontrib.openstreetmap
    ===========================

    Embed OpenStreetMap on your documentation.

    :copyright: Copyright 2015 HAYASHI Kentaro <kenhys@gmail.com>
    :license: BSD, see LICENSE for details.
"""

from docutils import nodes
from docutils.parsers.rst import directives
from sphinx.util.compat import Directive


class openstreetmap(nodes.General, nodes.Element):
    """Doctree node representing an embedded OpenStreetMap widget."""
    pass
class OpenStreetMapDirective(Directive):
    """Directive for embedding OpenStreetMap.

    Requires an ``id`` option that uniquely identifies the map's data;
    an optional ``label`` is accepted but not yet rendered.
    """
    has_content = False
    option_spec = {
        'id': directives.unchanged,
        'label': directives.unchanged,
    }

    def run(self):
        node = openstreetmap()
        if 'id' in self.options:
            node['id'] = self.options['id']
        else:
            msg = ('openstreetmap directive needs unique id for map data')
            # Bug fix: `document` was an undefined name here (NameError
            # whenever the id option was missing); the document is reached
            # through the directive's state machine.
            return [self.state.document.reporter.warning(msg, line=self.lineno)]
        return [node]
def visit_openstreetmap_node(self, node):
    # Placeholder HTML output for the map container.
    self.body.append("<div id='openstreetmap' style='color:red'>OpenStreetMap directive</div>")


def depart_openstreetmap_node(self, node):
    # Nothing to close for the placeholder div.
    pass


def setup(app):
    """Sphinx extension entry point: register the node and the directive."""
    app.add_node(openstreetmap,
                 html=(visit_openstreetmap_node, depart_openstreetmap_node))
    app.add_directive('openstreetmap', OpenStreetMapDirective)
|
Remove marker from option spec
|
Remove marker from option spec
|
Python
|
bsd-2-clause
|
kenhys/sphinxcontrib-openstreetmap,kenhys/sphinxcontrib-openstreetmap
|
8ce1def3020570c8a3e370261fc9c7f027202bdf
|
owapi/util.py
|
owapi/util.py
|
"""
Useful utilities.
"""
import json
from kyokai import Request
from kyokai.context import HTTPRequestContext
def jsonify(func):
"""
JSON-ify the response from a function.
"""
async def res(ctx: HTTPRequestContext):
result = await func(ctx)
assert isinstance(ctx.request, Request)
if isinstance(result, tuple):
new_result = {**{"_request": {"route": ctx.request.path, "api_ver": 1}},
**result[0]}
if len(result) == 1:
return json.dumps(new_result), 200, {"Content-Type": "application/json"}
elif len(result) == 2:
return json.dumps(new_result[0]), result[1], {"Content-Type": "application/json"}
else:
return json.dumps(new_result), result[1], {**{"Content-Type": "application/json"}, **result[2]}
else:
new_result = {**{"_request": {"route": ctx.request.path, "api_ver": 1}},
**result}
return json.dumps(new_result), 200, {"Content-Type": "application/json"}
return res
|
"""
Useful utilities.
"""
import json
import aioredis
from kyokai import Request
from kyokai.context import HTTPRequestContext
async def with_cache(ctx: HTTPRequestContext, func, *args, expires=300):
"""
Run a coroutine with cache.
Stores the result in redis.
"""
assert isinstance(ctx.redis, aioredis.Redis)
built = func.__name__ + repr(args)
# Check for the key.
# Uses a simple func name + repr(args) as the key to use.
got = await ctx.redis.get(built)
if got:
return got.decode()
# Call the function.
result = await func(ctx, *args)
# Store the result as cached.
await ctx.redis.set(built, result, expire=expires)
return result
def jsonify(func):
"""
JSON-ify the response from a function.
"""
async def res(ctx: HTTPRequestContext):
result = await func(ctx)
assert isinstance(ctx.request, Request)
if isinstance(result, tuple):
new_result = {**{"_request": {"route": ctx.request.path, "api_ver": 1}},
**result[0]}
if len(result) == 1:
return json.dumps(new_result), 200, {"Content-Type": "application/json"}
elif len(result) == 2:
return json.dumps(new_result[0]), result[1], {"Content-Type": "application/json"}
else:
return json.dumps(new_result), result[1], {**{"Content-Type": "application/json"}, **result[2]}
else:
new_result = {**{"_request": {"route": ctx.request.path, "api_ver": 1}},
**result}
return json.dumps(new_result), 200, {"Content-Type": "application/json"}
return res
|
Add with_cache function for storing cached data
|
Add with_cache function for storing cached data
|
Python
|
mit
|
azah/OWAPI,SunDwarf/OWAPI
|
106ea580471387a3645877f52018ff2880db34f3
|
live_studio/config/forms.py
|
live_studio/config/forms.py
|
from django import forms
from .models import Config
class ConfigForm(forms.ModelForm):
class Meta:
model = Config
exclude = ('created', 'user')
PAGES = (
('base',),
('distribution',),
('media_type',),
('architecture',),
('installer',),
('locale', 'keyboard_layout'),
)
WIZARD_FORMS = []
for fields in PAGES:
meta = type('Meta', (), {
'model': Config,
'fields': fields,
})
WIZARD_FORMS.append(type('', (forms.ModelForm,), {'Meta': meta}))
|
from django import forms

from .models import Config


class ConfigForm(forms.ModelForm):
    """Full configuration form; auto-set fields are excluded."""
    class Meta:
        model = Config
        exclude = ('created', 'user')


# One tuple of Config field names per wizard page.
PAGES = (
    ('base',),
    ('distribution',),
    ('media_type',),
    ('architecture',),
    ('installer',),
    ('locale', 'keyboard_layout'),
)

# Dynamically build one ModelForm subclass per wizard page. The
# single-choice fields are rendered with radio buttons instead of the
# default select widget.
WIZARD_FORMS = []
for fields in PAGES:
    meta = type('Meta', (), {
        'model': Config,
        'fields': fields,
        'widgets': {
            'base': forms.RadioSelect(),
            'distribution': forms.RadioSelect(),
            'media_type': forms.RadioSelect(),
            'architecture': forms.RadioSelect(),
            'installer': forms.RadioSelect(),
        },
    })
    WIZARD_FORMS.append(type('', (forms.ModelForm,), {'Meta': meta}))
|
Use radio buttons for most of the interface.
|
Use radio buttons for most of the interface.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org>
|
Python
|
agpl-3.0
|
lamby/live-studio,lamby/live-studio,lamby/live-studio,debian-live/live-studio,debian-live/live-studio,debian-live/live-studio
|
6b01cdc18fce9277991fc5628f1d6c904ad47ee6
|
BuildAndRun.py
|
BuildAndRun.py
|
import os
import subprocess
name = "gobuildmaster"
current_hash = ""
if os.path.isfile('hash'):
current_hash = open('hash').readlines()[0]
new_hash = os.popen('git rev-parse HEAD').readlines()[0]
open('hash','w').write(new_hash)
# Move the old version over
for line in os.popen('cp ' + name + ' old' + name).readlines():
print line.strip()
# Rebuild
for line in os.popen('go build').readlines():
print line.strip()
size_1 = os.path.getsize('./old' + name)
size_2 = os.path.getsize('./' + name)
running = len(os.popen('ps -ef | grep ' + name).readlines()) > 3
if size_1 != size_2 or new_hash != current_hash or not running:
if not running:
for line in os.popen('cat out.txt | mail -s "Crash Report ' + name + '" brotherlogic@gmail.com').readlines():
pass
for line in os.popen('echo "" > out.txt').readlines():
pass
for line in os.popen('killall ' + name).readlines():
pass
subprocess.Popen(['./' + name, "--quiet=false"])
|
import os
import subprocess

# Name of the binary/service this watchdog script builds and keeps running.
name = "gobuildmaster"

# Compare the last-built git revision with the current HEAD to detect
# whether a rebuild/restart is needed; remember HEAD for the next run.
current_hash = ""
if os.path.isfile('hash'):
    current_hash = open('hash').readlines()[0]
new_hash = os.popen('git rev-parse HEAD').readlines()[0]
open('hash','w').write(new_hash)

# Move the old version over
for line in os.popen('cp ' + name + ' old' + name).readlines():
    print line.strip()

# Rebuild
for line in os.popen('go build').readlines():
    print line.strip()

# A size difference between the old and new binary means the build changed.
size_1 = os.path.getsize('./old' + name)
size_2 = os.path.getsize('./' + name)

# Determine whether the binary is actually running by looking for a
# "./<name>" invocation in the process list (a plain grep count also
# matches the grep process and this script itself).
lines = os.popen('ps -ef | grep ' + name).readlines()
running = False
for line in lines:
    if "./" + name in line:
        running = True

# Restart when the binary changed, the revision changed, or it crashed.
if size_1 != size_2 or new_hash != current_hash or not running:
    if not running:
        # Mail out the captured output as a crash report, then truncate it.
        for line in os.popen('cat out.txt | mail -s "Crash Report ' + name + '" brotherlogic@gmail.com').readlines():
            pass
        for line in os.popen('echo "" > out.txt').readlines():
            pass
    for line in os.popen('killall ' + name).readlines():
        pass
    subprocess.Popen(['./' + name, "--quiet=false"])
|
Fix running detection for master
|
Fix running detection for master
|
Python
|
apache-2.0
|
brotherlogic/gobuildmaster,brotherlogic/gobuildmaster,brotherlogic/gobuildmaster
|
3ca46f1407d8984ca5cbd1eb0581765386533d71
|
observatory/rcos/tests/test_rcos.py
|
observatory/rcos/tests/test_rcos.py
|
import pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_homepage(client):
for url in (
"/donor",
"/students",
"/courses",
"/talks",
"/programming-competition",
"/achievements",
"/urp-application",
"/links-and-contacts",
"/talk-sign-up",
"/irc",
"/faq",
"/calendar",
"/howtojoin",
"/past-projects",
):
#Load Site
response = client.get(url)
#Check for normal processing
assert response.status_code in [200, 301]
|
import pytest
from django.core.urlresolvers import reverse


@pytest.mark.django_db
def test_homepage(client):
    """Smoke-test that every public page renders (200) or redirects (301)."""
    for url in (
        "/",
        "/donor",
        "/students",
        "/courses",
        "/talks",
        "/programming-competition",
        "/achievements",
        "/urp-application",
        "/links-and-contacts",
        "/talk-sign-up",
        "/irc",
        "/faq",
        "/calendar",
        "/howtojoin",
        "/past-projects",
    ):
        #Load Site
        response = client.get(url)
        #Check for normal processing
        assert response.status_code in [200, 301]
|
Add / to rcos tests
|
rcos: Add / to rcos tests
|
Python
|
isc
|
rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory
|
5e3c6d6ab892a87ca27c05c01b39646bd339b3f2
|
tests/test_event.py
|
tests/test_event.py
|
import unittest
from event import Event
class EventTest(unittest.TestCase):
def test_a_listener_is_notified_when_event_is_raised(self):
called = False
def listener():
nonlocal called
called = True
event = Event()
event.connect(listener)
event.fire()
self.assertTrue(called)
def test_a_listener_is_passed_correct_parameters(self):
params = ()
def listener(*args, **kwargs):
nonlocal params
params = (args, kwargs)
event = Event()
event.connect(listener)
event.fire(5, shape="square")
self.assertEquals(((5, ), {"shape": "square"}), params)
|
import unittest
from event import Event
class Mock:
    """A minimal callable test double.

    Records whether it was ever invoked (``called``) and the arguments
    of the most recent invocation (``params``, an (args, kwargs) tuple).
    """

    def __init__(self):
        self.called, self.params = False, ()

    def __call__(self, *args, **kwargs):
        # Remember that a call happened and capture its full signature.
        self.called = True
        self.params = args, kwargs
class EventTest(unittest.TestCase):
    """Tests for the Event publish/subscribe primitive."""

    def test_a_listener_is_notified_when_event_is_raised(self):
        # Firing an event must invoke every connected listener.
        listener = Mock()
        event = Event()
        event.connect(listener)
        event.fire()
        self.assertTrue(listener.called)

    def test_a_listener_is_passed_correct_parameters(self):
        # Positional and keyword arguments to fire() must reach listeners.
        listener = Mock()
        event = Event()
        event.connect(listener)
        event.fire(5, shape="square")
        self.assertEquals(((5, ), {"shape": "square"}), listener.params)
|
Refactor a lightweight Mock class.
|
Refactor a lightweight Mock class.
|
Python
|
mit
|
bsmukasa/stock_alerter
|
291923f4ad1fc0041284a73d6edad43e6047fafc
|
workspace/commands/status.py
|
workspace/commands/status.py
|
from __future__ import absolute_import
import os
import logging
from workspace.commands import AbstractCommand
from workspace.commands.helpers import ProductPager
from workspace.scm import stat_repo, repos, product_name, all_branches, is_repo
log = logging.getLogger(__name__)
class Status(AbstractCommand):
""" Show status on current product or all products in workspace """
alias = 'st'
def run(self):
try:
scm_repos = repos()
in_repo = is_repo(os.getcwd())
optional = len(scm_repos) == 1
pager = ProductPager(optional=optional)
for repo in scm_repos:
stat_path = os.getcwd() if in_repo else repo
output = stat_repo(stat_path, True)
nothing_to_commit = 'nothing to commit' in output and 'Your branch is ahead of' not in output
branches = all_branches(repo)
child_branches = [b for b in branches if '@' in b]
if len(child_branches) > 1:
if nothing_to_commit:
output = '# Branches: %s' % ' '.join(branches)
nothing_to_commit = False
elif len(branches) > 1:
output = '# Branches: %s\n#\n%s' % (' '.join(branches), output)
if output and not nothing_to_commit:
pager.write(product_name(repo), output)
finally:
pager.close_and_wait()
|
from __future__ import absolute_import
import os
import logging
from workspace.commands import AbstractCommand
from workspace.commands.helpers import ProductPager
from workspace.scm import stat_repo, repos, product_name, all_branches, is_repo
log = logging.getLogger(__name__)
class Status(AbstractCommand):
    """ Show status on current product or all products in workspace """
    alias = 'st'

    def run(self):
        scm_repos = repos()
        in_repo = is_repo(os.getcwd())
        optional = len(scm_repos) == 1
        # Bug fix: the pager must exist before entering the try block --
        # previously it was created inside, so any failure above it
        # raised a NameError in the finally clause instead of the real
        # exception.
        pager = ProductPager(optional=optional)

        try:
            for repo in scm_repos:
                stat_path = os.getcwd() if in_repo else repo
                output = stat_repo(stat_path, True)
                nothing_to_commit = 'nothing to commit' in output and 'Your branch is ahead of' not in output

                branches = all_branches(repo)
                child_branches = [b for b in branches if '@' in b]

                # Show the branch list when there are child branches or
                # when only a single repo is being inspected.
                if len(child_branches) > 1 or len(scm_repos) == 1:
                    if nothing_to_commit:
                        output = '# Branches: %s' % ' '.join(branches)
                        nothing_to_commit = False
                    elif len(branches) > 1:
                        output = '# Branches: %s\n#\n%s' % (' '.join(branches), output)

                if output and not nothing_to_commit:
                    pager.write(product_name(repo), output)
        finally:
            pager.close_and_wait()
|
Fix bug to display all branches when there is only 1 repo
|
Fix bug to display all branches when there is only 1 repo
|
Python
|
mit
|
maxzheng/workspace-tools
|
b18bdf11141cf47319eed9ba2b861ebc287cf5ff
|
pyqs/utils.py
|
pyqs/utils.py
|
import base64
import json
import pickle
def decode_message(message):
message_body = message.get_body()
json_body = json.loads(message_body)
if 'task' in message_body:
return json_body
else:
# Fallback to processing celery messages
return decode_celery_message(json_body)
def decode_celery_message(json_task):
message = base64.decodestring(json_task['body'])
return pickle.loads(message)
def function_to_import_path(function):
return "{}.{}".format(function.__module__, function.func_name)
|
import base64
import json
import pickle


def decode_message(message):
    """Decode an SQS message body into a task dict.

    Native pyqs messages are plain JSON containing a 'task' key; anything
    else is assumed to be a celery-formatted message and handed off to
    decode_celery_message().
    """
    message_body = message.get_body()
    json_body = json.loads(message_body)
    if 'task' in message_body:
        return json_body
    else:
        # Fallback to processing celery messages
        return decode_celery_message(json_body)


def decode_celery_message(json_task):
    """Decode a celery message body: base64-wrapped JSON or pickle.

    NOTE(review): base64.decodestring is the Python 2 API (removed in
    modern Python 3); this module appears to target Python 2.
    """
    message = base64.decodestring(json_task['body'])
    try:
        # Prefer celery's JSON serialization when the payload parses.
        return json.loads(message)
    except ValueError:
        pass
    # Otherwise fall back to celery's pickle serialization.
    return pickle.loads(message)


def function_to_import_path(function):
    # Build the dotted import path ("module.function") for a task function.
    # (func_name is the Python 2 spelling of __name__.)
    return "{}.{}".format(function.__module__, function.func_name)
|
Add fallback for loading json encoded celery messages
|
Add fallback for loading json encoded celery messages
|
Python
|
mit
|
spulec/PyQS
|
77b87f5657583a5418d57f712b52bbcd6e9421aa
|
puzzle.py
|
puzzle.py
|
#!/usr/bin/python3
class Puzzle:
def get_all_exits(self, graph):
exits = []
for key, value in graph.items():
for item in value:
if 'Exit' in item:
exits += item
return exits
def find_all_paths(self, graph, start, end, path=None):
if path is None:
path = []
path = path + [start]
if start == end:
return [path]
if start not in graph:
return []
paths = []
for node in graph[start]:
if node not in path:
newpaths = self.find_all_paths(graph, node, end, path)
for newpath in newpaths:
paths.append(newpath)
return paths
def solve(self, graph=None):
unique_paths = []
for exit in self.get_all_exits(graph):
for start, connected_nodes in graph.items():
unique_paths += self.find_all_paths(graph, start, exit)
return unique_paths
|
#!/usr/bin/python3
class Puzzle:
    """Finds all simple paths from every node of a graph to its exits.

    The graph is a dict mapping node name -> list of connected node
    names; exit nodes are those whose name contains 'Exit'.
    """

    def get_all_exits(self, graph):
        """Return the names of all exit nodes referenced by the graph."""
        exits = []
        for root_node, connected_nodes in graph.items():
            for node in connected_nodes:
                if 'Exit' in node:
                    # Bug fix: `exits += node` extended the list with the
                    # individual *characters* of the node name, so no
                    # exit name was ever returned intact; append the
                    # whole name instead.
                    exits.append(node)
        return exits

    def find_all_paths(self, graph, start, end, path=None):
        """Return every simple (cycle-free) path from `start` to `end`."""
        if path is None:
            path = []
        path = path + [start]
        if start == end:
            return [path]
        if start not in graph:
            return []
        paths = []
        for node in graph[start]:
            if node not in path:
                newpaths = self.find_all_paths(graph, node, end, path)
                for newpath in newpaths:
                    paths.append(newpath)
        return paths

    def solve(self, graph=None):
        """Return all paths from every start node to every exit."""
        unique_paths = []
        for exit in self.get_all_exits(graph):
            for start, connected_nodes in graph.items():
                unique_paths += self.find_all_paths(graph, start, exit)
        return unique_paths
|
Rename vars in get_all_exits to make it more clear
|
Rename vars in get_all_exits to make it more clear
|
Python
|
mit
|
aaronshaver/graph-unique-paths
|
f7e2bcf941e2a15a3bc28ebf3f15244df6f0d758
|
posts/versatileimagefield.py
|
posts/versatileimagefield.py
|
from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO
class Watermark(FilteredImage):
def process_image(self, image, image_format, save_kwargs={}):
"""
Returns a BytesIO instance of `image` with inverted colors
"""
if image.mode != 'RGBA':
image = image.convert('RGBA')
txt = Image.new('RGBA', image.size, (255,255,255,0))
fontsize = int(image.size[1] * 0.1)
# get a font
fnt = ImageFont.truetype(
'/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf',
fontsize,
)
# get a drawing context
d = ImageDraw.Draw(txt)
# draw text, half opacity
d.text(
(10, image.size[1] - 10 - fontsize),
settings.WATERMARK_TEXT,
font=fnt,
fill=(255,255,255,30)
)
out = Image.alpha_composite(image, txt)
out = out.convert('RGB')
imagefile = BytesIO()
out.save(
imagefile,
**save_kwargs
)
return imagefile
versatileimagefield_registry.register_filter('watermark', Watermark)
|
import os.path
from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO
class Watermark(FilteredImage):
def process_image(self, image, image_format, save_kwargs={}):
"""
Returns a BytesIO instance of `image` with inverted colors
"""
if image.mode != 'RGBA':
image = image.convert('RGBA')
txt = Image.new('RGBA', image.size, (255,255,255,0))
height = image.size[1]
fontsize = int(image.size[1] * 0.1)
# get a font
fnt = ImageFont.truetype(
os.path.join(
os.path.dirname(os.path.dirname(__file__)),
'font', 'conthrax-sb.ttf'
),
fontsize,
)
# get a drawing context
d = ImageDraw.Draw(txt)
# draw text, half opacity
d.text(
(10 + fontsize * .2, height - 10 - fontsize - fontsize * .2),
settings.WATERMARK_TEXT,
font=fnt,
fill=(255,255,255,30)
)
out = Image.alpha_composite(image, txt)
out = out.convert('RGB')
imagefile = BytesIO()
out.save(
imagefile,
**save_kwargs
)
return imagefile
versatileimagefield_registry.register_filter('watermark', Watermark)
|
Use custom font for watermark
|
Use custom font for watermark
Signed-off-by: Michal Čihař <a2df1e659c9fd2578de0a26565357cb273292eeb@cihar.com>
|
Python
|
agpl-3.0
|
nijel/photoblog,nijel/photoblog
|
01e62119750d0737e396358dbf45727dcbb5732f
|
tests/__init__.py
|
tests/__init__.py
|
import sys
import unittest
def main():
if sys.version_info[0] >= 3:
from unittest.main import main
main(module=None)
else:
unittest.main()
if __name__ == '__main__':
main()
|
from unittest.main import main
if __name__ == '__main__':
main(module=None, verbosity=2)
|
Drop Python 2 support in tests
|
Drop Python 2 support in tests
|
Python
|
bsd-3-clause
|
retext-project/pymarkups,mitya57/pymarkups
|
2b74c8714b659ccf5faa615e9b5c4c4559f8d9c8
|
artbot_website/views.py
|
artbot_website/views.py
|
from django.shortcuts import render
from datetime import date, datetime, timedelta
from .models import Event
def index(request):
if date.today().isoweekday() in [5,6,7]:
weekend_start = date.today()
else:
weekend_start = date.today() + timedelta((5 - date.today().isoweekday()) % 7 )
events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start).order_by('-start')
return render(request, 'index.html', {'events': events})
|
from django.shortcuts import render
from datetime import date, datetime, timedelta
from .models import Event
def index(request):
if date.today().isoweekday() in [5,6,7]:
weekend_start = date.today()
else:
weekend_start = date.today() + timedelta((5 - date.today().isoweekday()) % 7 )
events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start, published = True).order_by('-start')
return render(request, 'index.html', {'events': events})
|
Index now only displays published articles.
|
Index now only displays published articles.
|
Python
|
mit
|
coreymcdermott/artbot,coreymcdermott/artbot
|
285eeb1c7565f8fa9fb6ba38ed843601f81cdf4e
|
tmc/models/document_topic.py
|
tmc/models/document_topic.py
|
# -*- coding: utf-8 -*-
from odoo import api, fields, models
class DocumentTopic(models.Model):
_name = 'tmc.document_topic'
_description = 'document_topic'
_inherit = 'tmc.category'
first_parent_id = fields.Many2one(
comodel_name='tmc.document_topic',
compute='_compute_first_parent',
store=True
)
document_ids = fields.Many2many(
comodel_name='tmc.document',
relation='document_main_topic_rel',
column1='main_topic_ids'
)
parent_id = fields.Many2one(
comodel_name='tmc.document_topic',
string='Main Topic'
)
child_ids = fields.One2many(
comodel_name='tmc.document_topic',
inverse_name='parent_id'
)
important = fields.Boolean()
@api.multi
@api.depends('parent_id',
'parent_id.parent_id')
def _compute_first_parent(self):
for document_topic in self:
first_parent_id = False
parent = document_topic.parent_id
while parent:
first_parent_id = parent.id
parent = parent.parent_id
document_topic.first_parent_id = first_parent_id
|
# -*- coding: utf-8 -*-
from odoo import api, fields, models
class DocumentTopic(models.Model):
_name = 'tmc.document_topic'
_description = 'document_topic'
_inherit = 'tmc.category'
_order = 'name'
first_parent_id = fields.Many2one(
comodel_name='tmc.document_topic',
compute='_compute_first_parent',
store=True
)
document_ids = fields.Many2many(
comodel_name='tmc.document',
relation='document_main_topic_rel',
column1='main_topic_ids'
)
parent_id = fields.Many2one(
comodel_name='tmc.document_topic',
string='Main Topic'
)
child_ids = fields.One2many(
comodel_name='tmc.document_topic',
inverse_name='parent_id'
)
important = fields.Boolean()
@api.multi
@api.depends('parent_id',
'parent_id.parent_id')
def _compute_first_parent(self):
for document_topic in self:
first_parent_id = False
parent = document_topic.parent_id
while parent:
first_parent_id = parent.id
parent = parent.parent_id
document_topic.first_parent_id = first_parent_id
|
Order document topics by name
|
[IMP] Order document topics by name
|
Python
|
agpl-3.0
|
tmcrosario/odoo-tmc
|
ee9f1058107f675f7f12f822ead3feb78ec10d9b
|
wagtail/utils/urlpatterns.py
|
wagtail/utils/urlpatterns.py
|
from __future__ import absolute_import, unicode_literals
from functools import update_wrapper
def decorate_urlpatterns(urlpatterns, decorator):
for pattern in urlpatterns:
if hasattr(pattern, 'url_patterns'):
decorate_urlpatterns(pattern.url_patterns, decorator)
if hasattr(pattern, '_callback'):
pattern._callback = update_wrapper(decorator(pattern.callback), pattern.callback)
return urlpatterns
|
from __future__ import absolute_import, unicode_literals
from functools import update_wrapper
from django import VERSION as DJANGO_VERSION
def decorate_urlpatterns(urlpatterns, decorator):
"""Decorate all the views in the passed urlpatterns list with the given decorator"""
for pattern in urlpatterns:
if hasattr(pattern, 'url_patterns'):
# this is an included RegexURLResolver; recursively decorate the views
# contained in it
decorate_urlpatterns(pattern.url_patterns, decorator)
if DJANGO_VERSION < (1, 10):
# Prior to Django 1.10, RegexURLPattern accepted both strings and callables as
# the callback parameter; `callback` is a property that consistently returns it as
# a callable.
#
# * if RegexURLPattern was given a string, _callback will be None, and will be
# populated on the first call to the `callback` property
# * if RegexURLPattern was given a callable, _callback will be set to that callable,
# and the `callback` property will return it
#
# In either case, we wrap the result of `callback` and write it back to `_callback`,
# so that future calls to `callback` will return our wrapped version.
if hasattr(pattern, '_callback'):
pattern._callback = update_wrapper(decorator(pattern.callback), pattern.callback)
else:
# In Django 1.10 and above, RegexURLPattern only accepts a callable as the callback
# parameter; this is directly accessible as the `callback` attribute.
if getattr(pattern, 'callback', None):
pattern.callback = update_wrapper(decorator(pattern.callback), pattern.callback)
return urlpatterns
|
Test for RegexURLPattern.callback on Django 1.10
|
Test for RegexURLPattern.callback on Django 1.10
Thanks Paul J Stevens for the initial patch, Tim Graham for review
and Matt Westcott for tweak of initial patch
|
Python
|
bsd-3-clause
|
nealtodd/wagtail,torchbox/wagtail,nutztherookie/wagtail,nealtodd/wagtail,kurtw/wagtail,mixxorz/wagtail,rsalmaso/wagtail,kurtw/wagtail,jnns/wagtail,kurtw/wagtail,nutztherookie/wagtail,wagtail/wagtail,Toshakins/wagtail,mixxorz/wagtail,gasman/wagtail,iansprice/wagtail,rsalmaso/wagtail,thenewguy/wagtail,kaedroho/wagtail,thenewguy/wagtail,mixxorz/wagtail,gasman/wagtail,kurtrwall/wagtail,nealtodd/wagtail,chrxr/wagtail,mikedingjan/wagtail,mikedingjan/wagtail,timorieber/wagtail,rsalmaso/wagtail,nilnvoid/wagtail,FlipperPA/wagtail,takeflight/wagtail,nilnvoid/wagtail,wagtail/wagtail,rsalmaso/wagtail,chrxr/wagtail,torchbox/wagtail,Toshakins/wagtail,wagtail/wagtail,FlipperPA/wagtail,Toshakins/wagtail,takeflight/wagtail,iansprice/wagtail,chrxr/wagtail,jnns/wagtail,kurtrwall/wagtail,gasman/wagtail,takeflight/wagtail,mixxorz/wagtail,Toshakins/wagtail,wagtail/wagtail,mikedingjan/wagtail,thenewguy/wagtail,nimasmi/wagtail,iansprice/wagtail,thenewguy/wagtail,jnns/wagtail,kurtw/wagtail,nilnvoid/wagtail,zerolab/wagtail,thenewguy/wagtail,nimasmi/wagtail,nutztherookie/wagtail,kurtrwall/wagtail,mikedingjan/wagtail,timorieber/wagtail,kaedroho/wagtail,mixxorz/wagtail,kaedroho/wagtail,zerolab/wagtail,chrxr/wagtail,timorieber/wagtail,nimasmi/wagtail,torchbox/wagtail,kaedroho/wagtail,iansprice/wagtail,nutztherookie/wagtail,jnns/wagtail,FlipperPA/wagtail,rsalmaso/wagtail,kaedroho/wagtail,wagtail/wagtail,takeflight/wagtail,gasman/wagtail,timorieber/wagtail,zerolab/wagtail,kurtrwall/wagtail,nealtodd/wagtail,zerolab/wagtail,FlipperPA/wagtail,nilnvoid/wagtail,gasman/wagtail,torchbox/wagtail,zerolab/wagtail,nimasmi/wagtail
|
558a25b6f3b77d6a1b087819dc40f1aa7584e7fb
|
sky/tools/release_packages.py
|
sky/tools/release_packages.py
|
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# See https://github.com/domokit/sky_engine/wiki/Release-process
import os
import subprocess
import sys
def main():
engine_root = os.path.abspath('.')
if not os.path.exists(os.path.join(engine_root, 'sky')):
print "Cannot find //sky. Is %s the Flutter engine repository?" % engine_root
return 1
pub_path = os.path.join(engine_root, 'third_party/dart-sdk/dart-sdk/bin/pub')
if args.publish:
subprocess.check_call([pub_path, 'publish', '--force'], cwd=os.path.join(engine_root, 'sky/packages/sky'))
subprocess.check_call([pub_path, 'publish', '--force'], cwd=os.path.join(engine_root, 'skysprites'))
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# See https://github.com/domokit/sky_engine/wiki/Release-process
import os
import subprocess
import sys
def main():
engine_root = os.path.abspath('.')
if not os.path.exists(os.path.join(engine_root, 'sky')):
print "Cannot find //sky. Is %s the Flutter engine repository?" % engine_root
return 1
pub_path = os.path.join(engine_root, 'third_party/dart-sdk/dart-sdk/bin/pub')
if args.publish:
subprocess.check_call([pub_path, 'publish', '--force'], cwd=os.path.join(engine_root, 'sky/packages/sky'))
subprocess.check_call([pub_path, 'publish', '--force'], cwd=os.path.join(engine_root, 'sky/packages/flx'))
subprocess.check_call([pub_path, 'publish', '--force'], cwd=os.path.join(engine_root, 'skysprites'))
if __name__ == '__main__':
sys.exit(main())
|
Add FLX to the release train
|
Add FLX to the release train
|
Python
|
bsd-3-clause
|
krisgiesing/sky_engine,devoncarew/engine,abarth/sky_engine,tvolkert/engine,Hixie/sky_engine,devoncarew/engine,mpcomplete/flutter_engine,mpcomplete/engine,devoncarew/sky_engine,chinmaygarde/sky_engine,jamesr/sky_engine,flutter/engine,chinmaygarde/sky_engine,mpcomplete/engine,lyceel/engine,devoncarew/engine,Hixie/sky_engine,tvolkert/engine,krisgiesing/sky_engine,mikejurka/engine,mpcomplete/engine,mpcomplete/flutter_engine,jamesr/flutter_engine,jamesr/sky_engine,jamesr/sky_engine,flutter/engine,cdotstout/sky_engine,aam/engine,lyceel/engine,devoncarew/engine,mpcomplete/flutter_engine,Hixie/sky_engine,rmacnak-google/engine,jason-simmons/flutter_engine,jamesr/sky_engine,abarth/sky_engine,tvolkert/engine,jason-simmons/flutter_engine,lyceel/engine,lyceel/engine,aam/engine,jason-simmons/flutter_engine,jason-simmons/sky_engine,jason-simmons/sky_engine,mikejurka/engine,jason-simmons/flutter_engine,jamesr/flutter_engine,mikejurka/engine,aam/engine,devoncarew/sky_engine,chinmaygarde/sky_engine,mpcomplete/engine,jason-simmons/sky_engine,chinmaygarde/flutter_engine,mpcomplete/engine,chinmaygarde/sky_engine,jason-simmons/flutter_engine,jamesr/flutter_engine,jamesr/sky_engine,tvolkert/engine,krisgiesing/sky_engine,flutter/engine,jason-simmons/sky_engine,tvolkert/engine,abarth/sky_engine,flutter/engine,krisgiesing/sky_engine,aam/engine,krisgiesing/sky_engine,aam/engine,Hixie/sky_engine,mikejurka/engine,krisgiesing/sky_engine,krisgiesing/sky_engine,cdotstout/sky_engine,rmacnak-google/engine,devoncarew/sky_engine,Hixie/sky_engine,devoncarew/sky_engine,rmacnak-google/engine,mikejurka/engine,chinmaygarde/flutter_engine,mpcomplete/flutter_engine,lyceel/engine,cdotstout/sky_engine,abarth/sky_engine,abarth/sky_engine,aam/engine,jason-simmons/flutter_engine,mikejurka/engine,flutter/engine,Hixie/sky_engine,aam/engine,devoncarew/sky_engine,rmacnak-google/engine,mikejurka/engine,jamesr/flutter_engine,jamesr/flutter_engine,devoncarew/sky_engine,jason-simmons/flutter_engine,devoncarew/engine,mpco
mplete/engine,flutter/engine,jamesr/sky_engine,cdotstout/sky_engine,mikejurka/engine,flutter/engine,mpcomplete/flutter_engine,devoncarew/engine,aam/engine,devoncarew/sky_engine,jason-simmons/sky_engine,jason-simmons/sky_engine,tvolkert/engine,rmacnak-google/engine,cdotstout/sky_engine,jason-simmons/flutter_engine,chinmaygarde/sky_engine,lyceel/engine,flutter/engine,rmacnak-google/engine,cdotstout/sky_engine,Hixie/sky_engine,mpcomplete/flutter_engine,jamesr/flutter_engine,cdotstout/sky_engine,jason-simmons/sky_engine,jamesr/flutter_engine,abarth/sky_engine,jamesr/flutter_engine,devoncarew/engine,tvolkert/engine,chinmaygarde/sky_engine,lyceel/engine,mikejurka/engine,chinmaygarde/flutter_engine,mpcomplete/engine,rmacnak-google/engine,chinmaygarde/flutter_engine,abarth/sky_engine,Hixie/sky_engine,chinmaygarde/flutter_engine,chinmaygarde/flutter_engine,jamesr/flutter_engine,jamesr/sky_engine,mpcomplete/engine,chinmaygarde/flutter_engine,chinmaygarde/sky_engine
|
d2c2208b39c5715deebf8d24d5fa9096a945bdcd
|
script.py
|
script.py
|
import ast
import click
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
def cli(code, printed):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
|
import ast
import click
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
def cli(code, printed, remove_builtins):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
|
Add builtin removal as an option to cli
|
Add builtin removal as an option to cli
|
Python
|
mit
|
LaurEars/codegrapher
|
1e8ecd09ce6dc44c4955f8bb2f81aa65232ad9a0
|
multi_schema/management/commands/loaddata.py
|
multi_schema/management/commands/loaddata.py
|
from django.core.management.commands import loaddata
from django.core.management.base import CommandError
from django.db import DatabaseError
from optparse import make_option
from ...models import Schema, template_schema
class Command(loaddata.Command):
option_list = loaddata.Command.option_list + (
make_option('--schema', action='store', dest='schema',
help='Specify which schema to load schema-aware models to',
default='__template__',
),
)
def handle(self, *app_labels, **options):
schema_name = options.get('schema')
if schema_name == '__template__':
# Hmm, we don't want to accidentally write data to this, so
# we should raise an exception if we are going to be
# writing any schema-aware objects.
schema = None
else:
try:
schema = Schema.objects.get(schema=options.get('schema'))
except Schema.DoesNotExist:
raise CommandError('No Schema found named "%s"' % schema_name)
schema.activate()
super(Command, self).handle(*app_labels, **options)
if schema:
schema.deactivate()
|
from django.core.management.commands import loaddata
from django.core.management.base import CommandError
from django.db import DatabaseError
from optparse import make_option
from ...models import Schema, template_schema
class Command(loaddata.Command):
option_list = loaddata.Command.option_list + (
make_option('--schema', action='store', dest='schema',
help='Specify which schema to load schema-aware models to',
default='__template__',
),
)
def handle(self, *app_labels, **options):
schema_name = options.get('schema')
if schema_name == '__template__':
# Hmm, we don't want to accidentally write data to this, so
# we should raise an exception if we are going to be
# writing any schema-aware objects.
schema = None
else:
try:
schema = Schema.objects.get(schema=options.get('schema'))
except Schema.DoesNotExist:
raise CommandError('No Schema found named "%s"' % schema_name)
schema.activate()
super(Command, self).handle(*app_labels, **options)
if schema:
schema.deactivate()
for schema in Schema.objects.all():
schema.create_schema()
|
Fix indenting. Create any schemas that were just loaded.
|
Fix indenting.
Create any schemas that were just loaded.
|
Python
|
bsd-3-clause
|
luzfcb/django-boardinghouse,luzfcb/django-boardinghouse,luzfcb/django-boardinghouse
|
c5d22fd143f952ce5e0c86b9e8bce4a06fe47063
|
bigsi/storage/__init__.py
|
bigsi/storage/__init__.py
|
from bigsi.storage.berkeleydb import BerkeleyDBStorage
from bigsi.storage.redis import RedisStorage
from bigsi.storage.rocksdb import RocksDBStorage
def get_storage(config):
return {
"rocksdb": RocksDBStorage,
"berkeleydb": BerkeleyDBStorage,
"redis": RedisStorage,
}[config["storage-engine"]](config["storage-config"])
|
from bigsi.storage.redis import RedisStorage
try:
from bigsi.storage.berkeleydb import BerkeleyDBStorage
except ModuleNotFoundError:
pass
try:
from bigsi.storage.rocksdb import RocksDBStorage
except ModuleNotFoundError:
pass
def get_storage(config):
return {
"rocksdb": RocksDBStorage,
"berkeleydb": BerkeleyDBStorage,
"redis": RedisStorage,
}[config["storage-engine"]](config["storage-config"])
|
Allow import without optional requirements
|
Allow import without optional requirements
|
Python
|
mit
|
Phelimb/cbg,Phelimb/cbg,Phelimb/cbg,Phelimb/cbg
|
33505f9b4dfeead0b01ee1b8cf3f8f228476e866
|
openpassword/crypt_utils.py
|
openpassword/crypt_utils.py
|
from Crypto.Cipher import AES
def decrypt(data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
print(data)
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
def encrypt(data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.encrypt(data)
|
from Crypto.Cipher import AES
def decrypt(data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
def encrypt(data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.encrypt(data)
|
Remove print statement from crypto utils...
|
Remove print statement from crypto utils...
|
Python
|
mit
|
openpassword/blimey,openpassword/blimey
|
f2fc7f1015fc24fdbb69069ac74a21437e94657b
|
xmantissa/plugins/sineoff.py
|
xmantissa/plugins/sineoff.py
|
from axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
from sine import sipserver, sinetheme
sineproxy = provisioning.BenefactorFactory(
name = u'sineproxy',
description = u'Sine SIP Proxy',
benefactorClass = sipserver.SineBenefactor)
plugin = offering.Offering(
name = u"Sine",
description = u"""
The Sine SIP proxy and registrar.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite),
(None, sipserver.SIPServer)),
appPowerups = (sipserver.SinePublicPage,
),
benefactorFactories = (sineproxy,),
loginInterfaces=(),
themes = (sinetheme.XHTMLDirectoryTheme('base'),)
)
|
from axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
from sine import sipserver, sinetheme
sineproxy = provisioning.BenefactorFactory(
name = u'sineproxy',
description = u'Sine SIP Proxy',
benefactorClass = sipserver.SineBenefactor)
plugin = offering.Offering(
name = u"Sine",
description = u"""
The Sine SIP proxy and registrar.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite),
(None, sipserver.SIPServer)),
appPowerups = (sipserver.SinePublicPage,
),
benefactorFactories = (sineproxy,),
themes = (sinetheme.XHTMLDirectoryTheme('base'),)
)
|
Revert 5505 - introduced numerous regressions into the test suite
|
Revert 5505 - introduced numerous regressions into the test suite
|
Python
|
mit
|
habnabit/divmod-sine,twisted/sine
|
8bfd49c7aef03f6d2ad541f466e9661b6acc5262
|
staticassets/compilers/sass.py
|
staticassets/compilers/sass.py
|
from .base import CommandCompiler
class SassCompiler(CommandCompiler):
content_type = 'text/css'
options = {'compass': True}
command = 'sass'
params = ['--trace']
def compile(self, asset):
if self.compass:
self.params.append('--compass')
if '.scss' in asset.attributes.extensions:
self.params.append('--scss')
return super(SassCompiler, self).compile(asset)
|
from .base import CommandCompiler
class SassCompiler(CommandCompiler):
content_type = 'text/css'
options = {'compass': True, 'scss': False}
command = 'sass'
params = ['--trace']
def get_args(self):
args = super(SassCompiler, self).get_args()
if self.compass:
args.append('--compass')
if self.scss:
args.append('--scss')
return args
|
Fix args being appended continuously to SassCompiler
|
Fix args being appended continuously to SassCompiler
|
Python
|
mit
|
davidelias/django-staticassets,davidelias/django-staticassets,davidelias/django-staticassets
|
f522a464e3f58a9f2ed235b48382c9db15f66029
|
eva/layers/residual_block.py
|
eva/layers/residual_block.py
|
from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from keras.engine.topology import merge
from eva.layers.masked_convolution2d import MaskedConvolution2D
def ResidualBlock(model, filters):
# 2h -> h
block = Convolution2D(filters//2, 1, 1)(model)
block = PReLU()(block)
# h 3x3 -> h
block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block)
block = PReLU()(block)
# h -> 2h
block = Convolution2D(filters, 1, 1)(block)
return PReLU()(Merge(mode='sum')([model, block]))
def ResidualBlockList(model, filters, length):
for _ in range(length):
model = ResidualBlock(model, filters)
return model
|
from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from keras.engine.topology import merge
from eva.layers.masked_convolution2d import MaskedConvolution2D
def ResidualBlock(model, filters):
# 2h -> h
block = Convolution2D(filters//2, 1, 1)(model)
block = PReLU()(block)
# h 3x3 -> h
block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block)
block = PReLU()(block)
# h -> 2h
block = Convolution2D(filters, 1, 1)(block)
return PReLU()(merge([model, block], mode='sum'))
def ResidualBlockList(model, filters, length):
for _ in range(length):
model = ResidualBlock(model, filters)
return model
|
Use the functional merge; just for formatting
|
Use the functional merge; just for formatting
|
Python
|
apache-2.0
|
israelg99/eva
|
8a7a8c3589b1e3bd3a4d8b0dc832178be26117d3
|
mozaik_membership/wizards/base_partner_merge_automatic_wizard.py
|
mozaik_membership/wizards/base_partner_merge_automatic_wizard.py
|
# Copyright 2022 ACSONE SA/NV
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import models
class BasePartnerMergeAutomaticWizard(models.TransientModel):
_inherit = "base.partner.merge.automatic.wizard"
def _merge(self, partner_ids, dst_partner=None, extra_checks=True):
partners = self.env["res.partner"].browse(partner_ids).exists()
# remove dst_partner from partners to merge
if dst_partner and dst_partner in partners:
src_partners = partners - dst_partner
else:
ordered_partners = self._get_ordered_partner(partners.ids)
src_partners = ordered_partners[:-1]
src_partners.mapped("membership_line_ids")._close(force=True)
return super(BasePartnerMergeAutomaticWizard, self)._merge(
partner_ids, dst_partner, extra_checks
)
|
# Copyright 2022 ACSONE SA/NV
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import models
class BasePartnerMergeAutomaticWizard(models.TransientModel):
_inherit = "base.partner.merge.automatic.wizard"
def _merge(self, partner_ids, dst_partner=None, extra_checks=True):
partners = self.env["res.partner"].browse(partner_ids).exists()
# remove dst_partner from partners to merge
if dst_partner and dst_partner in partners:
src_partners = partners - dst_partner
else:
ordered_partners = self._get_ordered_partner(partners.ids)
dst_partner = ordered_partners[-1]
src_partners = ordered_partners[:-1]
# since we close the membership we need to keep an instance for the security
for p in src_partners:
p.force_int_instance_id = p.int_instance_id
dst_force_int_instance_id = dst_partner.force_int_instance_id
src_partners.mapped("membership_line_ids")._close(force=True)
res = super(BasePartnerMergeAutomaticWizard, self)._merge(
partner_ids, dst_partner, extra_checks
)
# do not modify the force_int_instance_id since it should be empty if
# there is a membership_line_id
dst_partner.force_int_instance_id = dst_force_int_instance_id
return res
|
Fix the security for the merge after closing memberships
|
Fix the security for the merge after closing memberships
|
Python
|
agpl-3.0
|
mozaik-association/mozaik,mozaik-association/mozaik
|
16fe23b18f69e475858a975f3a2e3f743f4b4c57
|
zipline/__init__.py
|
zipline/__init__.py
|
"""
Zipline
"""
# This is *not* a place to dump arbitrary classes/modules for convenience,
# it is a place to expose the public interfaces.
__version__ = "0.5.11.dev"
from . import data
from . import finance
from . import gens
from . import utils
from . algorithm import TradingAlgorithm
__all__ = [
'data',
'finance',
'gens',
'utils',
'TradingAlgorithm'
]
|
#
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Zipline
"""
# This is *not* a place to dump arbitrary classes/modules for convenience,
# it is a place to expose the public interfaces.
__version__ = "0.5.11.dev"
from . import data
from . import finance
from . import gens
from . import utils
from . algorithm import TradingAlgorithm
__all__ = [
'data',
'finance',
'gens',
'utils',
'TradingAlgorithm'
]
|
Add license to module init file.
|
REL: Add license to module init file.
|
Python
|
apache-2.0
|
iamkingmaker/zipline,ChinaQuants/zipline,chrjxj/zipline,michaeljohnbennett/zipline,ronalcc/zipline,nborggren/zipline,sketchytechky/zipline,joequant/zipline,grundgruen/zipline,zhoulingjun/zipline,alphaBenj/zipline,CDSFinance/zipline,alphaBenj/zipline,umuzungu/zipline,dhruvparamhans/zipline,cmorgan/zipline,otmaneJai/Zipline,magne-max/zipline-ja,enigmampc/catalyst,Scapogo/zipline,euri10/zipline,stkubr/zipline,wilsonkichoi/zipline,zhoulingjun/zipline,keir-rex/zipline,humdings/zipline,jordancheah/zipline,keir-rex/zipline,euri10/zipline,iamkingmaker/zipline,humdings/zipline,kmather73/zipline,mattcaldwell/zipline,ronalcc/zipline,DVegaCapital/zipline,jimgoo/zipline-fork,davidastephens/zipline,MonoCloud/zipline,AlirezaShahabi/zipline,michaeljohnbennett/zipline,jimgoo/zipline-fork,quantopian/zipline,bartosh/zipline,wubr2000/zipline,aajtodd/zipline,dmitriz/zipline,CarterBain/AlephNull,bartosh/zipline,florentchandelier/zipline,semio/zipline,YuepengGuo/zipline,morrisonwudi/zipline,gwulfs/zipline,dhruvparamhans/zipline,chrjxj/zipline,dmitriz/zipline,dkushner/zipline,mattcaldwell/zipline,davidastephens/zipline,erikness/AlephOne,otmaneJai/Zipline,joequant/zipline,enigmampc/catalyst,DVegaCapital/zipline,YuepengGuo/zipline,sketchytechky/zipline,semio/zipline,gwulfs/zipline,CarterBain/AlephNull,florentchandelier/zipline,umuzungu/zipline,StratsOn/zipline,ChinaQuants/zipline,quantopian/zipline,grundgruen/zipline,nborggren/zipline,magne-max/zipline-ja,aajtodd/zipline,kmather73/zipline,CDSFinance/zipline,cmorgan/zipline,StratsOn/zipline,morrisonwudi/zipline,MonoCloud/zipline,jordancheah/zipline,dkushner/zipline,stkubr/zipline,wilsonkichoi/zipline,AlirezaShahabi/zipline,Scapogo/zipline,erikness/AlephOne,wubr2000/zipline
|
0e36a49d6a53f87cbe71fd5ec9dce524dd638122
|
fireplace/deck.py
|
fireplace/deck.py
|
import logging
import random
from .card import Card
from .enums import GameTag, Zone
from .utils import CardList
class Deck(CardList):
MAX_CARDS = 30
MAX_UNIQUE_CARDS = 2
MAX_UNIQUE_LEGENDARIES = 1
@classmethod
def fromList(cls, cards, hero):
return cls([Card(card) for card in cards], Card(hero))
def __init__(self, cards, hero, name=None):
super().__init__(cards)
self.hero = hero
if name is None:
name = "Custom %s" % (hero)
self.name = name
for card in cards:
# Don't use .zone directly as it would double-fill the deck
card.tags[GameTag.ZONE] = Zone.DECK
def __str__(self):
return self.name
def __repr__(self):
return "<%s (%i cards)>" % (self.hero, len(self))
def shuffle(self):
logging.info("Shuffling %r..." % (self))
random.shuffle(self)
|
import logging
import random
from .card import Card
from .enums import GameTag, Zone
from .utils import CardList
class Deck(CardList):
MAX_CARDS = 30
MAX_UNIQUE_CARDS = 2
MAX_UNIQUE_LEGENDARIES = 1
@classmethod
def fromList(cls, cards, hero):
return cls([Card(card) for card in cards], Card(hero))
def __init__(self, cards, hero):
super().__init__(cards)
self.hero = hero
for card in cards:
# Don't use .zone directly as it would double-fill the deck
card.tags[GameTag.ZONE] = Zone.DECK
def __repr__(self):
return "<Deck(hero=%r, count=%i)>" % (self.hero, len(self))
def shuffle(self):
logging.info("Shuffling %r..." % (self))
random.shuffle(self)
|
Drop support for naming Deck objects
|
Drop support for naming Deck objects
|
Python
|
agpl-3.0
|
smallnamespace/fireplace,Meerkov/fireplace,amw2104/fireplace,Ragowit/fireplace,beheh/fireplace,butozerca/fireplace,Ragowit/fireplace,amw2104/fireplace,liujimj/fireplace,smallnamespace/fireplace,jleclanche/fireplace,oftc-ftw/fireplace,oftc-ftw/fireplace,butozerca/fireplace,NightKev/fireplace,Meerkov/fireplace,liujimj/fireplace
|
d6a6fc478d9aaea69ff6c1f5be3ebe0c1b34f180
|
fixlib/channel.py
|
fixlib/channel.py
|
import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
dest.register('close', self.closehook)
def handle_accept(self):
client = self.accept()
SideChannel(client[0], self.dest)
def closehook(self, hook, data):
print 'HOOK-CLOSE'
self.close()
class SideChannel(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
self.buffer = None
def handle_close(self):
self.close()
def handle_read(self):
raw = self.recv(8192)
if raw:
msg = util.json_decode(json.loads(raw))
self.dest.queue(msg)
self.buffer = {'result': 'done'}
def writable(self):
return self.buffer
def handle_write(self):
self.send(json.dumps(self.buffer))
self.close()
|
import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
dest.register('close', lambda x, y: self.close())
def handle_accept(self):
client = self.accept()
SideChannel(client[0], self.dest)
class SideChannel(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
self.buffer = None
def handle_close(self):
self.close()
def handle_read(self):
raw = self.recv(8192)
if raw:
msg = util.json_decode(json.loads(raw))
self.dest.queue(msg)
self.buffer = {'result': 'done'}
def writable(self):
return self.buffer
def handle_write(self):
self.send(json.dumps(self.buffer))
self.close()
|
Use a lambda as a proxy.
|
Use a lambda as a proxy.
|
Python
|
bsd-3-clause
|
jvirtanen/fixlib
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.