commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
bc593f1716a8e36e65cf75a58e524e77d38d5d9c
|
notation/statistics.py
|
notation/statistics.py
|
# encoding: utf-8
# included for ease of use with Python 2 (which has no statistics package)
def mean(values):
return float(sum(values)) / len(values)
def median(values):
middle = (len(values) - 1) // 2
if len(values) % 2:
return values[middle]
else:
return mean(values[middle:middle + 2])
|
# encoding: utf-8
# included for ease of use with Python 2 (which has no statistics package)
def mean(values):
return float(sum(values)) / len(values)
def quantile(p):
def bound_quantile(values):
ix = int(len(values) * p)
if len(values) % 2:
return values[ix]
elif ix < 1:
return values[0]
else:
return mean(values[ix - 1:ix + 1])
return bound_quantile
Q0 = min
Q1 = quantile(0.25)
Q2 = median = quantile(0.5)
Q3 = quantile(0.75)
Q4 = max
|
Add a rudimentary quantile factory function.
|
Add a rudimentary quantile factory function.
|
Python
|
isc
|
debrouwere/python-ballpark
|
54c856e987bf570c7bcb8c449726a5d2895c0241
|
octopus/__init__.py
|
octopus/__init__.py
|
__version__ = "trunk"
def run (runnable, logging = True):
from twisted.internet import reactor
if reactor.running:
return runnable.run()
else:
def _complete (result):
reactor.stop()
def _run ():
runnable.run().addBoth(_complete)
if logging:
import sys
from twisted.python import log
log.startLogging(sys.stdout)
runnable.log += log
reactor.callWhenRunning(_run)
reactor.run()
|
__version__ = "trunk"
def run (runnable, logging = True):
from twisted.internet import reactor
if reactor.running:
return runnable.run()
else:
if logging:
import sys
from twisted.python import log
log.startLogging(sys.stdout)
runnable.on("log", log.msg)
def _complete (result):
reactor.stop()
if logging:
runnable.off("log", log.msg)
def _run ():
runnable.run().addBoth(_complete)
reactor.callWhenRunning(_run)
reactor.run()
|
Fix octopus.run for new events model.
|
Fix octopus.run for new events model.
|
Python
|
mit
|
richardingham/octopus,richardingham/octopus,richardingham/octopus,richardingham/octopus
|
fa98f32ce9c2d4e7dff8281bf5e6f154b82599d6
|
gargoyle/__init__.py
|
gargoyle/__init__.py
|
"""
gargoyle
~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
__all__ = ('gargoyle', 'ConditionSet', 'autodiscover', 'VERSION')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('gargoyle').version
except Exception, e:
VERSION = 'unknown'
from gargoyle.manager import gargoyle
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
for app in settings.INSTALLED_APPS:
# Attempt to import the app's gargoyle module.
before_import_registry = copy.copy(gargoyle._registry)
try:
import_module('%s.gargoyle' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
gargoyle._registry = before_import_registry
# load builtins
__import__('gargoyle.builtins')
|
"""
gargoyle
~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
__all__ = ('gargoyle', 'ConditionSet', 'autodiscover', 'VERSION')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('gargoyle').version
except Exception, e:
VERSION = 'unknown'
from gargoyle.manager import gargoyle
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from importlib import import_module
for app in settings.INSTALLED_APPS:
# Attempt to import the app's gargoyle module.
before_import_registry = copy.copy(gargoyle._registry)
try:
import_module('%s.gargoyle' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
gargoyle._registry = before_import_registry
# load builtins
__import__('gargoyle.builtins')
|
Use python import lib (django import lib will be removed in 1.9).
|
Use python import lib (django import lib will be removed in 1.9).
|
Python
|
apache-2.0
|
brilliant-org/gargoyle,brilliant-org/gargoyle,brilliant-org/gargoyle
|
3443c7164e490e0607fff599c497a4fc054f3c48
|
oslo_cache/_i18n.py
|
oslo_cache/_i18n.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html
"""
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.versionedobjects')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html
"""
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.cache')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
|
Update i18n domain to correct project name
|
Update i18n domain to correct project name
The current oslo_i18n domain name is listed as oslo.versionedobjects
Change-Id: I493b66efbd83fb7704fe927866a24b765feb1576
|
Python
|
apache-2.0
|
citrix-openstack-build/oslo.cache,openstack/oslo.cache,openstack/oslo.cache
|
ec235e290b4428dec2db03a19d678eba52f02fb5
|
keyring/getpassbackend.py
|
keyring/getpassbackend.py
|
"""Specific support for getpass."""
import os
import getpass
from keyring.core import get_password as original_get_password
def get_password(prompt='Password: ', stream=None,
service_name='Python',
username=None):
if username is None:
username = getpass.getuser()
return original_get_password(service_name, username)
|
"""Specific support for getpass."""
import os
import getpass
import keyring.core
def get_password(prompt='Password: ', stream=None,
service_name='Python',
username=None):
if username is None:
username = getpass.getuser()
return keyring.core.get_password(service_name, username)
|
Use module namespaces to distinguish names instead of 'original_' prefix
|
Use module namespaces to distinguish names instead of 'original_' prefix
|
Python
|
mit
|
jaraco/keyring
|
4a711a2709ec5d8a8e04bb0f735fcfaa319cffdf
|
designate/objects/validation_error.py
|
designate/objects/validation_error.py
|
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from designate.objects import base
class ValidationError(base.DesignateObject):
FIELDS = {
'path': {},
'message': {},
'validator': {},
'validator_value': {},
'raw': {},
}
@classmethod
def from_js_error(cls, js_error):
"""Convert a JSON Schema ValidationError instance into a
ValidationError instance.
"""
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
e.message = six.text_type(js_error)
e.validator = js_error.validator
e.validator_value = js_error.validator_value
e.raw = js_error._contents()
return e
class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
LIST_ITEM_TYPE = ValidationError
|
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from designate.objects import base
class ValidationError(base.DesignateObject):
FIELDS = {
'path': {},
'message': {},
'validator': {},
'validator_value': {},
'raw': {},
}
@classmethod
def from_js_error(cls, js_error):
"""Convert a JSON Schema ValidationError instance into a
ValidationError instance.
"""
e = cls()
e.path = list(getattr(js_error, 'releative_path', js_error.path))
e.message = js_error.message
e.validator = js_error.validator
e.validator_value = js_error.validator_value
e.raw = js_error._contents()
return e
class ValidationErrorList(base.ListObjectMixin, base.DesignateObject):
LIST_ITEM_TYPE = ValidationError
|
Fix the displayed error message in V2 API
|
Fix the displayed error message in V2 API
Change-Id: I07c3f1ed79fa507dbe9b76eb8f5964475516754c
|
Python
|
apache-2.0
|
tonyli71/designate,openstack/designate,ionrock/designate,ionrock/designate,ramsateesh/designate,grahamhayes/designate,cneill/designate-testing,muraliselva10/designate,muraliselva10/designate,cneill/designate-testing,openstack/designate,tonyli71/designate,muraliselva10/designate,grahamhayes/designate,ionrock/designate,tonyli71/designate,grahamhayes/designate,openstack/designate,ramsateesh/designate,cneill/designate-testing,ramsateesh/designate
|
6fc2e75426eb34755bf6dbedbd21a4345d9c5738
|
plugins/websites.py
|
plugins/websites.py
|
import re
from smartbot import utils
class Plugin:
def on_message(self, bot, msg, reply):
match = re.findall(r"(https?://[^\s]+)", msg["message"], re.IGNORECASE)
for i, url in enumerate(match):
title = utils.web.get_title(url)
if title:
reply("[{0}]: {1}".format(i, title))
def on_help(self):
return "Echos the titles of websites for any HTTP(S) URL."
|
import io
import re
import unittest
from smartbot import utils
class Plugin:
def on_message(self, bot, msg, reply):
match = re.findall(r"(https?://[^\s]+)", msg["message"], re.IGNORECASE)
for i, url in enumerate(match):
title = utils.web.get_title(url)
if title:
reply("[{0}]: {1}".format(i, title))
def on_help(self):
return "Echos the titles of websites for any HTTP(S) URL."
class Test(unittest.TestCase):
def setUp(self):
self.plugin = Plugin()
def test_message(self):
self.plugin.on_message(None, {"message": "http://tomleese.me.uk"}, lambda x: self.assertEqual("[0]: Tom Leese", x))
def test_help(self):
self.assertTrue(self.plugin.on_help())
|
Add tests for website plugin
|
Add tests for website plugin
|
Python
|
mit
|
Muzer/smartbot,Cyanogenoid/smartbot,thomasleese/smartbot-old,tomleese/smartbot
|
8852955632b0ef0250ebbe21b5bdefdecdf30e8a
|
tests/test_dem.py
|
tests/test_dem.py
|
import unittest
import numpy as np
class CalculationMethodsTestCase(unittest.TestCase):
def setUp(self):
self.dem = DEMGrid()
def test_calculate_slope(self):
sx, sy = self.dem._calculate_slope()
def test_calculate_laplacian(self):
del2z = self.dem._calculate_lapalacian()
def test_calculate_directional_laplacian(self):
alpha = np.pi/4
del2z = self.dem._calculate_lapalacian(alpha)
def test_pad_boundary(self):
dx = 4
dy = 4
grid = self.dem._griddata
pad_x = np.zeros((self.ny, dx/2))
pad_y = np.zeros((self.nx + dx, dy/2))
padgrid = np.vstack([pad_y, np.hstack([pad_x, self.dem._griddata, pad_x]), pad_y]])
self.dem._pad_boundary(dx, dy)
assertEqual(self.dem.grid, padgrid, 'Grid padded incorrectly (dx = 2, dy = 2)')
dx = 5
dy = 5
grid = self.dem._griddata
pad_x = np.zeros((self.ny, np.round(dx/2))
pad_y = np.zeros((self.nx + 2*np.round(dx/2), np.round(dy/2)))
padgrid = np.vstack([pad_y, np.hstack([pad_x, self.dem._griddata, pad_x]), pad_y]])
self.dem._pad_boundary(dx, dy)
assertEqual(self.dem.grid, padgrid, 'Grid padded incorrectly (dx = 5, dy = 5)')
|
import unittest
import numpy as np
class CalculationMethodsTestCase(unittest.TestCase):
def setUp(self):
self.dem = DEMGrid()
def test_calculate_slope(self):
sx, sy = self.dem._calculate_slope()
def test_calculate_laplacian(self):
del2z = self.dem._calculate_lapalacian()
def test_calculate_directional_laplacian(self):
alpha = np.pi/4
del2z = self.dem._calculate_lapalacian(alpha)
def test_pad_boundary(self):
dx = 5
dy = 5
grid = self.dem._griddata
pad_x = np.zeros((self.ny, np.round(dx/2))
pad_y = np.zeros((self.nx + 2*np.round(dx/2), np.round(dy/2)))
padgrid = np.vstack([pad_y, np.hstack([pad_x, self.dem._griddata, pad_x]), pad_y]])
self.dem._pad_boundary(dx, dy)
assertEqual(self.dem.grid, padgrid, 'Grid padded incorrectly')
|
Remove redundant case from padding test
|
Remove redundant case from padding test
|
Python
|
mit
|
stgl/scarplet,rmsare/scarplet
|
d9938a50429db16ce60d905bca9844073fe2b0fa
|
this_app/forms.py
|
this_app/forms.py
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import Required, Length, Email
class SignupForm(FlaskForm):
"""Render and validate the signup form"""
email = StringField("Email", validators=[Required(), Email(), Length(1, 32)])
username = StringField("Username", validators=[Required(), Length(1, 32)])
password = PasswordField("Password", validators=[Required(), Length(1, 32)])
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField
from wtforms.validators import DataRequired, Length, Email
class SignupForm(FlaskForm):
"""Render and validate the signup form"""
email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])
class LoginForm(FlaskForm):
"""Form to let users login"""
email = StringField("Username", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
remember = BooleanField("Remember Me")
|
Use DataRequired to validate form
|
Use DataRequired to validate form
|
Python
|
mit
|
borenho/flask-bucketlist,borenho/flask-bucketlist
|
5f2ab0dcaec5a7826ff0652e7c052971083a8398
|
openid/test/datadriven.py
|
openid/test/datadriven.py
|
import unittest
class DataDrivenTestCase(unittest.TestCase):
cases = []
@classmethod
def generateCases(cls):
return cls.cases
@classmethod
def loadTests(cls):
tests = []
for case in cls.generateCases():
if isinstance(case, tuple):
test = cls(*case)
elif isinstance(case, dict):
test = cls(**case)
else:
test = cls(case)
tests.append(test)
return tests
def __init__(self, description):
super(DataDrivenTestCase, self).__init__(self, 'runOneTest')
self.description = description
def shortDescription(self):
return '%s for %s' % (self.__class__.__name__, self.description)
def loadTests(module_name):
loader = unittest.defaultTestLoader
this_module = __import__(module_name, {}, {}, [None])
tests = []
for name in dir(this_module):
obj = getattr(this_module, name)
if isinstance(obj, unittest.TestCase):
if hasattr(obj, 'loadTests'):
tests.extend(obj.loadTests())
else:
tests.append(loader.loadTestsFromTestCase(obj))
return unittest.TestSuite(tests)
|
import unittest
class DataDrivenTestCase(unittest.TestCase):
cases = []
@classmethod
def generateCases(cls):
return cls.cases
@classmethod
def loadTests(cls):
tests = []
for case in cls.generateCases():
if isinstance(case, tuple):
test = cls(*case)
elif isinstance(case, dict):
test = cls(**case)
else:
test = cls(case)
tests.append(test)
return tests
def __init__(self, description):
super(DataDrivenTestCase, self).__init__(self, 'runOneTest')
self.description = description
def shortDescription(self):
return '%s for %s' % (self.__class__.__name__, self.description)
def loadTests(module_name):
loader = unittest.defaultTestLoader
tests = loader.loadTestsFromName(module_name)
if not tests:
raise AssertionError("No tests for {0}".format(module_name))
return unittest.TestSuite(tests)
|
Replace ad-hoc pain with builtin methods
|
Replace ad-hoc pain with builtin methods
|
Python
|
apache-2.0
|
moreati/python3-openid,isagalaev/sm-openid,moreati/python3-openid,moreati/python3-openid,necaris/python3-openid,misli/python3-openid,necaris/python3-openid,misli/python3-openid,misli/python3-openid
|
89d8ee0b91c9fd579dcf965e9e07f18954625c72
|
xero/api.py
|
xero/api.py
|
from .manager import Manager
class Xero(object):
"""An ORM-like interface to the Xero API"""
OBJECT_LIST = (u'Contacts', u'Accounts', u'CreditNotes',
u'Currencies', u'Invoices', u'Items', u'Organisation',
u'Payments', u'TaxRates', u'TrackingCategories')
def __init__(self, credentials):
# Iterate through the list of objects we support, for
# each of them create an attribute on our self that is
# the lowercase name of the object and attach it to an
# instance of a Manager object to operate on it
for name in self.OBJECT_LIST:
setattr(self, name.lower(), Manager(name, credentials.oauth))
|
from .manager import Manager
class Xero(object):
"""An ORM-like interface to the Xero API"""
OBJECT_LIST = (u'Contacts', u'Accounts', u'CreditNotes',
u'Currencies', u'Invoices', u'Items', u'Organisation',
u'Payments', u'TaxRates', u'TrackingCategories', u'ManualJournals')
def __init__(self, credentials):
# Iterate through the list of objects we support, for
# each of them create an attribute on our self that is
# the lowercase name of the object and attach it to an
# instance of a Manager object to operate on it
for name in self.OBJECT_LIST:
setattr(self, name.lower(), Manager(name, credentials.oauth))
|
Add support for manual journals
|
Add support for manual journals
|
Python
|
bsd-3-clause
|
wegotpop/pyxero,jarekwg/pyxero,jaymcconnell/pyxero,opendesk/pyxero,thisismyrobot/pyxero,freakboy3742/pyxero,MJMortimer/pyxero,unomena/pyxero,schinckel/pyxero,unomena/pyxeropos,jacobg/pyxero,direvus/pyxero
|
fb9591c4a2801bfe5f5380c3e33aa44a25db3591
|
customforms/models.py
|
customforms/models.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.db import models
class Form(models.Model):
title = models.CharField(_("Title"), max_length=255)
def __unicode__(self):
return u'%s' % self.title
class Meta:
ordering = ('title', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.id)])
class Question(models.Model):
form = models.ForeignKey(Form)
title = models.CharField(
_("Title"), max_length=255, default=_("Question Title"))
help_text = models.TextField(blank=True, null=True)
CHOICES = [
('C', _('Checkbox')),
('R', _('Radio')),
('S', _('Select')),
('T', _('Text')),
]
question_type = models.CharField(
max_length=1, choices=CHOICES, default="T")
required = models.BooleanField(default=False)
position = models.PositiveIntegerField(default=0)
def __unicode__(self):
return u'%s' % (self.title, )
class Meta:
ordering = ('form', 'position', )
def get_absolute_url(self):
return reverse('customforms.views.view_form', args=[str(self.form.id)])
class Choice(models.Model):
question = models.ForeignKey(Question)
title = models.CharField(max_length=200,)
position = models.PositiveIntegerField(default=0)
class Meta:
ordering = ('position', )
def __unicode__(self):
return u'%s' % (self.title, )
|
Add absolute URLs to form and question admin
|
Add absolute URLs to form and question admin
|
Python
|
apache-2.0
|
cschwede/django-customforms
|
d6ff777c7fb3f645c021da1319bb5d78d13aa9db
|
meshnet/interface.py
|
meshnet/interface.py
|
import serial
import struct
from siphashc import siphash
def _hash(key: str, sender: int, receiver: int, msg_type: int, data: bytes):
packed_data = struct.pack(">h>hBs", sender, receiver, msg_type, data)
return struct.pack("Q", siphash(key, packed_data))
class SerialMessage(object):
def __init__(self):
pass
def serialize(self):
pass
class Connection(object):
def __init__(self, device):
self._device = device
self._conn = None
def connect(self):
self._conn = serial.Serial(self._device, 115200)
|
import serial
import struct
from siphashc import siphash
def _hash(key: bytes, sender: int, receiver: int, msg_type: int, data: bytes):
packed_data = struct.pack(">hhB", sender, receiver, msg_type) + data
return struct.pack(">Q", siphash(key, packed_data))
class SerialMessage(object):
def __init__(self):
pass
def serialize(self):
pass
class Connection(object):
def __init__(self, device):
self._device = device
self._conn = None
def connect(self):
self._conn = serial.Serial(self._device, 115200)
|
Fix python siphashing to match c implementation
|
Fix python siphashing to match c implementation
Signed-off-by: Jan Losinski <577c4104c61edf9f052c616c0c23e67bef4a9955@wh2.tu-dresden.de>
|
Python
|
bsd-3-clause
|
janLo/automation_mesh,janLo/automation_mesh,janLo/automation_mesh
|
b2bab786c4af3dcca7d35b1e6ecff8699e542ec4
|
pytest_girder/pytest_girder/plugin.py
|
pytest_girder/pytest_girder/plugin.py
|
from .fixtures import * # noqa
def pytest_addoption(parser):
group = parser.getgroup('girder')
group.addoption('--mock-db', action='store_true', default=False,
help='Whether or not to mock the database using mongomock.')
group.addoption('--mongo-uri', action='store', default='mongodb://localhost:27017',
help=('The base URI to the MongoDB instance to use for database connections, '
'default is mongodb://localhost:27017'))
group.addoption('--drop-db', action='store', default='both',
choices=('both', 'pre', 'post', 'never'),
help='When to destroy testing databases, default is both '
'(before and after running tests)')
|
import os
from .fixtures import * # noqa
def pytest_configure(config):
"""
Create the necessary directories for coverage. This is necessary because neither coverage nor
pytest-cov have support for making the data_file directory before running.
"""
covPlugin = config.pluginmanager.get_plugin('_cov')
if covPlugin is not None:
covPluginConfig = covPlugin.cov_controller.cov.config
covDataFileDir = os.path.dirname(covPluginConfig.data_file)
try:
os.makedirs(covDataFileDir)
except OSError:
pass
def pytest_addoption(parser):
group = parser.getgroup('girder')
group.addoption('--mock-db', action='store_true', default=False,
help='Whether or not to mock the database using mongomock.')
group.addoption('--mongo-uri', action='store', default='mongodb://localhost:27017',
help=('The base URI to the MongoDB instance to use for database connections, '
'default is mongodb://localhost:27017'))
group.addoption('--drop-db', action='store', default='both',
choices=('both', 'pre', 'post', 'never'),
help='When to destroy testing databases, default is both '
'(before and after running tests)')
|
Add a pytest hook for creating the coverage data_file directory
|
Add a pytest hook for creating the coverage data_file directory
|
Python
|
apache-2.0
|
jbeezley/girder,jbeezley/girder,girder/girder,kotfic/girder,jbeezley/girder,data-exp-lab/girder,Xarthisius/girder,data-exp-lab/girder,girder/girder,RafaelPalomar/girder,jbeezley/girder,girder/girder,kotfic/girder,manthey/girder,kotfic/girder,girder/girder,RafaelPalomar/girder,Xarthisius/girder,RafaelPalomar/girder,Xarthisius/girder,data-exp-lab/girder,manthey/girder,manthey/girder,RafaelPalomar/girder,data-exp-lab/girder,RafaelPalomar/girder,Kitware/girder,manthey/girder,data-exp-lab/girder,Xarthisius/girder,Kitware/girder,Xarthisius/girder,kotfic/girder,Kitware/girder,kotfic/girder,Kitware/girder
|
b1e6f3eacccb5e575ac47b6a40809f4671510672
|
rest_flex_fields/utils.py
|
rest_flex_fields/utils.py
|
try:
# Python 3
from collections.abc import Iterable
string_types = (str,)
except ImportError:
# Python 2
from collections import Iterable
string_types = (str, unicode)
def is_expanded(request, key):
""" Examines request object to return boolean of whether
passed field is expanded.
"""
expand = request.query_params.get("expand", "")
expand_fields = []
for e in expand.split(","):
expand_fields.extend([e for e in e.split(".")])
return "~all" in expand_fields or key in expand_fields
def split_levels(fields):
"""
Convert dot-notation such as ['a', 'a.b', 'a.d', 'c'] into
current-level fields ['a', 'c'] and next-level fields
{'a': ['b', 'd']}.
"""
first_level_fields = []
next_level_fields = {}
if not fields:
return first_level_fields, next_level_fields
assert (
isinstance(fields, Iterable)
), "`fields` must be iterable (e.g. list, tuple, or generator)"
if isinstance(fields, string_types):
fields = [a.strip() for a in fields.split(",") if a.strip()]
for e in fields:
if "." in e:
first_level, next_level = e.split(".", 1)
first_level_fields.append(first_level)
next_level_fields.setdefault(first_level, []).append(next_level)
else:
first_level_fields.append(e)
first_level_fields = list(set(first_level_fields))
return first_level_fields, next_level_fields
|
from collections.abc import Iterable
def is_expanded(request, key):
""" Examines request object to return boolean of whether
passed field is expanded.
"""
expand = request.query_params.get("expand", "")
expand_fields = []
for e in expand.split(","):
expand_fields.extend([e for e in e.split(".")])
return "~all" in expand_fields or key in expand_fields
def split_levels(fields):
"""
Convert dot-notation such as ['a', 'a.b', 'a.d', 'c'] into
current-level fields ['a', 'c'] and next-level fields
{'a': ['b', 'd']}.
"""
first_level_fields = []
next_level_fields = {}
if not fields:
return first_level_fields, next_level_fields
assert (
isinstance(fields, Iterable)
), "`fields` must be iterable (e.g. list, tuple, or generator)"
if isinstance(fields, str):
fields = [a.strip() for a in fields.split(",") if a.strip()]
for e in fields:
if "." in e:
first_level, next_level = e.split(".", 1)
first_level_fields.append(first_level)
next_level_fields.setdefault(first_level, []).append(next_level)
else:
first_level_fields.append(e)
first_level_fields = list(set(first_level_fields))
return first_level_fields, next_level_fields
|
Drop Python 2 support in split_level utility function
|
Drop Python 2 support in split_level utility function
|
Python
|
mit
|
rsinger86/drf-flex-fields
|
494f14a69d08e9bfd556fccc6b4e2319db129a38
|
books/models.py
|
books/models.py
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
class Receipt(models.Model):
title = fields.CharField(max_length=255)
price = fields.DecimalField(max_digits=10, decimal_places=2)
user = models.ForeignKey(User)
def __str__(self):
return "{}_{}".format(self.title, self.price)
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
from django.utils import timezone
class Receipt(models.Model):
title = fields.CharField(max_length=255)
price = fields.DecimalField(max_digits=10, decimal_places=2)
created = fields.DateTimeField(auto_now=True)
modified = fields.DateTimeField(default=timezone.now())
user = models.ForeignKey(User)
def __str__(self):
return "{}_{}".format(self.title, self.price)
|
Add created and modified fields to Receipt
|
Add created and modified fields to Receipt
|
Python
|
mit
|
trimailov/finance,trimailov/finance,trimailov/finance
|
b1547647deec6c1edf54c497fa4ed20235ea6902
|
pymodels/middlelayer/devices/__init__.py
|
pymodels/middlelayer/devices/__init__.py
|
from .dcct import DCCT
from .li_llrf import LiLLRF
from .rf import RF
from .sofb import SOFB
from .kicker import Kicker
from .septum import Septum
from .screen import Screen
from .bpm import BPM
from .ict import ICT
from .ict import TranspEff
from .egun import HVPS
from .egun import Filament
|
from .dcct import DCCT
from .li_llrf import LiLLRF
from .rf import RF
from .sofb import SOFB
from .kicker import Kicker
from .septum import Septum
from .screen import Screen
from .bpm import BPM
from .ict import ICT
from .ict import TranspEff
from .egun import Bias
from .egun import Filament
from .egun import HVPS
|
Add missing egun.bias in init
|
ENH: Add missing egun.bias in init
|
Python
|
mit
|
lnls-fac/sirius
|
5856e4daaf141e5bf9cdef438378a3757297f9c0
|
recipe_scrapers/wholefoods.py
|
recipe_scrapers/wholefoods.py
|
from ._abstract import AbstractScraper
class WholeFoods(AbstractScraper):
@classmethod
def host(self, domain="com"):
return f"www.wholefoodsmarket.{domain}"
|
from ._abstract import AbstractScraper
class WholeFoods(AbstractScraper):
@classmethod
def host(self, domain="com"):
return f"www.wholefoodsmarket.{domain}"
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
|
Add wrapper methods for clarity.
|
Add wrapper methods for clarity.
|
Python
|
mit
|
hhursev/recipe-scraper
|
5f42f76ffd11e82d51a334b91d64723388ca4a0d
|
newswall/providers/feed.py
|
newswall/providers/feed.py
|
from datetime import datetime
import feedparser
import time
from newswall.providers.base import ProviderBase
class Provider(ProviderBase):
def update(self):
feed = feedparser.parse(self.config['source'])
for entry in feed['entries']:
self.create_story(entry.link,
title=entry.title,
body=entry.description,
timestamp=datetime.fromtimestamp(time.mktime(entry.date_parsed)),
)
|
"""
RSS Feed Provider
=================
Required configuration keys::
{
"provider": "newswall.providers.feed",
"source": "http://twitter.com/statuses/user_timeline/feinheit.rss"
}
"""
from datetime import datetime
import feedparser
import time
from newswall.providers.base import ProviderBase
class Provider(ProviderBase):
def update(self):
feed = feedparser.parse(self.config['source'])
for entry in feed['entries']:
self.create_story(entry.link,
title=entry.title,
body=entry.description,
timestamp=datetime.fromtimestamp(time.mktime(entry.date_parsed)),
)
|
Add RSS Feed Provider docs
|
Add RSS Feed Provider docs
|
Python
|
bsd-3-clause
|
michaelkuty/django-newswall,registerguard/django-newswall,matthiask/django-newswall,HerraLampila/django-newswall,registerguard/django-newswall,HerraLampila/django-newswall,michaelkuty/django-newswall,matthiask/django-newswall
|
931e2d1e8ba3fd6b129a6d74e3a1ad9984c1938a
|
benchmarks/benchmarks/bench_random.py
|
benchmarks/benchmarks/bench_random.py
|
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
class Random(Benchmark):
params = ['normal', 'uniform', 'weibull 1', 'binomial 10 0.5',
'poisson 10']
def setup(self, name):
items = name.split()
name = items.pop(0)
params = [float(x) for x in items]
self.func = getattr(np.random, name)
self.params = tuple(params) + ((100, 100),)
def time_rng(self, name):
self.func(*self.params)
class Shuffle(Benchmark):
def setup(self):
self.a = np.arange(100000)
def time_100000(self):
np.random.shuffle(self.a)
|
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
from numpy.lib import NumpyVersion
class Random(Benchmark):
params = ['normal', 'uniform', 'weibull 1', 'binomial 10 0.5',
'poisson 10']
def setup(self, name):
items = name.split()
name = items.pop(0)
params = [float(x) for x in items]
self.func = getattr(np.random, name)
self.params = tuple(params) + ((100, 100),)
def time_rng(self, name):
self.func(*self.params)
class Shuffle(Benchmark):
def setup(self):
self.a = np.arange(100000)
def time_100000(self):
np.random.shuffle(self.a)
class Randint(Benchmark):
def time_randint_fast(self):
"""Compare to uint32 below"""
np.random.randint(0, 2**30, size=10**5)
def time_randint_slow(self):
"""Compare to uint32 below"""
np.random.randint(0, 2**30 + 1, size=10**5)
class Randint_dtype(Benchmark):
high = {
'bool': 1,
'uint8': 2**7,
'uint16': 2**15,
'uint32': 2**31,
'uint64': 2**63
}
param_names = ['dtype']
params = ['bool', 'uint8', 'uint16', 'uint32', 'uint64']
def setup(self, name):
if NumpyVersion(np.__version__) < '1.11.0.dev0':
raise NotImplementedError
def time_randint_fast(self, name):
high = self.high[name]
np.random.randint(0, high, size=10**5, dtype=name)
def time_randint_slow(self, name):
high = self.high[name]
np.random.randint(0, high + 1, size=10**5, dtype=name)
|
Add benchmark tests for numpy.random.randint.
|
ENH: Add benchmark tests for numpy.random.randint.
This add benchmarks randint. There is one set of benchmarks for the
default dtype, 'l', that can be tracked back, and another set for the
new dtypes 'bool', 'uint8', 'uint16', 'uint32', and 'uint64'.
|
Python
|
bsd-3-clause
|
shoyer/numpy,Dapid/numpy,jakirkham/numpy,WarrenWeckesser/numpy,chatcannon/numpy,WarrenWeckesser/numpy,b-carter/numpy,anntzer/numpy,ssanderson/numpy,simongibbons/numpy,nbeaver/numpy,SiccarPoint/numpy,numpy/numpy,Eric89GXL/numpy,kiwifb/numpy,seberg/numpy,rgommers/numpy,ESSS/numpy,shoyer/numpy,anntzer/numpy,utke1/numpy,dwillmer/numpy,grlee77/numpy,ddasilva/numpy,charris/numpy,tacaswell/numpy,simongibbons/numpy,endolith/numpy,solarjoe/numpy,numpy/numpy,WarrenWeckesser/numpy,stuarteberg/numpy,SiccarPoint/numpy,mhvk/numpy,ahaldane/numpy,rgommers/numpy,bringingheavendown/numpy,anntzer/numpy,ContinuumIO/numpy,Eric89GXL/numpy,kiwifb/numpy,bringingheavendown/numpy,MSeifert04/numpy,solarjoe/numpy,ahaldane/numpy,jakirkham/numpy,maniteja123/numpy,anntzer/numpy,ssanderson/numpy,tacaswell/numpy,WarrenWeckesser/numpy,ContinuumIO/numpy,maniteja123/numpy,njase/numpy,jakirkham/numpy,maniteja123/numpy,drasmuss/numpy,tynn/numpy,shoyer/numpy,endolith/numpy,madphysicist/numpy,stuarteberg/numpy,madphysicist/numpy,jakirkham/numpy,abalkin/numpy,Dapid/numpy,pbrod/numpy,ContinuumIO/numpy,pdebuyl/numpy,pbrod/numpy,mattip/numpy,gmcastil/numpy,rherault-insa/numpy,stuarteberg/numpy,ESSS/numpy,njase/numpy,jonathanunderwood/numpy,jorisvandenbossche/numpy,gfyoung/numpy,b-carter/numpy,jorisvandenbossche/numpy,grlee77/numpy,jonathanunderwood/numpy,pizzathief/numpy,seberg/numpy,drasmuss/numpy,skwbc/numpy,skwbc/numpy,grlee77/numpy,Eric89GXL/numpy,AustereCuriosity/numpy,gfyoung/numpy,SiccarPoint/numpy,pbrod/numpy,rherault-insa/numpy,dwillmer/numpy,ddasilva/numpy,charris/numpy,simongibbons/numpy,chiffa/numpy,chatcannon/numpy,simongibbons/numpy,argriffing/numpy,mhvk/numpy,shoyer/numpy,njase/numpy,grlee77/numpy,pbrod/numpy,WarrenWeckesser/numpy,pizzathief/numpy,pizzathief/numpy,SiccarPoint/numpy,dwillmer/numpy,MSeifert04/numpy,MSeifert04/numpy,seberg/numpy,joferkington/numpy,MSeifert04/numpy,skwbc/numpy,joferkington/numpy,nbeaver/numpy,pdebuyl/numpy,abalkin/numpy,bertrand-l/numpy,madphysicist/numpy,pdebuyl/n
umpy,bertrand-l/numpy,rherault-insa/numpy,rgommers/numpy,gmcastil/numpy,dwillmer/numpy,tacaswell/numpy,drasmuss/numpy,seberg/numpy,chiffa/numpy,jakirkham/numpy,endolith/numpy,pbrod/numpy,mhvk/numpy,pdebuyl/numpy,mhvk/numpy,charris/numpy,argriffing/numpy,gfyoung/numpy,chatcannon/numpy,pizzathief/numpy,AustereCuriosity/numpy,stuarteberg/numpy,charris/numpy,MSeifert04/numpy,bringingheavendown/numpy,joferkington/numpy,shoyer/numpy,numpy/numpy,jorisvandenbossche/numpy,Dapid/numpy,simongibbons/numpy,mhvk/numpy,mattip/numpy,jorisvandenbossche/numpy,endolith/numpy,ESSS/numpy,behzadnouri/numpy,chiffa/numpy,kiwifb/numpy,argriffing/numpy,jorisvandenbossche/numpy,joferkington/numpy,behzadnouri/numpy,AustereCuriosity/numpy,utke1/numpy,tynn/numpy,grlee77/numpy,ssanderson/numpy,behzadnouri/numpy,madphysicist/numpy,mattip/numpy,Eric89GXL/numpy,ahaldane/numpy,jonathanunderwood/numpy,abalkin/numpy,ahaldane/numpy,madphysicist/numpy,solarjoe/numpy,utke1/numpy,gmcastil/numpy,ddasilva/numpy,numpy/numpy,tynn/numpy,b-carter/numpy,pizzathief/numpy,mattip/numpy,ahaldane/numpy,bertrand-l/numpy,rgommers/numpy,nbeaver/numpy
|
ca8e15d50b816c29fc2a0df27d0266826e38b5b8
|
cellcounter/statistics/serializers.py
|
cellcounter/statistics/serializers.py
|
from rest_framework.serializers import ModelSerializer
from .models import CountInstance
class CountInstanceSerializer(ModelSerializer):
class Meta:
model = CountInstance
|
from rest_framework.serializers import ModelSerializer
from .models import CountInstance
class CountInstanceSerializer(ModelSerializer):
class Meta:
model = CountInstance
fields = ('count_total',)
|
Update serializer to deal with new model
|
Update serializer to deal with new model
|
Python
|
mit
|
cellcounter/cellcounter,haematologic/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,haematologic/cellcounter,haematologic/cellcounter,cellcounter/cellcounter
|
3245946ff25889149dc60cf6b1364bd09c953809
|
faas/puzzleboard-pop/puzzleboard_pop.py
|
faas/puzzleboard-pop/puzzleboard_pop.py
|
import json
from datetime import datetime
import requests
from .model.puzzleboard import pop_puzzleboard
class HuntwordsPuzzleBoardPopCommand(object):
'''Command class that processes puzzleboard-pop message'''
def run(self, jreq):
'''Command that processes puzzleboard-pop message'''
req = json.loads(jreq)
pboard = pop_puzzleboard(req['puzzle'])
jpboard = json.dumps(dict(pboard))
resp = {
'puzzleboard': jpboard,
'processed': {
'at': f'{datetime.now().isoformat()}',
'status': 'ok'
}
}
send_consumed(pboard)
return json.dumps(resp)
def send_consumed(pboard):
'''Send async request to generate a new copy'''
url = '/async-function/puzzleboard-consumed'
data = f'{{"puzzle": "{pboard.puzzle.name}" }}'
requests.post(url, data)
|
import json
from datetime import datetime
import requests
from .model.puzzleboard import pop_puzzleboard
class HuntwordsPuzzleBoardPopCommand(object):
'''Command class that processes puzzleboard-pop message'''
def run(self, jreq):
'''Command that processes puzzleboard-pop message'''
req = json.loads(jreq)
pboard = pop_puzzleboard(req['puzzle'])
jpboard = json.dumps(dict(pboard))
resp = {
'puzzleboard': jpboard,
'processed': {
'at': f'{datetime.now().isoformat()}',
'status': 'ok'
}
}
send_consumed(pboard)
return json.dumps(resp)
def send_consumed(pboard):
'''Send async request to generate a new copy'''
url = 'http://puzzleboard-consumed.openfaas-fn:8080'
data = f'{{"puzzle": "{pboard.puzzle.name}" }}'
requests.post(url, data)
|
Change url from relative to internal service endpoint
|
Change url from relative to internal service endpoint
|
Python
|
mit
|
klmcwhirter/huntwords,klmcwhirter/huntwords,klmcwhirter/huntwords,klmcwhirter/huntwords
|
608dc0db688be1dabe3c6ba7647807f6697fcefe
|
tools/misc/python/test-data-in-out.py
|
tools/misc/python/test-data-in-out.py
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
import shutil
shutil.copyfile('input', 'output')
|
# TOOL test-data-in-out.py: "Test data input and output in Python" (Data input output test.)
# INPUT input TYPE GENERIC
# OUTPUT output
# OUTPUT OPTIONAL missing_output.txt
# IMAGE chipster-tools-python
import shutil
shutil.copyfile('input', 'output')
|
Test image definition in SADL
|
Test image definition in SADL
|
Python
|
mit
|
chipster/chipster-tools,chipster/chipster-tools,chipster/chipster-tools,chipster/chipster-tools
|
5548e32a32bd1cd5951ce50e74c0fad944a1cf04
|
ideascube/conf/idb_col_llavedelsaber.py
|
ideascube/conf/idb_col_llavedelsaber.py
|
"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
USER_FORM_FIELDS = USER_FORM_FIELDS + (
(_('Personal informations'), ['extra', 'disabilities']),
)
USER_EXTRA_FIELD_LABEL = 'Etnicidad'
|
"""Configuration for Llave Del Saber, Colombia"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
LANGUAGE_CODE = 'es'
DOMAIN = 'bibliotecamovil.lan'
ALLOWED_HOSTS = ['.bibliotecamovil.lan', 'localhost']
USER_FORM_FIELDS = USER_FORM_FIELDS + (
(_('Personal informations'), ['disabilities']),
)
|
Stop using the extra field for Colombia
|
Stop using the extra field for Colombia
After discussion, this is not something we will have in Ideascube.
Fixes #609
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
c1b433e5ed4c06b956b4d27f6da4e8b1dab54aaf
|
services/cloudwatch/sample.py
|
services/cloudwatch/sample.py
|
'''
===================================
Boto 3 - CloudWatch Service Example
===================================
This application implements the CloudWatch service that lets you gets
information from Amazon Cloud Watch. See the README for more details.
'''
import boto3
'''
Define your AWS credentials:
'''
AWS_ACCESS_KEY_ID = 'AKIAJM7BQ4WBJJSVU2JQ'
AWS_SECRET_ACCESS_KEY = 'Fq9GmwWEsvbcdHuh4McD+ZUmfowPKrnzFmhczV2U'
'''
Connection to AWS.
'''
client = boto3.client('cloudwatch',
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
# Main program:
if __name__ == '__main__':
print_results()
|
'''
===================================
Boto 3 - CloudWatch Service Example
===================================
This application implements the CloudWatch service that lets you gets
information from Amazon Cloud Watch. See the README for more details.
'''
import boto3
'''
Define your AWS credentials:
'''
AWS_ACCESS_KEY_ID = '<YOUR ACCESS KEY ID>'
AWS_SECRET_ACCESS_KEY = '<YOUR SECRET ACCESS KEY>'
'''
Connection to AWS.
'''
client = boto3.client('cloudwatch',
aws_access_key_id=AWS_ACCESS_KEY_ID,
aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
# Main program:
if __name__ == '__main__':
print_results()
|
Fix issue in cloudwacth service credentials
|
Fix issue in cloudwacth service credentials
|
Python
|
mit
|
rolandovillca/aws_samples_boto3_sdk
|
a05a05f24c29dcf039e02b55c18c476dc69757df
|
shell_manager/problem_repo.py
|
shell_manager/problem_repo.py
|
"""
Problem repository management for the shell manager.
"""
import spur, gzip
from shutil import copy2
from os.path import join
def local_update(repo_path, deb_paths=[]):
"""
Updates a local deb repository by copying debs and running scanpackages.
Args:
repo_path: the path to the local repository.
dep_paths: list of problem deb paths to copy.
"""
[copy2(deb_path, repo_path) for deb_path in deb_paths]
shell = spur.LocalShell()
result = shell.run(["dpkg-scanpackages", ".", "/dev/null"], cwd=repo_path)
packages_path = join(repo_path, "Packages.gz")
with gzip.open(packages_path, "wb") as packages:
packages.write(result.output)
print("Updated problem repository.")
|
"""
Problem repository management for the shell manager.
"""
import spur, gzip
from shutil import copy2
from os.path import join
def update_repo(args):
"""
Main entrypoint for repo update operations.
"""
if args.repo_type == "local":
local_update(args.repository, args.package_paths)
else:
remote_update(args.repository, args.package_paths)
def remote_update(repo_ui, deb_paths=[]):
"""
Pushes packages to a remote deb repository.
Args:
repo_uri: location of the repository.
deb_paths: list of problem deb paths to copy.
"""
pass
def local_update(repo_path, deb_paths=[]):
"""
Updates a local deb repository by copying debs and running scanpackages.
Args:
repo_path: the path to the local repository.
dep_paths: list of problem deb paths to copy.
"""
[copy2(deb_path, repo_path) for deb_path in deb_paths]
shell = spur.LocalShell()
result = shell.run(["dpkg-scanpackages", ".", "/dev/null"], cwd=repo_path)
packages_path = join(repo_path, "Packages.gz")
with gzip.open(packages_path, "wb") as packages:
packages.write(result.output)
print("Updated problem repository.")
|
Update repo entrypoint and remote_update stub.
|
Update repo entrypoint and remote_update stub.
|
Python
|
mit
|
RitwikGupta/picoCTF-shell-manager,cganas/picoCTF-shell-manager,RitwikGupta/picoCTF-shell-manager,cganas/picoCTF-shell-manager,picoCTF/picoCTF-shell-manager,cganas/picoCTF-shell-manager,cganas/picoCTF-shell-manager,RitwikGupta/picoCTF-shell-manager,picoCTF/picoCTF-shell-manager,picoCTF/picoCTF-shell-manager,picoCTF/picoCTF-shell-manager,RitwikGupta/picoCTF-shell-manager
|
6f7dba3beccca655b84879ccd0f3071d15536b2f
|
test/utils.py
|
test/utils.py
|
# coding: utf-8
import string
import random
def generate_string(str_len=6, src=string.ascii_lowercase):
return "".join(random.choice(src) for x in xrange(str_len))
def lorem_ipsum():
words_count = random.randint(20, 50)
lorem = list([])
for i in xrange(words_count):
word_length = random.randint(4, 8)
lorem.append(generate_string(str_len=word_length))
return " ".join(lorem)
|
# coding: utf-8
import string
import random
def generate_string(str_len=6, src=string.ascii_lowercase):
return "".join(random.choice(src) for x in xrange(str_len))
def lorem_ipsum(words_count=30):
lorem = list([])
for i in xrange(words_count):
word_length = random.randint(4, 8)
lorem.append(generate_string(str_len=word_length))
return " ".join(lorem)
|
Add word_count parameter for lorem_ipsum generator
|
Add word_count parameter for lorem_ipsum generator
|
Python
|
mit
|
sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/Rynda,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/Rynda
|
d80f7a89b5bc23802ad5ec9bb8cc6ad523976718
|
test_gitnl.py
|
test_gitnl.py
|
from __future__ import print_function, division, absolute_import
import unittest
import gitnl
class GitnlTestCase(unittest.TestCase):
"""Tests from 'gitnl.py'."""
def test_push_remotename_branchfrom(self):
desired = 'push remotename branchfrom'
actual = gitnl.parse_to_git('push my branch branchfrom to a remote called remotename')
self.assertEqual(actual, desired)
if __name__ == '__main__':
unittest.main()
|
from __future__ import print_function, division, absolute_import
import unittest
import gitnl
class GitnlTestCase(unittest.TestCase):
"""Tests from 'gitnl.py'."""
def test_push_remotename_branchfrom(self):
desired = 'push remotename branchfrom'
actual = gitnl.parse_to_git('push my branch branchfrom to a remote called remotename')
self.assertEqual(actual, desired)
def test_rename_branch(self):
desired = 'branch -m old_branch new_branch'
actual = gitnl.parse_to_git('branch rename branch old_branch to new_branch')
self.assertEqual(actual, desired)
if __name__ == '__main__':
unittest.main()
|
Add rename branch locally test
|
Add rename branch locally test
|
Python
|
mit
|
eteq/gitnl,eteq/gitnl
|
fb213097e838ddfa40d9f71f1705d7af661cfbdf
|
tests/unit.py
|
tests/unit.py
|
# -*- coding: latin-1 -*-
import unittest
from github2.issues import Issue
from github2.client import Github
class ReprTests(unittest.TestCase):
"""__repr__ must return strings, not unicode objects."""
def test_issue(self):
"""Issues can have non-ASCII characters in the title."""
i = Issue(title=u'abcdé')
self.assertEqual(str, type(repr(i)))
class RateLimits(unittest.TestCase):
"""
How should we handle actual API calls such that tests can run?
Perhaps the library should support a ~/.python_github2.conf from which to
get the auth?
"""
def test_delays(self):
import datetime
USERNAME = ''
API_KEY = ''
client = Github(username=USERNAME, api_token=API_KEY,
requests_per_second=.5)
client.users.show('defunkt')
start = datetime.datetime.now()
client.users.show('mojombo')
end = datetime.datetime.now()
self.assertGreaterEqual((end - start).total_seconds(), 2.0,
"Expected .5 reqs per second to require a 2 second delay between "
"calls.")
|
# -*- coding: latin-1 -*-
import unittest
from github2.issues import Issue
from github2.client import Github
class ReprTests(unittest.TestCase):
"""__repr__ must return strings, not unicode objects."""
def test_issue(self):
"""Issues can have non-ASCII characters in the title."""
i = Issue(title=u'abcdé')
self.assertEqual(str, type(repr(i)))
class RateLimits(unittest.TestCase):
"""
How should we handle actual API calls such that tests can run?
Perhaps the library should support a ~/.python_github2.conf from which to
get the auth?
"""
def test_delays(self):
import datetime
USERNAME = ''
API_KEY = ''
client = Github(username=USERNAME, api_token=API_KEY,
requests_per_second=.5)
client.users.show('defunkt')
start = datetime.datetime.now()
client.users.show('mojombo')
end = datetime.datetime.now()
delta = end - start
delta_seconds = delta.days * 24 * 60 * 60 + delta.seconds
self.assertTrue(delta_seconds >= 2,
"Expected .5 reqs per second to require a 2 second delay between "
"calls.")
|
Allow tests to be run with Python <2.6.
|
Allow tests to be run with Python <2.6.
|
Python
|
bsd-3-clause
|
ask/python-github2
|
4be7f694220ee969683f07b982f8fcbe61971a04
|
hairball/plugins/duplicate.py
|
hairball/plugins/duplicate.py
|
"""This module provides plugins for basic duplicate code detection."""
from hairball.plugins import HairballPlugin
class DuplicateScripts(HairballPlugin):
"""Plugin that keeps track of which scripts have been
used more than once whithin a project."""
def __init__(self):
super(DuplicateScripts, self).__init__()
self.total_duplicate = 0
self.list_duplicate = []
def finalize(self):
"""Output the duplicate scripts detected."""
if self.total_duplicate > 0:
print("%d duplicate scripts found" % self.total_duplicate)
for duplicate in self.list_duplicate:
print duplicate
def analyze(self, scratch):
"""Run and return the results from the DuplicateChecks plugin."""
scripts_set = set()
for script in self.iter_scripts(scratch):
blocks_list = []
for name, _, _ in self.iter_blocks(script.blocks):
blocks_list.append(name)
blocks_tuple = tuple(blocks_list)
if blocks_tuple in scripts_set:
if len(blocks_list)>3:
self.total_duplicate += 1
self.list_duplicate.append(blocks_list)
else:
scripts_set.add(blocks_tuple)
|
"""This module provides plugins for basic duplicate code detection."""
from hairball.plugins import HairballPlugin
class DuplicateScripts(HairballPlugin):
"""Plugin that keeps track of which scripts have been
used more than once whithin a project."""
def __init__(self):
super(DuplicateScripts, self).__init__()
self.total_duplicate = 0
self.list_duplicate = []
def finalize(self):
"""Output the duplicate scripts detected."""
if self.total_duplicate > 0:
print("%d duplicate scripts found" % self.total_duplicate)
for duplicate in self.list_duplicate:
print duplicate
def analyze(self, scratch):
"""Run and return the results from the DuplicateChecks plugin.
Only takes into account scripts with more than 3 blocks"""
scripts_set = set()
for script in self.iter_scripts(scratch):
blocks_list = []
for name, _, _ in self.iter_blocks(script.blocks):
blocks_list.append(name)
blocks_tuple = tuple(blocks_list)
if blocks_tuple in scripts_set:
if len(blocks_list)>3:
self.total_duplicate += 1
self.list_duplicate.append(blocks_list)
else:
scripts_set.add(blocks_tuple)
|
Add comment to explain the length of the scripts taken into account in DuplicateScripts
|
Add comment to explain the length of the scripts taken into account in DuplicateScripts
|
Python
|
bsd-2-clause
|
ucsb-cs-education/hairball,jemole/hairball,thsunmy/hairball,jemole/hairball,ucsb-cs-education/hairball,thsunmy/hairball
|
15996286496d913c25290362ba2dba2d349bd5f6
|
imageManagerUtils/settings.py
|
imageManagerUtils/settings.py
|
# Copyright (c) 2017, MIT Licensed, Medicine Yeh
# This file helps to read settings from bash script into os.environ
import os
import sys
import subprocess
# This path is the location of the caller script
MAIN_SCRIPT_PATH = os.path.dirname(os.path.abspath(sys.argv[0]))
# Set up the path to settings.sh
settings_path = os.path.join(MAIN_SCRIPT_PATH, 'settings.sh')
if not os.path.isfile(settings_path):
print('Cannot find settings.sh in ' + MAIN_SCRIPT_PATH)
exit(1)
# This is a tricky way to read bash envs in the script
env_str = subprocess.check_output('source {} && env'.format(settings_path), shell=True)
# Transform to list of python strings (utf-8 encodings)
env_str = env_str.decode('utf-8').split('\n')
# Transform from a list to a list of pairs and filter out invalid formats
env_list = [kv.split('=') for kv in env_str if len(kv.split('=')) == 2]
# Transform from a list to a dictionary
env_dict = {kv[0]: kv[1] for kv in env_list}
# Update the os.environ globally
os.environ.update(env_dict)
|
# Copyright (c) 2017, MIT Licensed, Medicine Yeh
# This file helps to read settings from bash script into os.environ
import os
import sys
import subprocess
# This path is the location of the caller script
MAIN_SCRIPT_PATH = os.path.dirname(os.path.abspath(sys.argv[0]))
# Set up the path to settings.sh
settings_path = os.path.join(MAIN_SCRIPT_PATH, 'settings.sh')
if not os.path.isfile(settings_path):
print('Cannot find settings.sh in ' + MAIN_SCRIPT_PATH)
exit(1)
# This is a tricky way to read bash envs in the script
env_str = subprocess.check_output('source {} && env'.format(settings_path), shell=True, executable='/bin/bash')
# Transform to list of python strings (utf-8 encodings)
env_str = env_str.decode('utf-8').split('\n')
# Transform from a list to a list of pairs and filter out invalid formats
env_list = [kv.split('=') for kv in env_str if len(kv.split('=')) == 2]
# Transform from a list to a dictionary
env_dict = {kv[0]: kv[1] for kv in env_list}
# Update the os.environ globally
os.environ.update(env_dict)
|
Fix bug of invoking /bin/sh on several OSs
|
Fix bug of invoking /bin/sh on several OSs
|
Python
|
mit
|
snippits/qemu_image,snippits/qemu_image,snippits/qemu_image
|
c027e671d1a47d485755b748f2dffc202c704ff8
|
goodreadsapi.py
|
goodreadsapi.py
|
#!/usr/bin/env python
import re
from xml.parsers.expat import ExpatError
import requests
import xmltodict
from settings import goodreads_api_key
def get_goodreads_ids(comment_msg):
# receives goodreads url
# returns the id using regex
regex = r'goodreads.com/book/show/(\d+)'
return set(re.findall(regex, comment_msg))
def get_book_details_by_id(goodreads_id):
api_url = 'http://goodreads.com/book/show/{0}?format=xml&key={1}'
r = requests.get(api_url.format(goodreads_id, goodreads_api_key))
try:
book_data = xmltodict.parse(r.content)['GoodreadsResponse']['book']
except (TypeError, KeyError, ExpatError):
return False
keys = ['title', 'average_rating', 'ratings_count', 'description',
'num_pages', 'publication_year']
book = {}
for k in keys:
book[k] = book_data.get(k)
if type(book_data['authors']['author']) == list:
authors = [author['name'] for author in book_data['authors']['author']]
authors = ', '.join(authors)
else:
authors = book_data['authors']['author']['name']
book['authors'] = authors
return book
|
#!/usr/bin/env python
import re
from xml.parsers.expat import ExpatError
import requests
import xmltodict
from settings import goodreads_api_key
def get_goodreads_ids(comment_msg):
# receives goodreads url
# returns the id using regex
regex = r'goodreads.com/book/show/(\d+)'
return set(re.findall(regex, comment_msg))
def get_book_details_by_id(goodreads_id):
api_url = 'http://goodreads.com/book/show/{0}?format=xml&key={1}'
r = requests.get(api_url.format(goodreads_id, goodreads_api_key))
try:
book_data = xmltodict.parse(r.content)['GoodreadsResponse']['book']
except (TypeError, KeyError, ExpatError):
return False
keys = ['title', 'average_rating', 'ratings_count', 'description',
'num_pages']
book = {}
for k in keys:
book[k] = book_data.get(k)
try:
work = book_data['work']
book['publication_year'] = work['original_publication_year']['#text']
except KeyError:
book['publication_year'] = book_data.get('publication_year')
if type(book_data['authors']['author']) == list:
authors = [author['name'] for author in book_data['authors']['author']]
authors = ', '.join(authors)
else:
authors = book_data['authors']['author']['name']
book['authors'] = authors
return book
|
Update goodreads API to `show original_publication_year`
|
Update goodreads API to `show original_publication_year`
|
Python
|
mit
|
avinassh/Reddit-GoodReads-Bot
|
59b015bb3e45497b7ec86bf1799e8442a30b65da
|
py/PMUtil.py
|
py/PMUtil.py
|
# PMUtil.py
# Phenotype microarray utility functions
#
# Author: Daniel A Cuevas
# Created on 27 Jan. 2015
# Updated on 27 Jan. 2015
from __future__ import absolute_import, division, print_function
import sys
import time
import datetime
def timeStamp():
'''Return time stamp'''
t = time.time()
fmt = '[%Y-%m-%d %H:%M:%S]'
return datetime.datetime.fromtimestamp(t).strftime(fmt)
def printStatus(msg):
'''Print status message'''
print('{} {}'.format(timeStamp(), msg), file=sys.stderr)
sys.stderr.flush()
|
# PMUtil.py
# Phenotype microarray utility functions
#
# Author: Daniel A Cuevas
# Created on 27 Jan 2015
# Updated on 20 Aug 2015
from __future__ import absolute_import, division, print_function
import sys
import time
import datetime
def timeStamp():
'''Return time stamp'''
t = time.time()
fmt = '[%Y-%m-%d %H:%M:%S]'
return datetime.datetime.fromtimestamp(t).strftime(fmt)
def printStatus(msg):
'''Print status message'''
print('{} {}'.format(timeStamp(), msg), file=sys.stderr)
sys.stderr.flush()
def exitScript(num=1):
'''Exit script'''
sys.exit(num)
|
Exit method. - (New) Added exit method.
|
Exit method.
- (New) Added exit method.
|
Python
|
mit
|
dacuevas/PMAnalyzer,dacuevas/PMAnalyzer,dacuevas/PMAnalyzer,dacuevas/PMAnalyzer
|
a8976ff1c3bdc177ca72becf48c4278f963d2627
|
gtr/__init__.py
|
gtr/__init__.py
|
__all__ = [
"gtr.services.funds.Funds",
"gtr.services.organisations.Organisations",
"gtr.services.persons.Persons",
"gtr.services.projects.Projects"
]
__version__ = "0.1.0"
from gtr.services.base import _Service
from gtr.services.funds import Funds
from gtr.services.organisations import Organisations
from gtr.services.persons import Persons
from gtr.services.projects import Projects
|
__all__ = [
"gtr.services.funds.Funds",
"gtr.services.organisations.Organisations",
"gtr.services.persons.Persons",
"gtr.services.projects.Projects",
"gtr.services.publications.Publications"
]
__version__ = "0.1.0"
from gtr.services.base import _Service
from gtr.services.funds import Funds
from gtr.services.organisations import Organisations
from gtr.services.persons import Persons
from gtr.services.projects import Projects
from gtr.services.publications import Publications
|
Add Publications class to initialisation
|
Add Publications class to initialisation
|
Python
|
apache-2.0
|
nestauk/gtr
|
63a26cbf76a3d0135f5b67dd10cc7f383ffa7ebf
|
helusers/jwt.py
|
helusers/jwt.py
|
from django.conf import settings
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from rest_framework_jwt.settings import api_settings
from .user_utils import get_or_create_user
def patch_jwt_settings():
"""Patch rest_framework_jwt authentication settings from allauth"""
defaults = api_settings.defaults
defaults['JWT_PAYLOAD_GET_USER_ID_HANDLER'] = (
__name__ + '.get_user_id_from_payload_handler')
if 'allauth.socialaccount' not in settings.INSTALLED_APPS:
return
from allauth.socialaccount.models import SocialApp
try:
app = SocialApp.objects.get(provider='helsinki')
except SocialApp.DoesNotExist:
return
defaults['JWT_SECRET_KEY'] = app.secret
defaults['JWT_AUDIENCE'] = app.client_id
# Disable automatic settings patching for now because it breaks Travis.
# patch_jwt_settings()
class JWTAuthentication(JSONWebTokenAuthentication):
def authenticate_credentials(self, payload):
return get_or_create_user(payload)
def get_user_id_from_payload_handler(payload):
return payload.get('sub')
|
from django.conf import settings
from rest_framework import exceptions
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from rest_framework_jwt.settings import api_settings
from .user_utils import get_or_create_user
def patch_jwt_settings():
"""Patch rest_framework_jwt authentication settings from allauth"""
defaults = api_settings.defaults
defaults['JWT_PAYLOAD_GET_USER_ID_HANDLER'] = (
__name__ + '.get_user_id_from_payload_handler')
if 'allauth.socialaccount' not in settings.INSTALLED_APPS:
return
from allauth.socialaccount.models import SocialApp
try:
app = SocialApp.objects.get(provider='helsinki')
except SocialApp.DoesNotExist:
return
defaults['JWT_SECRET_KEY'] = app.secret
defaults['JWT_AUDIENCE'] = app.client_id
# Disable automatic settings patching for now because it breaks Travis.
# patch_jwt_settings()
class JWTAuthentication(JSONWebTokenAuthentication):
def authenticate_credentials(self, payload):
user = super().authenticate_credentials(payload)
if user and not user.is_active:
msg = _('User account is disabled.')
raise exceptions.AuthenticationFailed(msg)
return get_or_create_user(payload)
def get_user_id_from_payload_handler(payload):
return payload.get('sub')
|
Change authenticate_credentials method to raise an exception if the account is disabled
|
Change authenticate_credentials method to raise an exception if the account is disabled
|
Python
|
bsd-2-clause
|
City-of-Helsinki/django-helusers,City-of-Helsinki/django-helusers
|
764f8d9d7818076555cde5fcad29f3052b523771
|
company/autocomplete_light_registry.py
|
company/autocomplete_light_registry.py
|
import autocomplete_light
from .models import Company
class CompanyAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = ['^name']
model = Company
autocomplete_light.register(CompanyAutocomplete)
|
import autocomplete_light
from .models import Company
class CompanyAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = ['name', 'official_name', 'common_name']
model = Company
autocomplete_light.register(CompanyAutocomplete)
|
Add more search fields to autocomplete
|
Add more search fields to autocomplete
|
Python
|
bsd-3-clause
|
KlubJagiellonski/pola-backend,KlubJagiellonski/pola-backend,KlubJagiellonski/pola-backend,KlubJagiellonski/pola-backend
|
a06010fcb2f4424d085da1487a6666867a8cbf5b
|
dbaas/maintenance/admin/maintenance.py
|
dbaas/maintenance/admin/maintenance.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for", "main_script", "rollback_script",
"host_query","maximum_workers", "status", "celery_task_id",)
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
def add_view(self, request, form_url='', extra_context=None):
return super(MaintenanceAdmin, self).add_view(request, form_url,
extra_context)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
from ..forms import MaintenanceForm
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for", "main_script", "rollback_script",
"host_query","maximum_workers", "status", "celery_task_id",)
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
form = MaintenanceForm
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
|
Remove add_view and add form for the hole admin
|
Remove add_view and add form for the hole admin
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
6f822cf46957d038588e7a71eb91f8ca9f9c95f1
|
scaffolder/commands/install.py
|
scaffolder/commands/install.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.template import TemplateManager
from scaffolder.core.commands import BaseCommand
class InstallCommand(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"-t",
"--target",
dest="target_dir",
default='~/.cookiejar',
help='Project Templates directory.',
metavar="TEMPLATES_DIR"
),
)
def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
help = 'install: Installs a Project Template.'
parser = OptionParser(
version=self.get_version(),
option_list=self.get_option_list(),
usage='\n %prog {0} ACTION [OPTIONS]'.format(name)
)
aliases = ('tmp',)
BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
def run(self, *args, **options):
src = args[0]
tgt = options.get('target_dir')
manager = TemplateManager()
manager.install(src=src, dest=tgt)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from optparse import make_option
from optparse import OptionParser
from scaffolder import get_minion_path
from scaffolder.core.template import TemplateManager
from scaffolder.core.commands import BaseCommand
class InstallCommand(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"-t",
"--target",
dest="target_dir",
default=get_minion_path('weaver'),
help='Project Templates directory.',
metavar="TEMPLATES_DIR"
),
)
def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
help = 'install: Installs a Project Template.'
parser = OptionParser(
version=self.get_version(),
option_list=self.get_option_list(),
usage='\n %prog {0} ACTION [OPTIONS]'.format(name)
)
aliases = ('tmp',)
BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
def run(self, *args, **options):
src = args[0]
tgt = options.get('target_dir')
manager = TemplateManager()
manager.install(src=src, dest=tgt)
|
Use get_minion_path to get default dir.
|
InstallCommand: Use get_minion_path to get default dir.
|
Python
|
mit
|
goliatone/minions
|
95d9bb3a9500d80b5064c5fb4d5bd7b30406d1ae
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile, CMake
class GrpccbConan(ConanFile):
name = "grpc_cb_core"
version = "0.2"
license = "Apache-2.0"
url = "https://github.com/jinq0123/grpc_cb_core"
description = "C++ gRPC core library with callback interface."
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=False"
requires = "grpc/1.17.2@inexorgame/stable",
generators = "cmake", "Premake" # A custom generator: PremakeGen/0.1@memsharded/testing
build_requires = "PremakeGen/0.1@memsharded/testing"
exports_sources = "src*", "include*", "CMakeLists.txt"
def build(self):
cmake = CMake(self)
self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("include/*")
self.copy("*.lib", dst="lib", keep_path=False)
self.copy("*.dll", dst="bin", keep_path=False)
self.copy("*.dylib*", dst="lib", keep_path=False)
self.copy("*.so", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["grpc_cb_core"]
|
from conans import ConanFile, CMake
class GrpccbConan(ConanFile):
name = "grpc_cb_core"
version = "0.2"
license = "Apache-2.0"
url = "https://github.com/jinq0123/grpc_cb_core"
description = "C++ gRPC core library with callback interface."
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False]}
default_options = "shared=False"
requires = "grpc/1.44.0@",
generators = "cmake", "premake" # The builtin premake generator
exports_sources = "src*", "include*", "CMakeLists.txt"
def build(self):
cmake = CMake(self)
self.run('cmake %s %s' % (self.source_folder, cmake.command_line))
self.run("cmake --build . %s" % cmake.build_config)
def package(self):
self.copy("include/*")
self.copy("*.lib", dst="lib", keep_path=False)
self.copy("*.dll", dst="bin", keep_path=False)
self.copy("*.dylib*", dst="lib", keep_path=False)
self.copy("*.so", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["grpc_cb_core"]
|
Fix update remote to ConanCenter and grpc to highest buildable/supported version
|
Fix update remote to ConanCenter and grpc to highest buildable/supported version
|
Python
|
apache-2.0
|
jinq0123/grpc_cb_core,jinq0123/grpc_cb_core,jinq0123/grpc_cb_core
|
c13a12e6355423d6756b8b514942596c31b0e3a9
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.7"
class CMakeModuleCommonConan(ConanFile):
name = "cmake-module-common"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
url = "http://github.com/polysquare/cmake-module-common"
license = "MIT"
def source(self):
zip_name = "cmake-module-common.zip"
download("https://github.com/polysquare/"
"cmake-module-common/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="cmake-module-common-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/cmake-module-common",
src="cmake-module-common-" + VERSION,
keep_path=True)
|
from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.7"
class CMakeModuleCommonConan(ConanFile):
name = "cmake-module-common"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
url = "http://github.com/polysquare/cmake-module-common"
license = "MIT"
requires = ("cmake-unit/master@smspillaz/cmake-unit",
"cmake-linter-cmake/master@smspillaz/cmake-linter-cmake",
"style-linter-cmake/master@smspillaz/style-linter-cmake")
def source(self):
zip_name = "cmake-module-common.zip"
download("https://github.com/polysquare/"
"cmake-module-common/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="cmake-module-common-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/cmake-module-common",
src="cmake-module-common-" + VERSION,
keep_path=True)
|
Make cmake-unit, cmake-linter-cmake and style-linter-cmake normal deps
|
conan: Make cmake-unit, cmake-linter-cmake and style-linter-cmake normal deps
|
Python
|
mit
|
polysquare/cmake-module-common
|
306e6939c5b369f4a4ef4bb4d16948dc1f027f53
|
tests/test_initial_ismaster.py
|
tests/test_initial_ismaster.py
|
# Copyright 2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from mockupdb import MockupDB, wait_until
from pymongo import MongoClient
from tests import unittest
class TestInitialIsMaster(unittest.TestCase):
def test_initial_ismaster(self):
server = MockupDB()
server.run()
self.addCleanup(server.stop)
start = time.time()
client = MongoClient(server.uri)
self.addCleanup(client.close)
# A single ismaster is enough for the client to be connected.
self.assertIsNone(client.address)
server.receives('ismaster').ok()
wait_until(lambda: client.address is not None,
'update address', timeout=1)
# At least 10 seconds before next heartbeat.
server.receives('ismaster').ok()
self.assertGreaterEqual(time.time() - start, 10)
if __name__ == '__main__':
unittest.main()
|
# Copyright 2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from mockupdb import MockupDB, wait_until
from pymongo import MongoClient
from tests import unittest
class TestInitialIsMaster(unittest.TestCase):
def test_initial_ismaster(self):
server = MockupDB()
server.run()
self.addCleanup(server.stop)
start = time.time()
client = MongoClient(server.uri)
self.addCleanup(client.close)
# A single ismaster is enough for the client to be connected.
self.assertFalse(client.nodes)
server.receives('ismaster').ok(ismaster=True)
wait_until(lambda: client.nodes,
'update nodes', timeout=1)
# At least 10 seconds before next heartbeat.
server.receives('ismaster').ok(ismaster=True)
self.assertGreaterEqual(time.time() - start, 10)
if __name__ == '__main__':
unittest.main()
|
Update for PYTHON 985: MongoClient properties now block until connected.
|
Update for PYTHON 985: MongoClient properties now block until connected.
|
Python
|
apache-2.0
|
ajdavis/pymongo-mockup-tests
|
af5e90cb544e2e37819302f5750084fc17f7ee12
|
make_example.py
|
make_example.py
|
#!/usr/bin/env python
import os
import sys
import yaml
import subprocess
class SDBUSPlus(object):
def __init__(self, path):
self.path = path
def __call__(self, *a, **kw):
args = [
os.path.join(self.path, 'sdbus++'),
'-t',
os.path.join(self.path, 'templates')
]
subprocess.call(args + list(a), **kw)
if __name__ == '__main__':
sdbusplus = None
for p in os.environ.get('PATH', "").split(os.pathsep):
if os.path.exists(os.path.join(p, 'sdbus++')):
sdbusplus = SDBUSPlus(p)
break
if sdbusplus is None:
sys.stderr.write('Cannot find sdbus++\n')
sys.exit(1)
genfiles = {
'server-cpp': lambda x: '%s.cpp' % x,
'server-header': lambda x: os.path.join(
os.path.join(*x.split('.')), 'server.hpp')
}
with open(os.path.join('example', 'interfaces.yaml'), 'r') as fd:
interfaces = yaml.load(fd.read())
for i in interfaces:
for process, f in genfiles.iteritems():
dest = f(i)
parent = os.path.dirname(dest)
if parent and not os.path.exists(parent):
os.makedirs(parent)
with open(dest, 'w') as fd:
sdbusplus(
'-r',
os.path.join('example', 'interfaces'),
'interface',
process,
i,
stdout=fd)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
#!/usr/bin/env python
import os
import sys
import yaml
import subprocess
if __name__ == '__main__':
genfiles = {
'server-cpp': lambda x: '%s.cpp' % x,
'server-header': lambda x: os.path.join(
os.path.join(*x.split('.')), 'server.hpp')
}
with open(os.path.join('example', 'interfaces.yaml'), 'r') as fd:
interfaces = yaml.load(fd.read())
for i in interfaces:
for process, f in genfiles.iteritems():
dest = f(i)
parent = os.path.dirname(dest)
if parent and not os.path.exists(parent):
os.makedirs(parent)
with open(dest, 'w') as fd:
subprocess.call([
'sdbus++',
'-r',
os.path.join('example', 'interfaces'),
'interface',
process,
i],
stdout=fd)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
Remove sdbus++ template search workaround
|
Remove sdbus++ template search workaround
sdbus++ was fixed upstream to find its templates automatically.
Change-Id: I29020b9d1ea4ae8baaca5fe869625a3d96cd6eaf
Signed-off-by: Brad Bishop <713d098c0be4c8fd2bf36a94cd08699466677ecd@fuzziesquirrel.com>
|
Python
|
apache-2.0
|
openbmc/phosphor-inventory-manager,openbmc/phosphor-inventory-manager
|
1e07e9424a1ac69e1e660e6a6f1e58bba15472c1
|
make_spectra.py
|
make_spectra.py
|
# -*- coding: utf-8 -*-
import halospectra as hs
import randspectra as rs
import sys
snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base="/home/spb/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n256"
savedir="/home/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6/snapdir_"+str(snapnum).rjust(3,'0')
#halo = hs.HaloSpectra(snapnum, base,3, savefile="halo_spectra_DLA.hdf5", savedir=savedir)
halo = rs.RandSpectra(snapnum, base,numlos=3000,savedir=savedir, savefile="rand_spectra_DLA.hdf5")
halo.get_tau("Si",2,2)
halo.get_tau("H",1,1)
halo.get_col_density("Z",-1)
halo.get_col_density("H",-1)
halo.save_file()
|
# -*- coding: utf-8 -*-
import halospectra as hs
import randspectra as rs
import sys
snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base="/home/spb/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n256"
savedir="/home/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6/snapdir_"+str(snapnum).rjust(3,'0')
#halo = hs.HaloSpectra(snapnum, base,3, savefile="halo_spectra_DLA.hdf5", savedir=savedir)
halo = rs.RandSpectra(snapnum, base,numlos=10000,savedir=savedir, savefile="rand_spectra.hdf5")
#halo.get_observer_tau("Si",2)
halo.get_tau("H",1,1)
#halo.get_col_density("Z",-1)
#halo.get_col_density("H",-1)
halo.save_file()
|
Implement saving and loading the observer tau
|
Implement saving and loading the observer tau
|
Python
|
mit
|
sbird/vw_spectra
|
8316a60ba2887a511579e8cedb90b3a02fc1889a
|
dope/util.py
|
dope/util.py
|
from uuid import UUID
from werkzeug.routing import BaseConverter
class UUIDConverter(BaseConverter):
to_python = UUID
to_url = str
|
from uuid import UUID
from werkzeug.routing import BaseConverter
class UUIDConverter(BaseConverter):
to_python = UUID
def to_url(self, obj):
return str(obj).replace('-', '')
|
Drop dashes from download urls.
|
Drop dashes from download urls.
|
Python
|
mit
|
mbr/dope,mbr/dope
|
9d46df1680e3d799971e73ec73043c2a6c0590ce
|
scripts/build_tar.py
|
scripts/build_tar.py
|
#! /usr/bin/python
import os
import subprocess
root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
tarfile = os.path.join(root_dir, "src_pkg.tar")
def _is_dir_newer(directory, filename):
file_mtime = os.stat(filename).st_mtime
for dirname, _, filenames in os.walk(directory):
for filename in filenames:
if filename.endswith(".pyc"):
continue
if _is_file_newer(os.path.join(dirname, filename), file_mtime):
return True
return False
def _is_file_newer(filename, file_mtime):
return os.stat(filename).st_mtime > file_mtime
def _tar():
if 0 != subprocess.call("tar cvf {0} flask_app manage.py static".format(tarfile), shell=True, cwd=root_dir):
raise Exception("Tar failed")
if __name__ == '__main__':
if not os.path.exists(tarfile) or \
_is_dir_newer(os.path.join(root_dir, "flask_app"), tarfile) or \
_is_dir_newer(os.path.join(root_dir, "static"), tarfile) or \
_is_file_newer(os.path.join(root_dir, "manage.py"), os.stat(tarfile).st_mtime):
_tar()
|
#! /usr/bin/python
import os
import subprocess
root_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
tarfile = os.path.join(root_dir, "src_pkg.tar")
def _is_dir_newer(directory, filename):
file_mtime = os.stat(filename).st_mtime
for dirname, _, filenames in os.walk(directory):
if _is_file_newer(dirname, file_mtime):
return True
for filename in filenames:
if filename.endswith(".pyc"):
continue
if _is_file_newer(os.path.join(dirname, filename), file_mtime):
return True
return False
def _is_file_newer(filename, file_mtime):
returned = os.stat(filename).st_mtime > file_mtime
return returned
def _tar():
if 0 != subprocess.call("tar cvf {0} flask_app manage.py static".format(tarfile), shell=True, cwd=root_dir):
raise Exception("Tar failed")
if __name__ == '__main__':
if not os.path.exists(tarfile) or \
_is_dir_newer(os.path.join(root_dir, "flask_app"), tarfile) or \
_is_dir_newer(os.path.join(root_dir, "static"), tarfile) or \
_is_file_newer(os.path.join(root_dir, "manage.py"), os.stat(tarfile).st_mtime):
_tar()
|
Fix building tar in deployment
|
Fix building tar in deployment
|
Python
|
bsd-3-clause
|
vmalloc/mailboxer,Infinidat/lanister,vmalloc/mailboxer,Infinidat/lanister,getslash/mailboxer,vmalloc/mailboxer,getslash/mailboxer,getslash/mailboxer
|
a1390619619a364b9fab13504fb5c2464491d449
|
Largest_Palindrome_Product.py
|
Largest_Palindrome_Product.py
|
# Find the largest palindrome made from the product of two n-digit numbers.
# Since the result could be very large, you should return the largest palindrome mod 1337.
# Example:
# Input: 2
# Output: 987
# Explanation: 99 x 91 = 9009, 9009 % 1337 = 987
# Note:
# The range of n is [1,8].
def largestPalindrome(n):
"""
:type n: int
:rtype: int
"""
number = ""
for x in range(n):
number += "9"
minNum = int(number[:-1])
number = int(number)
palindrome = 0
for x in range(number, minNum, -2):
if (x**2) < palindrome:
break
for i in range(number, x - 1, -2):
product = x * i
if product <= palindrome or product % 11 != 0:
break
elif isPalindrome(product):
palindrome = product
print(palindrome, palindrome % 1337)
break
return (palindrome, palindrome % 1337)
def isPalindrome(num):
""" Return True is number is Palindrome, else return False """
numString = str(num)
if numString == numString[::-1]:
return True
return False
n = 8
print(largestPalindrome(n))
# for i in range(upper, int((x*x)**.5), -2):
# 990090099 152 99999 9901 99998 76865
|
# Find the largest palindrome made from the product of two n-digit numbers.
# Since the result could be very large, you should return the largest palindrome mod 1337.
# Example:
# Input: 2
# Output: 987
# Explanation: 99 x 91 = 9009, 9009 % 1337 = 987
# Note:
# The range of n is [1,8].
from itertools import product
def largestPalindrome(n):
"""
:type n: int
:rtype: int
"""
number = ""
for x in range(n):
number += "9"
number = int(number)
palindrome = 0
for x in range(number, 1, -2):
if (x*x) < palindrome:
break
for i in range(number, x - 1, -2):
product = x * i
if product < palindrome:
break
elif isPalindrome(product):
palindrome = product
break
return palindrome % 1337
def isPalindrome(num):
""" Return True is number is Palindrome, else return False """
return str(num) == str(num)[::-1]
n = 7
print(largestPalindrome(n))
|
Refactor Largest Palindrome Product for range of n is
|
Refactor Largest Palindrome Product for range of n is [1,8]
|
Python
|
mit
|
Kunal57/Python_Algorithms
|
de4af7935c1c8d6751c5a71ad90dd5f531f7a1b0
|
bin/trigger_upload.py
|
bin/trigger_upload.py
|
#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
fedimg.uploader.upload(upload_pool, [url],
compose_id=compose_id,
push_notifications=push_notifications)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
def main():
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
if __name__ == '__main__':
main()
|
#!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(url, compose_id, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
fedimg.uploader.upload(upload_pool, [url],
compose_id=compose_id,
push_notifications=push_notifications)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
def main():
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
if __name__ == '__main__':
main()
|
Fix the script function args
|
fedimg: Fix the script function args
Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com>
|
Python
|
agpl-3.0
|
fedora-infra/fedimg,fedora-infra/fedimg
|
166bff52496bfb47c5a3a03585bd10fb449b8d77
|
Lib/curses/__init__.py
|
Lib/curses/__init__.py
|
"""curses
The main package for curses support for Python. Normally used by importing
the package, and perhaps a particular module inside it.
import curses
from curses import textpad
curses.initwin()
...
"""
__revision__ = "$Id$"
from _curses import *
from curses.wrapper import wrapper
|
"""curses
The main package for curses support for Python. Normally used by importing
the package, and perhaps a particular module inside it.
import curses
from curses import textpad
curses.initwin()
...
"""
__revision__ = "$Id$"
from _curses import *
from curses.wrapper import wrapper
# Some constants, most notably the ACS_* ones, are only added to the C
# _curses module's dictionary after initscr() is called. (Some
# versions of SGI's curses don't define values for those constants
# until initscr() has been called.) This wrapper function calls the
# underlying C initscr(), and then copies the constants from the
# _curses module to the curses package's dictionary. Don't do 'from
# curses import *' if you'll be needing the ACS_* constants.
def initscr():
import _curses, curses
stdscr = _curses.initscr()
for key, value in _curses.__dict__.items():
if key[0:4] == 'ACS_' or key in ('LINES', 'COLS'):
setattr(curses, key, value)
return stdscr
|
Add wrapper for initscr() to copy the ACS_ and LINES,COLS bindings
|
Add wrapper for initscr() to copy the ACS_ and LINES,COLS bindings
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
17faea99343e37036b7ee35e5d3273f98a52dba9
|
Python/tomviz/utils.py
|
Python/tomviz/utils.py
|
import numpy as np
import vtk.numpy_interface.dataset_adapter as dsa
def get_scalars(dataobject):
do = dsa.WrapDataObject(dataobject)
# get the first
rawarray = do.PointData.GetScalars()
vtkarray = dsa.vtkDataArrayToVTKArray(rawarray, do)
vtkarray.Association = dsa.ArrayAssociation.POINT
return vtkarray
def set_scalars(dataobject, newscalars):
do = dsa.WrapDataObject(dataobject)
oldscalars = do.PointData.GetScalars()
name = oldscalars.GetName()
del oldscalars
do.PointData.append(newscalars, name)
do.PointData.SetActiveScalars(name)
|
import numpy as np
import vtk.numpy_interface.dataset_adapter as dsa
import vtk.util.numpy_support as np_s
def get_scalars(dataobject):
do = dsa.WrapDataObject(dataobject)
# get the first
rawarray = do.PointData.GetScalars()
vtkarray = dsa.vtkDataArrayToVTKArray(rawarray, do)
vtkarray.Association = dsa.ArrayAssociation.POINT
return vtkarray
def set_scalars(dataobject, newscalars):
do = dsa.WrapDataObject(dataobject)
oldscalars = do.PointData.GetScalars()
name = oldscalars.GetName()
del oldscalars
# handle the case if the newscalars array has a type that
# cannot be passed on to VTK. In which case, we convert to
# convert to float64
vtk_typecode = np_s.get_vtk_array_type(newscalars.dtype)
if vtk_typecode is None:
newscalars = newscalars.astype(np.float64)
do.PointData.append(newscalars, name)
do.PointData.SetActiveScalars(name)
|
Fix numpy related errors on Mavericks.
|
Fix numpy related errors on Mavericks.
The problem was due to the fact that operations (like sqrt) can return a
float16 arrays which cannot be passed back to VTK directly. Added a
temporary conversion to float64. We should potentially handle this in
VTK.
|
Python
|
bsd-3-clause
|
cryos/tomviz,thewtex/tomviz,cjh1/tomviz,cryos/tomviz,cryos/tomviz,Hovden/tomviz,Hovden/tomviz,yijiang1/tomviz,cjh1/tomviz,thewtex/tomviz,OpenChemistry/tomviz,mathturtle/tomviz,yijiang1/tomviz,cjh1/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,thewtex/tomviz,OpenChemistry/tomviz,mathturtle/tomviz,OpenChemistry/tomviz
|
e753038de039fd23f0d59bb0094f59fc73efe22b
|
flask_apscheduler/json.py
|
flask_apscheduler/json.py
|
import flask
import json
from datetime import datetime
from apscheduler.job import Job
from .utils import job_to_dict
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime):
return obj.isoformat()
if isinstance(obj, Job):
return job_to_dict(obj)
return super(JSONEncoder, self).default(obj)
def dumps(obj, indent=None):
return json.dumps(obj, indent=indent, cls=JSONEncoder)
def jsonify(data, status=None):
indent = None
if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
indent = 2
return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
|
import datetime
import flask
import json
from apscheduler.job import Job
from .utils import job_to_dict
loads = json.loads
def dumps(obj, indent=None):
return json.dumps(obj, indent=indent, cls=JSONEncoder)
def jsonify(data, status=None):
indent = None
if flask.current_app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not flask.request.is_xhr:
indent = 2
return flask.current_app.response_class(dumps(data, indent=indent), status=status, mimetype='application/json')
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
if isinstance(obj, datetime.date):
return obj.isoformat()
if isinstance(obj, Job):
return job_to_dict(obj)
return super(JSONEncoder, self).default(obj)
|
Set a custom JSON Encoder to serialize date class.
|
Set a custom JSON Encoder to serialize date class.
|
Python
|
apache-2.0
|
viniciuschiele/flask-apscheduler
|
edcfe2b156af23943478bc86592b4c8d5dc07e10
|
flask_mongoengine/json.py
|
flask_mongoengine/json.py
|
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
from mongoengine import QuerySet
def _make_encoder(superclass):
class MongoEngineJSONEncoder(superclass):
'''
A JSONEncoder which provides serialization of MongoEngine
documents and querysets.
'''
def default(self, obj):
if isinstance(obj, BaseDocument):
return json_util._json_convert(obj.to_mongo())
elif isinstance(obj, QuerySet):
return json_util._json_convert(obj.as_pymongo())
return superclass.default(self, obj)
return MongoEngineJSONEncoder
MongoEngineJSONEncoder = _make_encoder(JSONEncoder)
def overide_json_encoder(app):
'''
A function to dynamically create a new MongoEngineJSONEncoder class
based upon a custom base class.
This function allows us to combine MongoEngine serialization with
any changes to Flask's JSONEncoder which a user may have made
prior to calling init_app.
NOTE: This does not cover situations where users override
an instance's json_encoder after calling init_app.
'''
app.json_encoder = _make_encoder(app.json_encoder)
|
from flask.json import JSONEncoder
from bson import json_util
from mongoengine.base import BaseDocument
try:
from mongoengine.base import BaseQuerySet
except ImportError as ie: # support mongoengine < 0.7
from mongoengine.queryset import QuerySet as BaseQuerySet
def _make_encoder(superclass):
class MongoEngineJSONEncoder(superclass):
'''
A JSONEncoder which provides serialization of MongoEngine
documents and queryset objects.
'''
def default(self, obj):
if isinstance(obj, BaseDocument):
return json_util._json_convert(obj.to_mongo())
elif isinstance(obj, BaseQuerySet):
return json_util._json_convert(obj.as_pymongo())
return superclass.default(self, obj)
return MongoEngineJSONEncoder
MongoEngineJSONEncoder = _make_encoder(JSONEncoder)
def overide_json_encoder(app):
'''
A function to dynamically create a new MongoEngineJSONEncoder class
based upon a custom base class.
This function allows us to combine MongoEngine serialization with
any changes to Flask's JSONEncoder which a user may have made
prior to calling init_app.
NOTE: This does not cover situations where users override
an instance's json_encoder after calling init_app.
'''
app.json_encoder = _make_encoder(app.json_encoder)
|
Support older versions of MongoEngine
|
Support older versions of MongoEngine
|
Python
|
bsd-3-clause
|
gerasim13/flask-mongoengine-1,rochacbruno/flask-mongoengine,quokkaproject/flask-mongoengine,quokkaproject/flask-mongoengine,gerasim13/flask-mongoengine-1,losintikfos/flask-mongoengine,rochacbruno/flask-mongoengine,losintikfos/flask-mongoengine
|
3d7b5d61b7e985d409cd50c98d4bcbdc8ab9c723
|
mailer.py
|
mailer.py
|
from marrow.mailer import Mailer as MarrowMailer
from message import Message
import sys
class Mailer:
MAILER = MarrowMailer(dict(manager=dict(use='immediate'), transport=dict(use='sendmail')))
@staticmethod
def send(message):
Mailer.MAILER.send(message)
@staticmethod
def start():
Mailer.MAILER.start()
@staticmethod
def stop():
Mailer.MAILER.stop()
@staticmethod
def send_transactions(transactions, to_addr):
Mailer.start()
message = Message(
to=to_addr,
subject='New transactions',
plain=repr(transactions)
)
Mailer.send(message)
Mailer.stop()
@staticmethod
def get_cli_email_addr():
try:
return sys.argv[1]
except IndexError:
return None
|
from marrow.mailer import Mailer as MarrowMailer
from message import Message
import sys
import os
import pwd
import socket
class Mailer:
MAILER = MarrowMailer(dict(manager=dict(use='immediate'), transport=dict(use='sendmail')))
DEFAULT_AUTHOR = pwd.getpwuid(os.getuid()).pw_name + '@' + socket.getfqdn()
@staticmethod
def send(message):
Mailer.MAILER.send(message)
@staticmethod
def start():
Mailer.MAILER.start()
@staticmethod
def stop():
Mailer.MAILER.stop()
@staticmethod
def send_transactions(transactions, to_addr):
Mailer.start()
message = Message(
author=Mailer.DEFAULT_AUTHOR,
to=to_addr,
subject='New transactions',
plain=repr(transactions)
)
Mailer.send(message)
Mailer.stop()
@staticmethod
def get_cli_email_addr():
try:
return sys.argv[1]
except IndexError:
return None
|
Use current user as email author
|
Use current user as email author
|
Python
|
isc
|
2mv/raapija
|
65973802a3e68e23f9a903937ef94f8afa277013
|
ibmcnx/doc/DataSources.py
|
ibmcnx/doc/DataSources.py
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
for db in dbs.splitlines().split('('):
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.splitlines()
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
Create documentation of DataSource Settings
|
: Create documentation of DataSource Settings
Task-Url:
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
93f2ff45ff3d61487ed061ae3d1a65051c3d1799
|
django/contrib/admin/__init__.py
|
django/contrib/admin/__init__.py
|
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
# ACTION_CHECKBOX_NAME is unused, but should stay since its import from here
# has been referenced in documentation.
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.options import ModelAdmin, HORIZONTAL, VERTICAL
from django.contrib.admin.options import StackedInline, TabularInline
from django.contrib.admin.sites import AdminSite, site
def autodiscover():
"""
Auto-discover INSTALLED_APPS admin.py modules and fail silently when
not present. This forces an import on them to register any admin bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
for app in settings.INSTALLED_APPS:
mod = import_module(app)
# Attempt to import the app's admin module.
try:
before_import_registry = copy.copy(site._registry)
import_module('%s.admin' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
# (see #8245).
site._registry = before_import_registry
# Decide whether to bubble up this error. If the app just
# doesn't have an admin module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
if module_has_submodule(mod, 'admin'):
raise
|
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
|
Revert the removal of an unused import (in [14175]) that was referenced in documentation. Thanks for noticing, clong.
git-svn-id: 554f83ef17aa7291f84efa897c1acfc5d0035373@14359 bcc190cf-cafb-0310-a4f2-bffc1f526a37
|
Python
|
bsd-3-clause
|
svn2github/django,svn2github/django,svn2github/django
|
445f244ddac6001b65f03d058a14178a19919eed
|
diamondash/config.py
|
diamondash/config.py
|
import yaml
from diamondash import utils
class ConfigError(Exception):
"""Raised when there is an error parsing a configuration"""
class ConfigMetaClass(type):
def __new__(mcs, name, bases, dict):
cls = type.__new__(mcs, name, bases, dict)
defaults = {}
for base in bases:
if hasattr(base, 'DEFAULTS'):
defaults.update(base.DEFAULTS)
defaults.update(cls.DEFAULTS)
cls.DEFAULTS = defaults
return cls
class Config(dict):
__metaclass__ = ConfigMetaClass
DEFAULTS = {}
def __init__(self, items):
super(Config, self).__init__(self._parse(items))
@classmethod
def parse(cls, items):
return items
@classmethod
def _parse(cls, items):
items = utils.add_dicts(cls.DEFAULTS, items)
return cls.parse(items)
@classmethod
def from_file(cls, filename, **defaults):
items = utils.add_dicts(defaults, yaml.safe_load(open(filename)))
return cls(items)
@classmethod
def for_type(cls, type_name):
type_cls = utils.load_class_by_string(type_name)
return type_cls.CONFIG_CLS
|
import yaml
from diamondash import utils
class ConfigError(Exception):
"""Raised when there is an error parsing a configuration"""
class ConfigMetaClass(type):
def __new__(mcs, name, bases, dict):
cls = type.__new__(mcs, name, bases, dict)
defaults = {}
for base in bases:
if hasattr(base, 'DEFAULTS'):
defaults.update(base.DEFAULTS)
defaults.update(cls.DEFAULTS)
cls.DEFAULTS = defaults
return cls
class Config(dict):
__metaclass__ = ConfigMetaClass
DEFAULTS = {}
def __init__(self, items=None):
super(Config, self).__init__(self._parse(items or {}))
@classmethod
def parse(cls, items):
return items
@classmethod
def _parse(cls, items):
items = utils.add_dicts(cls.DEFAULTS, items)
return cls.parse(items)
@classmethod
def from_file(cls, filename, **defaults):
items = utils.add_dicts(defaults, yaml.safe_load(open(filename)))
return cls(items)
@classmethod
def for_type(cls, type_name):
type_cls = utils.load_class_by_string(type_name)
return type_cls.CONFIG_CLS
|
Allow Config to be initialised without any args
|
Allow Config to be initialised without any args
|
Python
|
bsd-3-clause
|
praekelt/diamondash,praekelt/diamondash,praekelt/diamondash
|
bfcec696308ee8bfd226a54c17a7e15d49e2aed7
|
var/spack/repos/builtin/packages/nextflow/package.py
|
var/spack/repos/builtin/packages/nextflow/package.py
|
from spack import *
from glob import glob
import os
class Nextflow(Package):
"""Data-driven computational pipelines"""
homepage = "http://www.nextflow.io"
version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a',
url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow',
expand=False)
depends_on('jdk')
def unpack(self):
pass
def install(self, spec, prefix):
chmod = which('chmod')
mkdirp(prefix.bin)
install("nextflow", join_path(prefix.bin, "nextflow"))
chmod('+x', join_path(prefix.bin, "nextflow"))
|
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Nextflow(Package):
"""Data-driven computational pipelines"""
homepage = "http://www.nextflow.io"
version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a',
url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow',
expand=False)
depends_on('jdk')
def unpack(self):
pass
def install(self, spec, prefix):
mkdirp(prefix.bin)
install("nextflow", join_path(prefix.bin, "nextflow"))
set_executable( join_path(prefix.bin, "nextflow"))
|
Add standard header, use spack helpers
|
Add standard header, use spack helpers
Added the standard header (stolen from R).
Touched up the install to use set_executable rather than doing it
myself.
|
Python
|
lgpl-2.1
|
matthiasdiener/spack,mfherbst/spack,lgarren/spack,tmerrick1/spack,TheTimmy/spack,LLNL/spack,tmerrick1/spack,TheTimmy/spack,TheTimmy/spack,matthiasdiener/spack,LLNL/spack,iulian787/spack,matthiasdiener/spack,krafczyk/spack,tmerrick1/spack,EmreAtes/spack,TheTimmy/spack,tmerrick1/spack,iulian787/spack,matthiasdiener/spack,mfherbst/spack,iulian787/spack,tmerrick1/spack,EmreAtes/spack,skosukhin/spack,skosukhin/spack,krafczyk/spack,lgarren/spack,LLNL/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,lgarren/spack,skosukhin/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,TheTimmy/spack,skosukhin/spack,iulian787/spack,lgarren/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,LLNL/spack,lgarren/spack,mfherbst/spack,EmreAtes/spack,skosukhin/spack,krafczyk/spack
|
e81b1ce7536ce32e022fb3132f8468d2472b2e31
|
atlas/prodtask/management/commands/extendopenended.py
|
atlas/prodtask/management/commands/extendopenended.py
|
from django.core.management.base import BaseCommand, CommandError
from atlas.prodtask.open_ended import check_open_ended
class Command(BaseCommand):
args = '<request_id, request_id>'
help = 'Extend open ended requests'
def handle(self, *args, **options):
if not args:
try:
check_open_ended()
except Exception,e:
raise CommandError('Some problem during request extension: %s'%e)
self.stdout.write('Successfully finished request extension')
|
from django.core.management.base import BaseCommand, CommandError
import time
from atlas.prodtask.open_ended import check_open_ended
class Command(BaseCommand):
args = '<request_id, request_id>'
help = 'Extend open ended requests'
def handle(self, *args, **options):
self.stdout.write('Start open ended at %s'%time.ctime())
if not args:
try:
check_open_ended()
except Exception,e:
raise CommandError('Some problem during request extension: %s'%e)
self.stdout.write('Successfully finished request extension: %s'%time.ctime())
|
Improve logging of openended extension
|
Improve logging of openended extension
|
Python
|
apache-2.0
|
PanDAWMS/panda-bigmon-atlas,PanDAWMS/panda-bigmon-atlas,PanDAWMS/panda-bigmon-atlas,PanDAWMS/panda-bigmon-atlas
|
6632157febfed7ce99fa1aaecb72393b0301d3aa
|
geotrek/authent/migrations/0003_auto_20181203_1518.py
|
geotrek/authent/migrations/0003_auto_20181203_1518.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
from django.conf import settings
def add_permissions(apps, schema_editor):
if 'geotrek.infrastructure' in settings.INSTALLED_APPS:
call_command('update_geotrek_permissions', verbosity=0)
UserModel = apps.get_model('auth', 'User')
GroupModel = apps.get_model('auth', 'Group')
PermissionModel = apps.get_model('auth', 'Permission')
ContentTypeModel = apps.get_model("contenttypes", "ContentType")
type_permissions = ['add', 'change', 'change_geom', 'delete', 'export', 'read']
content_type_signage = ContentTypeModel.objects.get(model='signage')
content_type_infrastructure = ContentTypeModel.objects.get(model='infrastructure')
for user in UserModel.objects.all():
for type_perm in type_permissions:
if user.user_permissions.filter(codename='%s_infrastructure' % type_perm).exists():
user.user_permissions.add(PermissionModel.objects.get(
codename='%s_infrastructure' % type_perm, content_type=content_type_infrastructure))
if user.user_permissions.filter(codename='%s_signage' % type_perm).exists():
user.user_permissions.add(PermissionModel.objects.get(
codename='%s_signage' % type_perm, content_type=content_type_signage))
for group in GroupModel.objects.all():
for type_perm in type_permissions:
if group.permissions.filter(codename='%s_infrastructure' % type_perm).exists():
group.permissions.add(PermissionModel.objects.get(
codename='%s_infrastructure' % type_perm, content_type=content_type_infrastructure))
if group.permissions.filter(codename='%s_signage' % type_perm).exists():
group.permissions.add(PermissionModel.objects.get(
codename='%s_signage' % type_perm, content_type=content_type_signage))
PermissionModel.objects.filter(content_type__model='baseinfrastructure').delete()
class Migration(migrations.Migration):
dependencies = [
('authent', '0002_auto_20181107_1620'),
]
operations = [
migrations.RunPython(add_permissions)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('authent', '0002_auto_20181107_1620'),
]
operations = [
]
|
Make empty migration authent 3
|
Make empty migration authent 3
|
Python
|
bsd-2-clause
|
makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek
|
0324d220872ef063cb39ce62264bd4835f260920
|
test_project/urls.py
|
test_project/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from test_app.models import DummyModel, MushroomSpot
from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint
from mapentity.registry import registry
handler403 = 'mapentity.views.handler403'
admin.autodiscover()
models_urls = registry.register(DummyModel) + registry.register(MushroomSpot)
urlpatterns = [
url(r'', include(models_urls, namespace='test_app')),
url(r'', include('mapentity.urls', namespace='mapentity',
app_name='mapentity')),
url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'),
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout',),
url(r'^paperclip/', include('paperclip.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"),
url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from test_app.models import DummyModel, MushroomSpot
from test_app.views import DummyDocumentOdt, DummyDocumentWeasyprint
from mapentity.registry import registry
from django.contrib.auth import views as auth_views
handler403 = 'mapentity.views.handler403'
admin.autodiscover()
models_urls = registry.register(DummyModel) + registry.register(MushroomSpot)
urlpatterns = [
url(r'', include(models_urls, namespace='test_app')),
url(r'', include('mapentity.urls', namespace='mapentity',
app_name='mapentity')),
url(r'^home/$', RedirectView.as_view(url='/', permanent=True), name='home'),
url(r'^login/$', auth_views.login, name='login'),
url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout',),
url(r'^paperclip/', include('paperclip.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^test/document/dummymodel-(?P<pk>\d+).odt', DummyDocumentOdt.as_view(), name="dummymodel_odt"),
url(r'^test/document/dummymodel-(?P<pk>\d+).pdf', DummyDocumentWeasyprint.as_view(), name="dummymodel_pdf"),
]
|
Replace str into call in url
|
Replace str into call in url
|
Python
|
bsd-3-clause
|
makinacorpus/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity
|
a53612d5f276180d204378b9e4974fcd812f6a5b
|
tests/fake_camera.py
|
tests/fake_camera.py
|
from os import listdir
from os.path import isfile, join
class Camera(object):
def __init__(self, path):
self.files = [join(path, f) for f in listdir(path)]
self.files = sorted([f for f in self.files if isfile(f)])
self.current = 0
def reset(self):
self.current = 0
def has_next(self):
return self.current < len(self.files)
def next(self):
img = open(self.files[self.current], 'rb').read()
self.current += 1
return img
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from os import listdir
from os.path import isfile, join
class Camera(object):
def __init__(self, path):
self.files = [join(path, f) for f in listdir(path)]
self.files = sorted([f for f in self.files if isfile(f)])
self.current = 0
def reset(self):
self.current = 0
def has_next(self):
return self.current < len(self.files)
def next(self):
img = open(self.files[self.current], 'rb').read()
self.current += 1
return img
|
Add licence header in fake camera test file.
|
Add licence header in fake camera test file.
|
Python
|
apache-2.0
|
angus-ai/angus-sdk-python
|
d1ea64d6645f60df38221cbd194c26dff9686dcd
|
scripts/utils.py
|
scripts/utils.py
|
import sys
import hashlib
def e(s):
if type(s) == str:
return str
return s.encode('utf-8')
def d(s):
if type(s) == unicode:
return s
return unicode(s, 'utf-8')
def mkid(s):
return hashlib.sha1(e(s)).hexdigest()[:2*4]
class Logger(object):
def __init__(self):
self._mode = 'INFO'
def progress(self, message):
if not sys.stderr.isatty():
return
if self._mode == 'PROGRESS':
print >>sys.stderr, '\r',
print >>sys.stderr, message,
self._mode = 'PROGRESS'
def info(self, message):
if self._mode == 'PROGRESS':
print >>sys.stderr
print >>sys.stderr, message
self._mode = 'INFO'
|
import sys
import hashlib
def e(s):
if type(s) == str:
return str
return s.encode('utf-8')
def d(s):
if type(s) == unicode:
return s
return unicode(s, 'utf-8')
def mkid(s):
return hashlib.sha1(e(s)).hexdigest()[:2*4]
class Logger(object):
def __init__(self):
self._mode = 'INFO'
def progress(self, message):
message = e(message)
if not sys.stderr.isatty():
return
if self._mode == 'PROGRESS':
print >>sys.stderr, '\r',
print >>sys.stderr, message,
self._mode = 'PROGRESS'
def info(self, message):
message = e(message)
if self._mode == 'PROGRESS':
print >>sys.stderr
print >>sys.stderr, message
self._mode = 'INFO'
|
Handle logging unicode messages in python2.
|
Handle logging unicode messages in python2.
Former-commit-id: 257d94eb71d5597ff52a18ec1530d73496901ef4
|
Python
|
mit
|
guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt
|
a74e91613be376d6d71fb90c15cab689af661e37
|
money_conversion/money.py
|
money_conversion/money.py
|
from currency_rates import rates
class Money(object):
def __init__(self, amount, currency):
self.amount = amount
self.currency = currency.upper()
def __repr__(self):
return "%.2f %s" % (self.amount, self.currency)
def to_currency(self, new_currency):
new_currency = new_currency.split('_')[1].upper()
amount = self.amount
base_currency_rates = rates.get(self.currency)
new_amount = amount * base_currency_rates.get(new_currency)
return Money(new_amount, new_currency)
|
from currency_rates import rates
class Money(object):
def __init__(self, amount, currency):
self.amount = amount
self.currency = currency.upper()
def __repr__(self):
return "%.2f %s" % (self.amount, self.currency)
def __getattr__(self, currency):
def convert():
return self.to_currency(currency)
return convert
def to_currency(self, currency):
currency = currency.split('_')[1].upper()
amount = self.amount
base_currency_rates = rates.get(self.currency)
new_amount = amount * base_currency_rates.get(currency)
return Money(new_amount, currency)
|
Add __getattr__ method in order to be able to call non-defined methods
|
Add __getattr__ method in order to be able to call non-defined methods
|
Python
|
mit
|
mdsrosa/money-conversion-py
|
9a698d1428fbe0744c9dba3532b778569dbe1dd4
|
server.py
|
server.py
|
import socket
import sys
class SimpleServer(object):
"""Simple server using the socket library"""
def __init__(self, blocking=False, connection_oriented=True):
"""
The constructor initializes socket specifying the blocking status and
if it must be a connection oriented socket.
:param blocking: A flag that specifies if the socket must be blocking
:ptype: Boolean
:param connection_oriented: A flag that specifies if the socket must
be connection oriented or not
:ptype: Boolean
"""
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if not blocking:
self.sock.setblocking(0)
def connect(self, host, port):
"""
Connects the server to the "host", and prepares it to listen on "port"
:param host: The network layer identifier of an interface
:ptype: String or Integer (see help(socket))
:param port: The transport layer identifier of an application
:ptype: Integer
"""
self.sock.connect((host, port))
|
"""
A Simple Server class that allows to configure a socket in a very simple way.
It is for studying purposes only.
"""
import socket
import sys
__author__ = "Facundo Victor"
__license__ = "MIT"
__email__ = "facundovt@gmail.com"
class SimpleServer(object):
"""Simple server using the socket library"""
def __init__(self, blocking=False, connection_oriented=True):
"""
The constructor initializes socket specifying the blocking status and
if it must be a connection oriented socket.
:param blocking: A flag that specifies if the socket must be blocking
:ptype: Boolean
:param connection_oriented: A flag that specifies if the socket must
be connection oriented or not
:ptype: Boolean
"""
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if not blocking:
self.sock.setblocking(0)
def connect(self, host, port):
"""
Connects the server to the "host", and prepares it to listen on "port"
:param host: The network layer identifier of an interface
:ptype: String or Integer (see help(socket))
:param port: The transport layer identifier of an application
:ptype: Integer
"""
server_address = (host, port)
self.sock.connect(server_address)
print('starting up on %s port %s' % server_address)
|
Add docstrings and author reference
|
Add docstrings and author reference
|
Python
|
mit
|
facundovictor/non-blocking-socket-samples
|
5f501af61b416dae0e46236a8e1f9684dcc66e21
|
python/decoder_test.py
|
python/decoder_test.py
|
import argparse
import scanner
import numpy as np
import cv2
from decode import db
@db.loader('frame')
def load_frames(buf, metadata):
return np.frombuffer(buf, dtype=np.uint8) \
.reshape((metadata.height,metadata.width,3))
def extract_frames(args):
job = load_frames(args['dataset'], 'edr')
video_paths = job._dataset.video_data.original_video_paths
for (vid, frames) in job.as_frame_list():
video_path = video_paths[int(vid)]
inp = cv2.VideoCapture(video_path)
assert(inp.isOpened())
video_frame_num = -1
for (frame_num, buf) in frames:
while video_frame_num != frame_num:
_, video_frame = inp.read()
video_frame_num += 1
scanner_frame = cv2.cvtColor(buf, cv2.COLOR_RGB2BGR)
frame_diff = (scanner_frame - video_frame).sum()
if frame_diff != 0:
print('Frame {} does not match!'.format(frame_num))
if __name__ == "__main__":
p = argparse.ArgumentParser(description='Extract JPEG frames from videos')
p.add_argument('dataset', type=str)
extract_frames(p.parse_args().__dict__)
|
import argparse
import scanner
import numpy as np
import cv2
from decode import db
@db.loader('frame')
def load_frames(buf, metadata):
return np.frombuffer(buf, dtype=np.uint8) \
.reshape((metadata.height,metadata.width,3))
def extract_frames(args):
job = load_frames(args['dataset'], 'edr')
video_paths = job._dataset.video_data.original_video_paths
for (vid, frames) in job.as_frame_list():
video_path = video_paths[int(vid)]
inp = cv2.VideoCapture(video_path)
assert(inp.isOpened())
video_frame_num = -1
for (frame_num, buf) in frames:
while video_frame_num != frame_num:
_, video_frame = inp.read()
video_frame_num += 1
scanner_frame = cv2.cvtColor(buf, cv2.COLOR_RGB2BGR)
frame_diff = np.abs(scanner_frame - video_frame)
if frame_diff.sum() != 0:
print('Frame {} does not match!'.format(frame_num))
cv2.imwrite('decode_frames_' + str(frame_num) + '.jpg',
np.concatenate(
(scanner_frame, video_frame, frame_diff), 1))
if __name__ == "__main__":
p = argparse.ArgumentParser(description='Extract JPEG frames from videos')
p.add_argument('dataset', type=str)
extract_frames(p.parse_args().__dict__)
|
Write out concatenated frame on decode test failure
|
Write out concatenated frame on decode test failure
|
Python
|
apache-2.0
|
scanner-research/scanner,scanner-research/scanner,scanner-research/scanner,scanner-research/scanner
|
e2cba02550dfbe8628daf024a2a35c0dffb234e9
|
python/cli/request.py
|
python/cli/request.py
|
import requests
import os
aport = os.environ.get('MYAPORT')
if aport is None:
aport = "80"
aport = "23456"
url1 = 'http://localhost:' + aport + '/'
url2 = 'http://localhost:' + aport + '/action/improvesimulateinvest'
url3 = 'http://localhost:' + aport + '/action/autosimulateinvest'
url4 = 'http://localhost:' + aport + '/action/improveautosimulateinvest'
#headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
#headers={'Content-type':'application/json', 'Accept':'application/json'}
headers={'Content-Type' : 'application/json;charset=utf-8'}
def request1(param, webpath):
return requests.post(url1 + webpath, json=param, headers=headers)
def request2(market, data):
return requests.post(url2 + '/market/' + str(market), json=data, headers=headers)
def request3(market, data):
return requests.post(url3 + '/market/' + str(market), json=data, headers=headers)
def request4(market, data):
return requests.post(url4 + '/market/' + str(market), json=data, headers=headers)
def request0(data):
return requests.post(url, data='', headers=headers)
#return requests.post(url, data=json.dumps(data), headers=headers)
|
import requests
import os
aport = os.environ.get('MYAPORT')
if aport is None:
aport = "80"
aport = "23456"
ahost = os.environ.get('MYAHOST')
if ahost is None:
ahost = "localhost"
url1 = 'http://' + ahost + ':' + aport + '/'
#headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
#headers={'Content-type':'application/json', 'Accept':'application/json'}
headers={'Content-Type' : 'application/json;charset=utf-8'}
def request1(param, webpath):
return requests.post(url1 + webpath, json=param, headers=headers)
def request0(data):
return requests.post(url, data='', headers=headers)
#return requests.post(url, data=json.dumps(data), headers=headers)
|
Handle different environments, for automation (I4).
|
Handle different environments, for automation (I4).
|
Python
|
agpl-3.0
|
rroart/aether,rroart/aether,rroart/aether,rroart/aether,rroart/aether
|
0adadcb3f04e2ecb98b5ca5de1afba2ba7208d23
|
spacy/tests/parser/test_beam_parse.py
|
spacy/tests/parser/test_beam_parse.py
|
import spacy
import pytest
@pytest.mark.models
def test_beam_parse():
nlp = spacy.load('en_core_web_sm')
doc = nlp(u'Australia is a country', disable=['ner'])
ents = nlp.entity(doc, beam_width=2)
print(ents)
|
# coding: utf8
from __future__ import unicode_literals
import pytest
@pytest.mark.models('en')
def test_beam_parse(EN):
doc = EN(u'Australia is a country', disable=['ner'])
ents = EN.entity(doc, beam_width=2)
print(ents)
|
Fix beam parse model test
|
Fix beam parse model test
|
Python
|
mit
|
aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy
|
1f2deb95ba543bf05dd78f1df2e9ee6d17a2c4c3
|
buffer/tests/test_profiles_manager.py
|
buffer/tests/test_profiles_manager.py
|
import json
from nose.tools import eq_, raises
from mock import MagicMock, patch
from buffer.managers.profiles import Profiles
from buffer.models.profile import PATHS
mocked_response = {
'name': 'me',
'service': 'twiter',
'id': 1
}
def test_profiles_manager_all_method():
'''
Test basic profiles retrieving
'''
mocked_api = MagicMock()
mocked_api.get.return_value = [{'a':'b'}]
with patch('buffer.managers.profiles.Profile') as mocked_profile:
mocked_profile.return_value = 1
profiles = Profiles(api=mocked_api).all()
eq_(profiles, [1])
mocked_api.get.assert_called_once_with(url=PATHS['GET_PROFILES'])
mocked_profile.assert_called_once_with(mocked_api, {'a': 'b'})
|
import json
from nose.tools import eq_, raises
from mock import MagicMock, patch
from buffer.managers.profiles import Profiles
from buffer.models.profile import Profile, PATHS
mocked_response = {
'name': 'me',
'service': 'twiter',
'id': 1
}
def test_profiles_manager_all_method():
'''
Test basic profiles retrieving
'''
mocked_api = MagicMock()
mocked_api.get.return_value = [{'a':'b'}]
with patch('buffer.managers.profiles.Profile') as mocked_profile:
mocked_profile.return_value = 1
profiles = Profiles(api=mocked_api).all()
eq_(profiles, [1])
mocked_api.get.assert_called_once_with(url=PATHS['GET_PROFILES'])
mocked_profile.assert_called_once_with(mocked_api, {'a': 'b'})
def test_profiles_manager_filter_method():
'''
Test basic profiles filtering based on some minimal criteria
'''
mocked_api = MagicMock()
profiles = Profiles(mocked_api, [{'a':'b'}, {'a': 'c'}])
eq_(profiles.filter(a='b'), [{'a': 'b'}])
def test_profiles_manager_filter_method_empty():
'''
Test basic profiles filtering when the manager is empty
'''
mocked_api = MagicMock()
mocked_api.get.return_value = [{'a':'b'}, {'a': 'c'}]
profiles = Profiles(api=mocked_api)
eq_(profiles.filter(a='b'), [Profile(mocked_api, {'a': 'b'})])
|
Test profiles manager filterting method
|
Test profiles manager filterting method
|
Python
|
mit
|
vtemian/buffpy,bufferapp/buffer-python
|
8d3931fd5effabf9c5d56cb03ae15630ae984963
|
postalcodes_mexico/cli.py
|
postalcodes_mexico/cli.py
|
# -*- coding: utf-8 -*-
"""Console script for postalcodes_mexico."""
import sys
import click
@click.command()
def main(args=None):
"""Console script for postalcodes_mexico."""
click.echo("Replace this message by putting your code into "
"postalcodes_mexico.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
# -*- coding: utf-8 -*-
"""Console script for postalcodes_mexico."""
import sys
import click
from postalcodes_mexico import postalcodes_mexico
@click.command()
@click.argument('postalcode', type=str)
def main(postalcode):
"""Console script for postalcodes_mexico."""
places = postalcodes_mexico.places(postalcode)
click.echo(places)
return 0
if __name__ == "__main__":
sys.exit(main()) # pragma: no cover
|
Create simple CLI for the `places` function
|
Create simple CLI for the `places` function
|
Python
|
mit
|
FlowFX/postalcodes_mexico
|
006b645315190eb532ede9c36c77a7fbc4c27237
|
quotations/apps/api/v1.py
|
quotations/apps/api/v1.py
|
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
filtering = {
'name': ['exact', 'contains']
}
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
|
from tastypie.authorization import DjangoAuthorization
from tastypie import fields
from tastypie.resources import ModelResource, ALL_WITH_RELATIONS
from quotations.apps.quotations import models as quotations_models
from quotations.libs.auth import MethodAuthentication
from quotations.libs.serializers import Serializer
class BaseMeta(object):
serializer = Serializer()
authentication = MethodAuthentication()
authorization = DjangoAuthorization()
class AuthorResource(ModelResource):
class Meta(BaseMeta):
queryset = quotations_models.Author.objects.all()
resource_name = 'authors'
filtering = {
'name': ['exact', 'contains']
}
class QuotationResource(ModelResource):
author = fields.ForeignKey(AuthorResource, 'author', full=True)
class Meta(BaseMeta):
queryset = quotations_models.Quotation.objects.all()
resource_name = 'quotations'
filtering = {
'text': ['contains'],
'author': ALL_WITH_RELATIONS
}
def get_object_list(self, request):
object_list = super(QuotationResource, self).get_object_list(request)
if request.GET.get('random', False):
object_list = object_list.order_by('?')
return object_list
|
Allow retrieval of a random quote
|
Allow retrieval of a random quote
|
Python
|
mit
|
jessamynsmith/socialjusticebingo,jessamynsmith/underquoted,jessamynsmith/socialjusticebingo,jessamynsmith/underquoted,jessamynsmith/underquoted,jessamynsmith/socialjusticebingo,jessamynsmith/underquoted
|
8be6b576007f89fad50ea1dfacad46614c0a97c5
|
apps/domain/src/main/core/exceptions.py
|
apps/domain/src/main/core/exceptions.py
|
"""Specific PyGrid exceptions."""
class PyGridError(Exception):
def __init__(self, message):
super().__init__(message)
class AuthorizationError(PyGridError):
def __init__(self, message=""):
if not message:
message = "User is not authorized for this operation!"
super().__init__(message)
class RoleNotFoundError(PyGridError):
def __init__(self):
message = "Role ID not found!"
super().__init__(message)
class UserNotFoundError(PyGridError):
def __init__(self):
message = "User not found!"
super().__init__(message)
class GroupNotFoundError(PyGridError):
def __init__(self):
message = "Group ID not found!"
super().__init__(message)
class InvalidRequestKeyError(PyGridError):
def __init__(self):
message = "Invalid request key!"
super().__init__(message)
class InvalidCredentialsError(PyGridError):
def __init__(self):
message = "Invalid credentials!"
super().__init__(message)
class MissingRequestKeyError(PyGridError):
def __init__(self, message=""):
if not message:
message = "Missing request key!"
super().__init__(message)
|
"""Specific PyGrid exceptions."""
class PyGridError(Exception):
def __init__(self, message):
super().__init__(message)
class AuthorizationError(PyGridError):
def __init__(self, message=""):
if not message:
message = "User is not authorized for this operation!"
super().__init__(message)
class RoleNotFoundError(PyGridError):
def __init__(self):
message = "Role ID not found!"
super().__init__(message)
class UserNotFoundError(PyGridError):
def __init__(self):
message = "User not found!"
super().__init__(message)
class EnvironmentNotFoundError(PyGridError):
def __init__(self):
message = "Environment not found!"
super().__init__(message)
class GroupNotFoundError(PyGridError):
def __init__(self):
message = "Group ID not found!"
super().__init__(message)
class InvalidRequestKeyError(PyGridError):
def __init__(self):
message = "Invalid request key!"
super().__init__(message)
class InvalidCredentialsError(PyGridError):
def __init__(self):
message = "Invalid credentials!"
super().__init__(message)
class MissingRequestKeyError(PyGridError):
def __init__(self, message=""):
if not message:
message = "Missing request key!"
super().__init__(message)
|
ADD new exception -> EnvironmentNotFound!
|
ADD new exception -> EnvironmentNotFound!
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
e0b82cf9ed24870cb313328e5539acc5fe7f6508
|
stock_awesome/levels/chock_a_block.py
|
stock_awesome/levels/chock_a_block.py
|
import time
from stock_awesome.obj import market
def main():
"""
Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask
price.
"""
m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM')
#collection of orders placed
orders = {}
filled = 0
upper_limit = 3300
#try to buy 100000
to_send = 1000
while to_send > 0:
quote = m.quote()
ask = quote.get('ask')
if ask and ask < upper_limit:
r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill')
to_send -= 1
orders[r['id']] = r
orders = update_orders(m, orders)
filled += update_filled(orders)
else:
time.sleep(1)
def update_orders(m, orders):
"""
update order status
"""
return {o: m.order_status(o) for o in orders}
def update_filled(orders):
"""
Remove filled orders and update our count.
"""
closed = [o for o in orders if not orders[o]['open']]
#remove and sum filled orders
filled = sum(orders.pop(o)['totalFilled'] for o in closed)
return filled
if __name__ == '__main__':
main()
|
import time
from stock_awesome.obj import market
def main():
"""
Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask
price.
"""
m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO')
#collection of orders placed
orders = {}
filled = 0
upper_limit = 2450
#try to buy 100000
to_buy = 100000
while to_buy > 0:
quote = m.quote()
ask = quote.get('ask', 0)
bid = quote.get('bid')
if ask < upper_limit:
r = m.buy(quote['askSize'], ask, order_type='fill-or-kill')
to_buy -= r['totalFilled']
print("Bought {}, {} remaining".format(r['totalFilled'], to_buy))
else:
time.sleep(1)
print('done')
def update_orders(m, orders):
"""
update order status
"""
return {o: m.order_status(o) for o in orders}
def update_filled(orders):
"""
Remove filled orders and update our count.
"""
closed = [o for o in orders if not orders[o]['open']]
#remove and sum filled orders
filled = sum(orders.pop(o)['totalFilled'] for o in closed)
return filled
if __name__ == '__main__':
main()
|
Add some (inefective) score maximizing attempts
|
Add some (inefective) score maximizing attempts
|
Python
|
mit
|
ForeverWintr/stock_awesome
|
89193a6571dd74501533160b409cad8835c51625
|
gcframe/tests/urls.py
|
gcframe/tests/urls.py
|
# -*- coding: utf-8 -*-
""" Simple urls for use in testing the gcframe app. """
from __future__ import unicode_literals
# The defaults module is deprecated in Django 1.5, but necessary to
# support Django 1.3. drop ``.defaults`` when dropping 1.3 support.
from django.conf.urls.defaults import patterns, url
from .views import normal, framed, exempt
urlpatterns = patterns('',
url(r'normal/$', normal, name='gcframe-test-normal'),
url(r'framed/$', framed, name='gcframe-test-framed'),
url(r'exempt/$', exempt, name='gcframe-test-exempt'),
)
|
# -*- coding: utf-8 -*-
""" Simple urls for use in testing the gcframe app. """
from __future__ import unicode_literals
try:
from django.conf.urls import patterns, url
except ImportError: # Django 1.3
from django.conf.urls.defaults import patterns, url
from .views import normal, framed, exempt
urlpatterns = patterns('',
url(r'normal/$', normal, name='gcframe-test-normal'),
url(r'framed/$', framed, name='gcframe-test-framed'),
url(r'exempt/$', exempt, name='gcframe-test-exempt'),
)
|
Handle a Django deprecation properly.
|
Handle a Django deprecation properly.
Should have done this in commit cb4eae7b7.
|
Python
|
bsd-3-clause
|
benspaulding/django-gcframe
|
6bfb23294c2cc445479f4c8098b8e62647cf01bd
|
test/test_notification_integration.py
|
test/test_notification_integration.py
|
import os
import select
import groundstation.fs_watcher as fs_watcher
from groundstation.peer_socket import PeerSocket
from integration_fixture import StationIntegrationFixture, \
TestListener, \
TestClient
class StationFSWatcherIntegration(StationIntegrationFixture):
def test_notifies_peer(self):
read_sockets = []
write_sockets = []
def tick():
return select.select(read_sockets, write_sockets, [], 1)
addr = os.path.join(self.dir, "listener")
listener = TestListener(addr)
client = TestClient(addr)
peer = listener.accept(PeerSocket)
watcher = fs_watcher.FSWatcher(self.stations[0].store.object_root)
read_sockets.append(client)
read_sockets.append(watcher)
self.stations[0].write("trolololol")
(sread, _, _) = tick()
self.assertIn(watcher, sread)
obj_name = watcher.read()
client.notify_new_object(self.stations[0], obj_name)
client.send()
peer.recv()
data = peer.packet_queue.pop()
gizmo = self.stations[1].gizmo_factory.hydrate(data, peer)
assert gizmo is not None, "gizmo_factory returned None"
gizmo.process()
watcher.kill()
|
import os
import select
import groundstation.fs_watcher as fs_watcher
from groundstation.peer_socket import PeerSocket
from groundstation.utils import path2id
from integration_fixture import StationIntegrationFixture, \
TestListener, \
TestClient
class StationFSWatcherIntegration(StationIntegrationFixture):
def test_notifies_peer(self):
read_sockets = []
write_sockets = []
def tick():
return select.select(read_sockets, write_sockets, [], 1)
addr = os.path.join(self.dir, "listener")
listener = TestListener(addr)
client = TestClient(addr)
peer = listener.accept(PeerSocket)
watcher = fs_watcher.FSWatcher(self.stations[0].store.object_root)
read_sockets.append(client)
read_sockets.append(watcher)
self.stations[0].write("trolololol")
(sread, _, _) = tick()
self.assertIn(watcher, sread)
obj_name = path2id(watcher.read())
client.notify_new_object(self.stations[0], obj_name)
client.send()
peer.recv()
data = peer.packet_queue.pop()
gizmo = self.stations[1].gizmo_factory.hydrate(data, peer)
assert gizmo is not None, "gizmo_factory returned None"
gizmo.process()
peer.send()
client.recv()
data = client.packet_queue.pop()
gizmo = self.stations[0].gizmo_factory.hydrate(data, peer)
assert gizmo is not None, "gizmo_factory returned None"
self.assertEqual(gizmo.verb, "FETCHOBJECT")
self.assertEqual(gizmo.payload, obj_name)
gizmo.process()
watcher.kill()
|
Validate that we can translate a NEWOBJECT into a FETCHOBJECT
|
Validate that we can translate a NEWOBJECT into a FETCHOBJECT
|
Python
|
mit
|
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
|
6fe5a416ed229e7ec8efab9d6b3dac43f16515b6
|
corehq/apps/domain/__init__.py
|
corehq/apps/domain/__init__.py
|
from corehq.preindex import ExtraPreindexPlugin
from django.conf import settings
ExtraPreindexPlugin.register('domain', __file__, (
settings.NEW_USERS_GROUPS_DB, settings.NEW_FIXTURES_DB, 'meta'))
|
from corehq.preindex import ExtraPreindexPlugin
from django.conf import settings
ExtraPreindexPlugin.register('domain', __file__, (
settings.NEW_DOMAINS_DB,
settings.NEW_USERS_GROUPS_DB,
settings.NEW_FIXTURES_DB,
'meta',
))
|
Add the new domains db
|
Add the new domains db
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
|
e12432b0c97d1ddebf16df821fe6c77bb8b6a66b
|
wagtail/wagtailsites/wagtail_hooks.py
|
wagtail/wagtailsites/wagtail_hooks.py
|
from django.conf.urls import include, url
from django.core import urlresolvers
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore import hooks
from wagtail.wagtailadmin.menu import MenuItem
from wagtail.wagtailsites import urls
def register_admin_urls():
return [
url(r'^sites/', include(urls)),
]
hooks.register('register_admin_urls', register_admin_urls)
def construct_main_menu(request, menu_items):
if request.user.is_superuser:
menu_items.append(
MenuItem(_('Sites'), urlresolvers.reverse('wagtailsites_index'), classnames='icon icon-site', order=602)
)
hooks.register('construct_main_menu', construct_main_menu)
|
from django.conf.urls import include, url
from django.core import urlresolvers
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore import hooks
from wagtail.wagtailadmin.menu import MenuItem
from wagtail.wagtailsites import urls
@hooks.register('register_admin_urls')
def register_admin_urls():
return [
url(r'^sites/', include(urls)),
]
class SitesMenuItem(MenuItem):
def is_shown(self, request):
return request.user.is_superuser
@hooks.register('register_settings_menu_item')
def register_sites_menu_item():
return MenuItem(_('Sites'), urlresolvers.reverse('wagtailsites_index'), classnames='icon icon-site', order=602)
|
Move Sites to the settings menu (and use decorator syntax for hooks)
|
Move Sites to the settings menu (and use decorator syntax for hooks)
|
Python
|
bsd-3-clause
|
mixxorz/wagtail,wagtail/wagtail,KimGlazebrook/wagtail-experiment,gasman/wagtail,mayapurmedia/wagtail,kaedroho/wagtail,jnns/wagtail,serzans/wagtail,hanpama/wagtail,iho/wagtail,marctc/wagtail,kurtw/wagtail,nilnvoid/wagtail,nrsimha/wagtail,gasman/wagtail,jorge-marques/wagtail,Toshakins/wagtail,rsalmaso/wagtail,takeflight/wagtail,Tivix/wagtail,chimeno/wagtail,nilnvoid/wagtail,hanpama/wagtail,torchbox/wagtail,takeflight/wagtail,marctc/wagtail,hanpama/wagtail,chimeno/wagtail,iho/wagtail,taedori81/wagtail,benjaoming/wagtail,takeshineshiro/wagtail,bjesus/wagtail,hamsterbacke23/wagtail,wagtail/wagtail,Tivix/wagtail,torchbox/wagtail,Klaudit/wagtail,rv816/wagtail,m-sanders/wagtail,nealtodd/wagtail,kaedroho/wagtail,FlipperPA/wagtail,chrxr/wagtail,mayapurmedia/wagtail,nrsimha/wagtail,WQuanfeng/wagtail,zerolab/wagtail,KimGlazebrook/wagtail-experiment,benjaoming/wagtail,janusnic/wagtail,taedori81/wagtail,tangentlabs/wagtail,iho/wagtail,kurtrwall/wagtail,FlipperPA/wagtail,mephizzle/wagtail,thenewguy/wagtail,Pennebaker/wagtail,mikedingjan/wagtail,gasman/wagtail,nimasmi/wagtail,hamsterbacke23/wagtail,timorieber/wagtail,bjesus/wagtail,m-sanders/wagtail,stevenewey/wagtail,kurtrwall/wagtail,marctc/wagtail,darith27/wagtail,inonit/wagtail,mixxorz/wagtail,chrxr/wagtail,jorge-marques/wagtail,hamsterbacke23/wagtail,JoshBarr/wagtail,nutztherookie/wagtail,davecranwell/wagtail,mephizzle/wagtail,KimGlazebrook/wagtail-experiment,Tivix/wagtail,jordij/wagtail,janusnic/wagtail,mayapurmedia/wagtail,rjsproxy/wagtail,hamsterbacke23/wagtail,mixxorz/wagtail,davecranwell/wagtail,jorge-marques/wagtail,gasman/wagtail,davecranwell/wagtail,zerolab/wagtail,wagtail/wagtail,mixxorz/wagtail,dresiu/wagtail,chimeno/wagtail,kurtrwall/wagtail,mikedingjan/wagtail,timorieber/wagtail,nealtodd/wagtail,mjec/wagtail,dresiu/wagtail,serzans/wagtail,quru/wagtail,jnns/wagtail,nimasmi/wagtail,tangentlabs/wagtail,kaedroho/wagtail,bjesus/wagtail,takeshineshiro/wagtail,takeshineshiro/wagtail,nutztherookie/wagtail,dresiu/wagtail,rj
sproxy/wagtail,torchbox/wagtail,kaedroho/wagtail,nutztherookie/wagtail,rv816/wagtail,kurtw/wagtail,kaedroho/wagtail,WQuanfeng/wagtail,timorieber/wagtail,quru/wagtail,Pennebaker/wagtail,kurtw/wagtail,jordij/wagtail,dresiu/wagtail,zerolab/wagtail,serzans/wagtail,Pennebaker/wagtail,thenewguy/wagtail,iho/wagtail,iansprice/wagtail,Toshakins/wagtail,gasman/wagtail,stevenewey/wagtail,m-sanders/wagtail,JoshBarr/wagtail,zerolab/wagtail,rsalmaso/wagtail,nilnvoid/wagtail,inonit/wagtail,thenewguy/wagtail,rjsproxy/wagtail,wagtail/wagtail,taedori81/wagtail,jorge-marques/wagtail,jnns/wagtail,chrxr/wagtail,Klaudit/wagtail,iansprice/wagtail,nealtodd/wagtail,takeflight/wagtail,Klaudit/wagtail,takeflight/wagtail,tangentlabs/wagtail,nutztherookie/wagtail,Klaudit/wagtail,FlipperPA/wagtail,Toshakins/wagtail,chimeno/wagtail,timorieber/wagtail,quru/wagtail,gogobook/wagtail,mixxorz/wagtail,rv816/wagtail,stevenewey/wagtail,nilnvoid/wagtail,jnns/wagtail,mikedingjan/wagtail,rjsproxy/wagtail,nimasmi/wagtail,darith27/wagtail,mayapurmedia/wagtail,takeshineshiro/wagtail,nealtodd/wagtail,wagtail/wagtail,bjesus/wagtail,mjec/wagtail,janusnic/wagtail,jordij/wagtail,thenewguy/wagtail,marctc/wagtail,rsalmaso/wagtail,zerolab/wagtail,janusnic/wagtail,kurtw/wagtail,quru/wagtail,inonit/wagtail,chrxr/wagtail,gogobook/wagtail,rv816/wagtail,FlipperPA/wagtail,darith27/wagtail,benjaoming/wagtail,taedori81/wagtail,davecranwell/wagtail,Pennebaker/wagtail,tangentlabs/wagtail,hanpama/wagtail,rsalmaso/wagtail,inonit/wagtail,nimasmi/wagtail,WQuanfeng/wagtail,stevenewey/wagtail,thenewguy/wagtail,taedori81/wagtail,m-sanders/wagtail,Toshakins/wagtail,gogobook/wagtail,iansprice/wagtail,JoshBarr/wagtail,chimeno/wagtail,jordij/wagtail,nrsimha/wagtail,Tivix/wagtail,jorge-marques/wagtail,WQuanfeng/wagtail,darith27/wagtail,mephizzle/wagtail,mephizzle/wagtail,JoshBarr/wagtail,nrsimha/wagtail,gogobook/wagtail,iansprice/wagtail,benjaoming/wagtail,mjec/wagtail,mjec/wagtail,mikedingjan/wagtail,torchbox/wagtail,rsalmaso/wagtail,kurt
rwall/wagtail,dresiu/wagtail,serzans/wagtail,KimGlazebrook/wagtail-experiment
|
6689858b2364a668b362a5f00d4c86e57141dc37
|
numba/cuda/models.py
|
numba/cuda/models.py
|
from llvmlite import ir
from numba.core.datamodel.registry import register_default
from numba.core.extending import register_model, models
from numba.core import types
from numba.cuda.types import Dim3, GridGroup, CUDADispatcher
@register_model(Dim3)
class Dim3Model(models.StructModel):
def __init__(self, dmm, fe_type):
members = [
('x', types.int32),
('y', types.int32),
('z', types.int32)
]
super().__init__(dmm, fe_type, members)
@register_model(GridGroup)
class GridGroupModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
be_type = ir.IntType(64)
super().__init__(dmm, fe_type, be_type)
@register_default(types.Float)
class FloatModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
if fe_type == types.float32:
be_type = ir.FloatType()
elif fe_type == types.float16:
be_type = ir.IntType(16)
elif fe_type == types.float64:
be_type = ir.DoubleType()
else:
raise NotImplementedError(fe_type)
super(FloatModel, self).__init__(dmm, fe_type, be_type)
register_model(CUDADispatcher)(models.OpaqueModel)
|
from llvmlite import ir
from numba.core.datamodel.registry import register_default
from numba.core.extending import register_model, models
from numba.core import types
from numba.cuda.types import Dim3, GridGroup, CUDADispatcher
@register_model(Dim3)
class Dim3Model(models.StructModel):
def __init__(self, dmm, fe_type):
members = [
('x', types.int32),
('y', types.int32),
('z', types.int32)
]
super().__init__(dmm, fe_type, members)
@register_model(GridGroup)
class GridGroupModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
be_type = ir.IntType(64)
super().__init__(dmm, fe_type, be_type)
@register_default(types.Float)
class FloatModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
if fe_type == types.float16:
be_type = ir.IntType(16)
elif fe_type == types.float32:
be_type = ir.FloatType()
elif fe_type == types.float64:
be_type = ir.DoubleType()
else:
raise NotImplementedError(fe_type)
super(FloatModel, self).__init__(dmm, fe_type, be_type)
register_model(CUDADispatcher)(models.OpaqueModel)
|
Reorder FloatModel checks in ascending order
|
CUDA: Reorder FloatModel checks in ascending order
|
Python
|
bsd-2-clause
|
cpcloud/numba,numba/numba,numba/numba,seibert/numba,cpcloud/numba,cpcloud/numba,seibert/numba,seibert/numba,cpcloud/numba,numba/numba,IntelLabs/numba,numba/numba,IntelLabs/numba,cpcloud/numba,seibert/numba,IntelLabs/numba,IntelLabs/numba,seibert/numba,IntelLabs/numba,numba/numba
|
4a650922ee97b9cb54b203cab9709d511487d9ff
|
silver/tests/factories.py
|
silver/tests/factories.py
|
"""Factories for the silver app."""
# import factory
# from .. import models
|
import factory
from silver.models import Provider
class ProviderFactory(factory.django.DjangoModelFactory):
class Meta:
model = Provider
|
Add factory for the Provider model
|
Add factory for the Provider model
|
Python
|
apache-2.0
|
PressLabs/silver,PressLabs/silver,PressLabs/silver
|
9a51358871f04e2a5552621b6ac2c9dbe1ee8345
|
main.py
|
main.py
|
#!/usr/bin/env python
from pysnap import Snapchat
import secrets
s = Snapchat()
s.login(secrets.USERNAME, secrets.PASSWORD)
friends_to_add = [friend['name'] for friend in s.get_updates()['added_friends'] if friend['type'] == 1]
for friend in friends_to_add:
s.add_friend(friend)
snaps = [snap['id'] for snap in s.get_snaps() if snap['status'] == 1 and snap['media_type'] == 0]
for snap in snaps:
with open('tmp.jpg', 'wb') as f:
f.write(s.get_blob(snap))
media_id = s.upload('tmp.jpg')
s.post_story(media_id, 5)
s.mark_viewed(snap)
|
!/usr/bin/env python
from pysnap import Snapchat
import secrets
s = Snapchat()
s.login(secrets.USERNAME, secrets.PASSWORD)
friends_to_add = [friend['name'] for friend in s.get_updates()['added_friends'] if friend['type'] == 1]
for friend in friends_to_add:
s.add_friend(friend)
snaps = [snap['id'] for snap in s.get_snaps() if snap['status'] == 1 and snap['media_type'] == 0]
for snap in snaps:
with open('~/SnapchatBot/tmp.jpg', 'wb') as f:
f.write(s.get_blob(snap))
media_id = s.upload('~/SnapchatBot/tmp.jpg')
s.post_story(media_id, 5)
s.mark_viewed(snap)
|
Save temporary pictures to local directory
|
Save temporary pictures to local directory
|
Python
|
mit
|
jollex/SnapchatBot
|
d0a907872749f1bb54d6e8e160ea170059289623
|
source/custom/combo.py
|
source/custom/combo.py
|
# -*- coding: utf-8 -*-
## \package custom.combo
# MIT licensing
# See: LICENSE.txt
import wx
from wx.combo import OwnerDrawnComboBox
class ComboBox(OwnerDrawnComboBox):
def __init__(self, parent, win_id, value=wx.EmptyString, pos=wx.DefaultPosition,
size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator,
name=wx.ComboBoxNameStr):
OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices,
style, validator, name)
self.Default = self.GetLabel()
self.Priority = []
## Resets ComboBox to defaults
def Reset(self):
if not self.Count:
self.SetValue(self.Default)
return self.Value == self.Default
return False
|
# -*- coding: utf-8 -*-
## \package custom.combo
# MIT licensing
# See: LICENSE.txt
import wx
from wx.combo import OwnerDrawnComboBox
class ComboBox(OwnerDrawnComboBox):
def __init__(self, parent, win_id=wx.ID_ANY, value=wx.EmptyString, pos=wx.DefaultPosition,
size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator,
name=wx.ComboBoxNameStr):
OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices,
style, validator, name)
self.Default = self.GetLabel()
self.Priority = []
## Resets ComboBox to defaults
def Reset(self):
if not self.Count:
self.SetValue(self.Default)
return self.Value == self.Default
return False
|
Set ComboBox class default ID to wx.ID_ANY
|
Set ComboBox class default ID to wx.ID_ANY
|
Python
|
mit
|
AntumDeluge/desktop_recorder,AntumDeluge/desktop_recorder
|
2560ca287e81cbefb6037e5688bfa4ef74d85149
|
clock.py
|
clock.py
|
from __future__ import print_function
from apscheduler.schedulers.blocking import BlockingScheduler
import logging
import subprocess
logging.basicConfig()
scheduler = BlockingScheduler()
@scheduler.scheduled_job('interval', minutes=1)
def timed_job_min1():
print("Run notifier")
subprocess.run(
"notifier -concurrency=5 -fetcher-cache=true -notification-interval=1 && curl -sS https://nosnch.in/c411a3a685",
shell=True,
check=True)
# @scheduler.scheduled_job('interval', minutes=10)
# def timed_job_min10():
# print("Run notifier")
# subprocess.run(
# "notifier -concurrency=5 -fetcher-cache=true -notification-interval=10 && curl -sS https://nosnch.in/c411a3a685",
# shell=True,
# check=True)
scheduler.start()
|
from __future__ import print_function
from apscheduler.schedulers.blocking import BlockingScheduler
import logging
import subprocess
logging.basicConfig()
scheduler = BlockingScheduler()
@scheduler.scheduled_job('interval', minutes=1)
def timed_job_min1():
print("Run notifier")
subprocess.check_call(
"notifier -concurrency=5 -fetcher-cache=true -notification-interval=1 && curl -sS https://nosnch.in/c411a3a685",
shell=True)
# @scheduler.scheduled_job('interval', minutes=10)
# def timed_job_min10():
# print("Run notifier")
# subprocess.run(
# "notifier -concurrency=5 -fetcher-cache=true -notification-interval=10 && curl -sS https://nosnch.in/c411a3a685",
# shell=True,
# check=True)
scheduler.start()
|
Change call method for Python2.7
|
Change call method for Python2.7
|
Python
|
mit
|
oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/lekcije,oinume/lekcije,oinume/dmm-eikaiwa-fft
|
6bb9a4ed50ad879c56cdeae0dedb49bba6780780
|
matchers/volunteer.py
|
matchers/volunteer.py
|
import random
from base import BaseMatcher
class VolunteerMatcher(BaseMatcher):
dev_text = "volunteer someone"
all_text = "volunteer a dev"
dev_candidates = ['Steve', 'Arthur', 'Honza', 'Fernando', 'Nick']
all_candidates = dev_candidates + ['Craig', 'Evan']
def respond(self, message, user=None):
if self.dev_text in message.lower():
victim = random.choice(self.dev_candidates)
self.speak('%s is it' % victim)
elif self.all_text in message.lower():
victim = random.choice(self.all_candidates)
self.speak('%s is it' % victim)
|
import random
from base import BaseMatcher
class VolunteerMatcher(BaseMatcher):
dev_text = "volunteer someone"
all_text = "volunteer a dev"
dev_candidates = ['sjl', 'arthurdebert', 'honza', 'fernandotakai', 'nicksergeant']
all_candidates = dev_candidates + ['cz', 'ehazlett']
def respond(self, message, user=None):
if self.dev_text in message.lower():
victim = random.choice(self.dev_candidates)
self.speak('%s is it' % victim)
elif self.all_text in message.lower():
victim = random.choice(self.all_candidates)
self.speak('%s is it' % victim)
|
Use IRC Nicks instead of real names.
|
Use IRC Nicks instead of real names.
|
Python
|
bsd-2-clause
|
honza/nigel
|
b24083b0991157a1e0d8a533fc1cac3aa2e4523c
|
similarities/utils.py
|
similarities/utils.py
|
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
return Artist.objects.filter(similarity__other_artist=artist,
similarity__weight__gt=0)
|
from django.db.models import Q
import echonest
from artists.models import Artist
from echonest.models import SimilarResponse
from users.models import User
from .models import (GeneralArtist, UserSimilarity, Similarity,
update_similarities)
def add_new_similarities(artist, force_update=False):
similarities = []
responses = SimilarResponse.objects.filter(
normalized_name=artist.normalized_name)
if responses.exists() and not force_update:
return # Echo Nest similarities already added
user = User.objects.get(email='echonest')
artist_names = echonest.get_similar(artist.name)
cc_artists = Artist.objects.filter(name__in=artist_names)
for cc_artist in cc_artists:
kwargs = dict(
cc_artist=cc_artist,
other_artist=artist,
)
UserSimilarity.objects.get_or_create(defaults={'weight': 1},
user=user, **kwargs)
similarities.append(Similarity.objects.get_or_create(**kwargs)[0])
update_similarities(similarities)
def get_similar(name):
artist, _ = GeneralArtist.objects.get_or_create(
normalized_name=name.upper(), defaults={'name': name})
add_new_similarities(artist)
similar = Q(similarity__other_artist=artist, similarity__weight__gt=0)
return Artist.objects.filter(similar).order_by('-similarity__weight')
|
Order similar artist results properly
|
Order similar artist results properly
|
Python
|
bsd-3-clause
|
FreeMusicNinja/api.freemusic.ninja
|
7016b7bb026e0fe557ca06efa81dace9999e526d
|
hubbot/Modules/Healthcheck.py
|
hubbot/Modules/Healthcheck.py
|
from twisted.internet import reactor, protocol

from hubbot.moduleinterface import ModuleInterface


class Echo(protocol.Protocol):
    """This is just about the simplest possible protocol"""

    def dataReceived(self, data):
        """As soon as any data is received, write it back."""
        self.transport.write(data)


class Healthcheck(ModuleInterface):
    """Bot module exposing a TCP echo endpoint used as a liveness check."""

    # TCP port the healthcheck listener binds to.
    port = 9999

    def __init__(self, bot):
        self.healthcheck_server = protocol.ServerFactory()
        self.healthcheck_server.protocol = Echo
        super().__init__(bot)

    def on_load(self):
        reactor.listenTCP(self.port, self.healthcheck_server)

    def on_unload(self):
        # NOTE(review): Twisted's reactor has no stopListening(); it is the
        # IListeningPort returned by listenTCP() that must be stopped —
        # confirm this call actually works at unload time.
        reactor.stopListening(self.port)

    def help(self, message):
        return f"Hosts an HTTP healthcheck server on port {self.port}."
|
from twisted.protocols import basic
from twisted.internet import protocol, reactor

from hubbot.moduleinterface import ModuleInterface


class HealthcheckProtocol(basic.LineReceiver):
    """Minimal HTTP responder: answers any request line with 200 text/plain."""

    def lineReceived(self, line):
        """Reply with a fixed healthcheck response and close the connection."""
        response_body = "All is well. Ish."
        self.sendLine("HTTP/1.0 200 OK".encode("UTF-8"))
        self.sendLine("Content-Type: text/plain".encode("UTF-8"))
        self.sendLine(f"Content-Length: {len(response_body)}\n".encode("UTF-8"))
        # BUG FIX: transport.write() requires bytes; writing a str raises
        # TypeError on Python 3 Twisted.
        self.transport.write(response_body.encode("UTF-8"))
        self.transport.loseConnection()


class Healthcheck(ModuleInterface):
    """Bot module hosting a tiny HTTP healthcheck endpoint."""

    # TCP port the healthcheck listener binds to.
    port = 9999

    def __init__(self, bot):
        self.healthcheck_server = protocol.ServerFactory()
        self.healthcheck_server.protocol = HealthcheckProtocol
        # Set in on_load(); holds the IListeningPort so it can be shut down.
        self.listener = None
        super().__init__(bot)

    def on_load(self):
        self.listener = reactor.listenTCP(self.port, self.healthcheck_server)

    def on_unload(self):
        # BUG FIX: the reactor has no stopListening(); the listening port
        # object returned by listenTCP() is what must be stopped.
        if self.listener is not None:
            self.listener.stopListening()
            self.listener = None

    def help(self, message):
        return f"Hosts an HTTP healthcheck server on port {self.port}."
|
Write a slightly less dumb protocol?
|
Write a slightly less dumb protocol?
|
Python
|
mit
|
HubbeKing/Hubbot_Twisted
|
1704e66caa06524d9b595c312d3a5f5d93683261
|
app/models/cnes_bed.py
|
app/models/cnes_bed.py
|
from sqlalchemy import Column, Integer, String, func

from app import db


class CnesBed(db.Model):
    """Hospital-bed records from the CNES registry.

    The composite primary key encodes the geographic hierarchy
    (region → municipality) plus the establishment (cnes) code.
    """

    __tablename__ = 'cnes_bed'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)

    @classmethod
    def dimensions(cls):
        # Grouping columns exposed to API queries.
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
        ]

    @classmethod
    def aggregate(cls, value):
        # Map a requested measure name to its SQL aggregate expression.
        return {
            'beds': func.count(cls.cnes)
        }[value]

    @classmethod
    def values(cls):
        # Measure names available for aggregation.
        return ['beds']
|
from sqlalchemy import Column, Integer, String, func

from app import db


class CnesBed(db.Model):
    """Hospital-bed records from the CNES registry.

    The composite primary key encodes the geographic hierarchy
    (region → municipality), the establishment (cnes) code and the bed type.
    """

    __tablename__ = 'cnes_bed'

    year = Column(Integer, primary_key=True)
    region = Column(String(1), primary_key=True)
    mesoregion = Column(String(4), primary_key=True)
    microregion = Column(String(5), primary_key=True)
    state = Column(String(2), primary_key=True)
    municipality = Column(String(7), primary_key=True)
    cnes = Column(String(7), primary_key=True)
    bed_type = Column(String(7), primary_key=True)

    @classmethod
    def dimensions(cls):
        # Grouping columns exposed to API queries.
        return [
            'year',
            'region',
            'mesoregion',
            'microregion',
            'state',
            'municipality',
        ]

    @classmethod
    def aggregate(cls, value):
        # Count rows per group; each row represents one bed.
        return {
            'beds': func.count()
        }[value]

    @classmethod
    def values(cls):
        # Measure names available for aggregation.
        return ['beds']
|
Add bed_type to cnes_establishment model
|
Add bed_type to cnes_establishment model
|
Python
|
mit
|
DataViva/dataviva-api,daniel1409/dataviva-api
|
0eaff91695eefcf289e31d8ca93d19ab5bbd392d
|
katana/expr.py
|
katana/expr.py
|
import re


class Expr(object):
    """A named token pattern; a match yields ``[name, matched_text]``."""

    def __init__(self, name, regex):
        self.name = name
        self.regex = regex

    def on_match(self, string):
        """Build the token produced for *string*."""
        return [self.name, string]

    def callback(self, _, string):
        """Adapter with the (scanner, text) signature ``re.Scanner`` expects."""
        return self.on_match(string)


class Scanner(object):
    """Tokenizer composed from a sequence of :class:`Expr` patterns."""

    def __init__(self, exprs):
        rules = [(expr.regex, expr.callback) for expr in exprs]
        self.scanner = re.Scanner(rules)

    def match(self, string):
        """Tokenize *string*; raise ``ValueError`` on unmatched trailing input."""
        tokens, remainder = self.scanner.scan(string)
        if remainder:
            raise ValueError
        return tokens
|
import re


class Expr(object):
    """A named token pattern that unpacks to an ``re.Scanner`` rule."""

    def __init__(self, name, regex):
        self.name = name
        self.regex = regex

    def __iter__(self):
        # Unpacks as (pattern, callback), so ``tuple(expr)`` is a scanner rule.
        yield self.regex
        yield lambda _, token: self.on_match(token)

    def on_match(self, string):
        """Build the token produced for *string*."""
        return [self.name, string]


class Scanner(object):
    """Tokenizer composed from a sequence of :class:`Expr` patterns."""

    def __init__(self, exprs):
        self.scanner = re.Scanner([tuple(expr) for expr in exprs])

    def match(self, string):
        """Tokenize *string*; raise ``ValueError`` on unmatched trailing input."""
        tokens, remainder = self.scanner.scan(string)
        if remainder:
            raise ValueError
        return tokens
|
Refactor Expr object to be more self contained
|
Refactor Expr object to be more self contained
|
Python
|
mit
|
eugene-eeo/katana
|
ce2e5b0dc3ddafe931a902cb7aa24c3adbc246b7
|
fireplace/cards/wog/neutral_legendary.py
|
fireplace/cards/wog/neutral_legendary.py
|
from ..utils import *
##
# Minions
|
from ..utils import *

##
# Minions

class OG_122:
    "Mukla, Tyrant of the Vale"
    # On play: give the controller two copies of card EX1_014t.
    play = Give(CONTROLLER, "EX1_014t") * 2


class OG_318:
    "Hogger, Doom of Elwynn"
    # Whenever this minion takes damage, summon OG_318t for the controller.
    events = SELF_DAMAGE.on(Summon(CONTROLLER, "OG_318t"))


class OG_338:
    "Nat, the Darkfisher"
    # At the start of the opponent's turn, a coin flip decides whether the
    # opponent draws a card.
    events = BeginTurn(OPPONENT).on(COINFLIP & Draw(OPPONENT))
|
Implement corrupted Mukla, Hogger and Nat
|
Implement corrupted Mukla, Hogger and Nat
|
Python
|
agpl-3.0
|
beheh/fireplace,NightKev/fireplace,jleclanche/fireplace
|
5ed9e43ec451aca9bdca4391bd35934e5fe4aea3
|
huts/management/commands/dumphutsjson.py
|
huts/management/commands/dumphutsjson.py
|
from django.core.management.base import BaseCommand

from huts.utils import export


class Command(BaseCommand):
    """Management command that dumps huts, agencies and regions as JSON."""

    args = ''
    help = 'Dumps the huts, agencies, and regions in the json api format.'

    def handle(self, *args, **options):
        # Write the UTF-8 encoded JSON document to stdout.
        print(export.db_as_json().encode('utf-8'))
|
from optparse import make_option

from django.core.management.base import BaseCommand

from huts.utils import export


class Command(BaseCommand):
    """Management command that dumps huts, agencies and regions as JSON."""

    option_list = BaseCommand.option_list + (
        make_option(
            '--file',
            help='Write to file instead of stdout'
        ),
    )
    help = 'Dumps the huts, agencies, and regions in the json api format.'

    def handle(self, *args, **options):
        # NOTE(review): options['file'] is the option's string value (a file
        # name), not a file object, so .write() on it would fail — confirm
        # whether the path should be opened here before writing.
        out = options['file'] or self.stdout
        out.write(export.db_as_json().encode('utf-8'))
|
Update command to take file argument
|
Update command to take file argument
|
Python
|
mit
|
dylanfprice/hutmap,dylanfprice/hutmap,dylanfprice/hutmap,muescha/hutmap,muescha/hutmap,dylanfprice/hutmap,muescha/hutmap,muescha/hutmap
|
4f9db35566332778853e993f7791116d66c49dd4
|
grako/rendering.py
|
grako/rendering.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
    """Render *item* to a string.

    ``None`` renders as the empty string, a :class:`Renderer` renders
    itself with *fields*, a list renders as the concatenation of its
    rendered elements, and anything else is converted with ``str``.
    """
    if item is None:
        return ''
    if isinstance(item, Renderer):
        return item.render(**fields)
    if isinstance(item, list):
        return ''.join(render(element) for element in item)
    return str(item)
class Renderer(object):
    """Base class for objects that render themselves through a format template.

    Subclasses set ``template`` (or pass one to the constructor) and may
    override :meth:`render_fields` to populate extra template fields.
    """

    template = ''
    # Class-wide counter; counter() hands out unique ascending numbers.
    _counter = itertools.count()

    def __init__(self, template=None):
        if template is not None:
            self.template = template

    def counter(self):
        """Return the next value from the class-wide counter."""
        return next(self._counter)

    def render_fields(self, fields):
        """Hook for subclasses to populate *fields* before formatting."""
        pass

    def render(self, template=None, **fields):
        """Format the template with instance attributes plus *fields*."""
        if template is None:
            template = self.template
        # Expose all public instance attributes as template fields.
        fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
        self.render_fields(fields)
        # Render nested values (lists, sub-renderers) to strings first.
        fields = {k:render(v) for k, v in fields.items()}
        try:
            return trim(template).format(**fields)
        except KeyError as e:
            # Re-raise with the renderer type to make the failure traceable.
            raise KeyError(str(e), type(self))
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import, unicode_literals
import itertools
from .util import trim
def render(item, **fields):
    """Render *item* to a string.

    ``None`` renders as the empty string, a :class:`Renderer` renders
    itself with *fields*, a list renders as the concatenation of its
    rendered elements, and anything else is converted with ``str``.
    """
    if item is None:
        return ''
    if isinstance(item, Renderer):
        return item.render(**fields)
    if isinstance(item, list):
        return ''.join(render(element) for element in item)
    return str(item)
class Renderer(object):
    """Base class for objects that render themselves through a format template.

    Subclasses set ``template`` (or pass one to the constructor) and may
    override :meth:`render_fields` to populate extra template fields.
    """

    template = ''
    # Class-wide counter; counter() hands out unique ascending numbers.
    _counter = itertools.count()

    def __init__(self, template=None):
        if template is not None:
            self.template = template

    def counter(self):
        """Return the next value from the class-wide counter."""
        return next(self._counter)

    def render_fields(self, fields):
        """Hook for subclasses to populate *fields* before formatting."""
        pass

    def render(self, template=None, **fields):
        """Format the template with instance attributes plus *fields*."""
        # Expose all public instance attributes as template fields.
        fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')})
        self.render_fields(fields)
        # Resolved after the hook so render_fields() may override the template.
        if template is None:
            template = self.template
        # Render nested values (lists, sub-renderers) to strings first.
        fields = {k:render(v) for k, v in fields.items()}
        try:
            return trim(template).format(**fields)
        except KeyError as e:
            # Re-raise with the renderer type to make the failure traceable.
            raise KeyError(str(e), type(self))
|
Allow render_fields to override the default template.
|
Allow render_fields to override the default template.
|
Python
|
bsd-2-clause
|
swayf/grako,swayf/grako
|
2d3e52567d7d361428ce93d02cc42ecaddacab6c
|
tests/test_commands.py
|
tests/test_commands.py
|
# -*- coding: utf-8 -*-

from couchapp import commands
from couchapp.errors import AppError
from mock import Mock, patch
from nose.tools import raises


@patch('couchapp.commands.document')
def test_init_dest(mock_doc):
    """``couchapp init <dest>`` creates a document at the given path."""
    commands.init(None, None, '/tmp/mk')
    mock_doc.assert_called_once_with('/tmp/mk', create=True)


@patch('os.getcwd', return_value='/mock_dir')
@patch('couchapp.commands.document')
def test_init_dest_auto(mock_doc, mock_cwd):
    """With no dest, ``init`` falls back to the current working directory."""
    commands.init(None, None)
    mock_doc.assert_called_once_with('/mock_dir', create=True)


# BUG FIX: this test was also named ``test_init_dest_auto``, which redefined
# (and silently disabled) the test above. Give it a distinct name.
@raises(AppError)
@patch('os.getcwd', return_value=None)
@patch('couchapp.commands.document')
def test_init_dest_none(mock_doc, mock_cwd):
    """When the cwd cannot be determined, ``init`` raises AppError."""
    commands.init(None, None)
|
# -*- coding: utf-8 -*-

from couchapp import commands
from couchapp.errors import AppError
from mock import Mock, patch
from nose.tools import raises


@patch('couchapp.commands.document')
def test_init_dest(mock_doc):
    """``couchapp init <dest>`` creates a document at the given path."""
    commands.init(None, None, '/tmp/mk')
    mock_doc.assert_called_once_with('/tmp/mk', create=True)


@patch('os.getcwd', return_value='/mock_dir')
@patch('couchapp.commands.document')
def test_init_dest_auto(mock_doc, mock_cwd):
    """With no dest, ``init`` falls back to the current working directory."""
    commands.init(None, None)
    mock_doc.assert_called_once_with('/mock_dir', create=True)


@raises(AppError)
@patch('os.getcwd', return_value=None)
@patch('couchapp.commands.document')
def test_init_dest_none(mock_doc, mock_cwd):
    """When the cwd cannot be determined, ``init`` raises AppError."""
    commands.init(None, None)


def test_push_outside():
    '''
    $ couchapp push /path/to/app
    '''
    # TODO: not yet implemented.
    pass


@patch('couchapp.commands.document', return_value='{"status": "ok"}')
def test_push_export_outside(mock_doc):
    '''
    $ couchapp push --export /path/to/app
    '''
    conf = Mock(name='conf')
    appdir = '/mock_dir'

    commands.push(conf, None, appdir, export=True)
    mock_doc.assert_called_once_with(appdir, create=False, docid=None)
    conf.update.assert_called_once_with(appdir)


@patch('couchapp.commands.document', return_value='{"status": "ok"}')
def test_push_export_inside(mock_doc):
    '''
    In the app dir::

    $ couchapp push --export
    '''
    conf = Mock(name='conf')
    appdir = '/mock_dir'

    commands.push(conf, appdir, export=True)
    mock_doc.assert_called_once_with(appdir, create=False, docid=None)
    conf.update.assert_called_once_with(appdir)
|
Test cases for push with export flag
|
Test cases for push with export flag
|
Python
|
apache-2.0
|
couchapp/couchapp,h4ki/couchapp,couchapp/couchapp,couchapp/couchapp,h4ki/couchapp,h4ki/couchapp,couchapp/couchapp,h4ki/couchapp
|
9dc253b79d885ca205b557f88fca6fa35bd8fe21
|
tests/test_selector.py
|
tests/test_selector.py
|
from contextlib import contextmanager

from scell import Selector
from pytest import raises, fixture


def test_select(selector):
    # select() must report at least one monitor, each flagged ready.
    res = list(selector.select())
    assert res
    for event in res:
        assert event.ready


def test_select_empty():
    # Selecting on an empty selector yields nothing rather than blocking.
    sel = Selector()
    assert list(sel.select()) == []


def test_unregister(selector):
    # Unregistering every handle empties the selector.
    for fp in list(selector):
        selector.unregister(fp)
    assert not selector


def test_info(selector):
    # info() returns the monitor for registered handles, None otherwise.
    for fp in selector:
        assert selector.info(fp).wants_read
    assert selector.info(0) is None


def test_callbacks(selector):
    # NOTE(review): assumes the fixture's monitors each have a callback
    # returning 1, so the sum equals the number of registered handles.
    res = selector.select()
    exp = len(selector)
    assert sum(m.callback() for m in res) == exp


def test_ready(selector):
    # ready() yields only events that are ready.
    ready = list(selector.ready())
    assert ready
    for event in ready:
        assert event.ready


class TestScoped(object):
    """Tests for Selector.scoped(), which registers handles temporarily."""

    @fixture
    def sel(self):
        return Selector()

    def test_peaceful(self, sel, handles):
        # Inside the scope the handles are monitored; afterwards the
        # selector must be empty again.
        with sel.scoped(handles) as monitors:
            r = list(sel.ready())
            for ev in r:
                assert ev.monitored in monitors
                assert ev.fp in handles
            assert r
        assert not sel

    def test_exception(self, sel, handles):
        # Handles are deregistered even when the body raises.
        with raises(NameError):
            with sel.scoped(handles) as _:
                raise NameError
        assert not sel
|
from contextlib import contextmanager

from scell import Selector
from pytest import raises, fixture


def test_select(selector):
    # select() must report at least one monitor, each flagged ready.
    res = list(selector.select())
    assert res
    for event in res:
        assert event.ready


def test_select_empty():
    # Selecting on an empty selector yields nothing rather than blocking.
    sel = Selector()
    assert list(sel.select()) == []


def test_unregister(selector):
    # Unregistering every handle empties the selector.
    for fp in list(selector):
        selector.unregister(fp)
    assert not selector


def test_info(selector):
    # info() returns the monitor for registered handles, None otherwise.
    for fp in selector:
        assert selector.info(fp).wants_read
    assert selector.info(0) is None


def test_callbacks(selector):
    # NOTE(review): assumes the fixture's monitors each have a callback
    # returning 1, so the sum equals the number of registered handles.
    res = selector.select()
    exp = len(selector)
    assert sum(m.callback() for m in res) == exp


def test_ready(selector):
    # ready() yields only events that are ready.
    ready = list(selector.ready())
    assert ready
    for event in ready:
        assert event.ready


class TestScoped(object):
    """Tests for Selector.scoped(), which registers handles temporarily."""

    @fixture
    def sel(self):
        return Selector()

    def test_peaceful(self, sel, handles):
        # While scoped, exactly the given handles are ready; afterwards
        # the selector must be empty again.
        with sel.scoped(handles) as monitors:
            r = set(k.fp for k in sel.ready())
            assert r == set(handles)
        assert not sel

    def test_exception(self, sel, handles):
        # Handles are deregistered even when the body raises.
        with raises(NameError):
            with sel.scoped(handles) as _:
                raise NameError
        assert not sel
|
Make Selector.scope test more rigorous
|
Make Selector.scope test more rigorous
|
Python
|
mit
|
eugene-eeo/scell
|
7520e1285af36292def45f892808841e78cc4a2b
|
bloop/index.py
|
bloop/index.py
|
# Sentinel distinguishing "no name given" from an explicit ``name=None``.
missing = object()


class GlobalSecondaryIndex(object):
    """Definition of a DynamoDB global secondary index."""

    def __init__(self, hash_key=None, range_key=None,
                 write_units=1, read_units=1, name=missing):
        self._model_name = None
        self._backing_name = name
        self.hash_key = hash_key
        self.range_key = range_key
        self.write_units = write_units
        self.read_units = read_units

    @property
    def model_name(self):
        ''' Name of the model's attr that references self '''
        return self._model_name

    @property
    def dynamo_name(self):
        """Index name in DynamoDB; falls back to the model attribute name."""
        explicit = self._backing_name
        return self.model_name if explicit is missing else explicit
|
class Index(object):
    """Base definition shared by DynamoDB secondary indexes."""

    def __init__(self, write_units=1, read_units=1, name=None, range_key=None):
        self._model_name = None
        self._dynamo_name = name
        self.range_key = range_key
        self.write_units = write_units
        self.read_units = read_units

    @property
    def model_name(self):
        ''' Name of the model's attr that references self '''
        return self._model_name

    @model_name.setter
    def model_name(self, value):
        # Write-once: re-binding the index to another attribute is an error.
        if self._model_name is None:
            self._model_name = value
        else:
            raise AttributeError("{} model_name already set to '{}'".format(
                self.__class__.__name__, self._model_name))

    @property
    def dynamo_name(self):
        """Index name in DynamoDB; falls back to the model attribute name."""
        explicit = self._dynamo_name
        return self.model_name if explicit is None else explicit
class GlobalSecondaryIndex(Index):
    """A global secondary index: adds its own hash key to the base Index."""

    def __init__(self, hash_key=None, **kwargs):
        self.hash_key = hash_key
        super().__init__(**kwargs)
class LocalSecondaryIndex(Index):
    ''' when constructing a model, you MUST set this index's model attr. '''

    @property
    def hash_key(self):
        # LSIs share the table's hash key, looked up via the owning model.
        # NOTE(review): relies on ``self.model`` being assigned externally and
        # on the model's ``__meta__`` mapping — confirm against model setup.
        hash_column = self.model.__meta__['dynamo.table.hash_key']
        return hash_column.dynamo_name
|
Refactor GSI, LSI to use base Index class
|
Refactor GSI, LSI to use base Index class
|
Python
|
mit
|
numberoverzero/bloop,numberoverzero/bloop
|
db4ccce9e418a1227532bde8834ca682bc873609
|
system/t04_mirror/show.py
|
system/t04_mirror/show.py
|
from lib import BaseTest


class ShowMirror1Test(BaseTest):
    """
    show mirror: regular mirror
    """
    fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"]
    runCmd = "aptly mirror show mirror1"


class ShowMirror2Test(BaseTest):
    """
    show mirror: missing mirror
    """
    runCmd = "aptly mirror show mirror-xx"
    # Showing a nonexistent mirror must fail with exit code 1.
    expectedCode = 1


class ShowMirror3Test(BaseTest):
    """
    show mirror: regular mirror with packages
    """
    # Uses the pre-built fixture package database.
    fixtureDB = True
    runCmd = "aptly mirror show --with-packages wheezy-contrib"
|
from lib import BaseTest

import re


class ShowMirror1Test(BaseTest):
    """
    show mirror: regular mirror
    """
    fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"]
    runCmd = "aptly mirror show mirror1"


class ShowMirror2Test(BaseTest):
    """
    show mirror: missing mirror
    """
    runCmd = "aptly mirror show mirror-xx"
    # Showing a nonexistent mirror must fail with exit code 1.
    expectedCode = 1


class ShowMirror3Test(BaseTest):
    """
    show mirror: regular mirror with packages
    """
    # Uses the pre-built fixture package database.
    fixtureDB = True
    runCmd = "aptly mirror show --with-packages wheezy-contrib"
    # Strip the volatile "Last update:" timestamp before comparing output;
    # the first lambda parameter is the (unused) test instance.
    outputMatchPrepare = lambda _, s: re.sub(r"Last update: [0-9:A-Za-z -]+\n", "", s)
|
Remove updated at while comparing.
|
Remove updated at while comparing.
|
Python
|
mit
|
gearmover/aptly,bsundsrud/aptly,adfinis-forks/aptly,vincentbernat/aptly,gdbdzgd/aptly,ceocoder/aptly,adfinis-forks/aptly,seaninspace/aptly,neolynx/aptly,scalp42/aptly,gdbdzgd/aptly,sobczyk/aptly,neolynx/aptly,scalp42/aptly,aptly-dev/aptly,seaninspace/aptly,aptly-dev/aptly,bsundsrud/aptly,gdbdzgd/aptly,bankonme/aptly,adfinis-forks/aptly,sobczyk/aptly,seaninspace/aptly,vincentbernat/aptly,smira/aptly,jola5/aptly,scalp42/aptly,smira/aptly,ceocoder/aptly,gearmover/aptly,bankonme/aptly,bsundsrud/aptly,vincentbernat/aptly,ceocoder/aptly,jola5/aptly,jola5/aptly,aptly-dev/aptly,gearmover/aptly,sobczyk/aptly,neolynx/aptly,smira/aptly,bankonme/aptly
|
1e8c094c0f806b624a41447446676c1f2ac3590d
|
tools/debug_adapter.py
|
tools/debug_adapter.py
|
#!/usr/bin/python
"""Start the LLDB debug adapter as a TCP server."""
import sys

# On macOS, LLDB's Python bindings live inside the Xcode app bundle.
if 'darwin' in sys.platform:
    sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python')
sys.path.append('.')

import adapter
adapter.main.run_tcp_server()
|
#!/usr/bin/python
"""Start the LLDB debug adapter, locating LLDB's Python bindings via ``lldb -P``."""
import sys
import subprocess

# Ask lldb where its Python bindings live and put that path on sys.path.
out = subprocess.check_output(['lldb', '-P'])
# BUG FIX: ``string.strip(out)`` only exists on Python 2, and check_output
# returns bytes on Python 3. Decode when needed and use str.strip, which
# works on both.
if not isinstance(out, str):
    out = out.decode('utf-8')
sys.path.append(out.strip())
sys.path.append('.')

import adapter
adapter.main.run_tcp_server()
|
Fix adapter debugging on Linux.
|
Fix adapter debugging on Linux.
|
Python
|
mit
|
vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb
|
b757a5e24fa8018647827b8194c985881df872d5
|
scipy/signal/setup.py
|
scipy/signal/setup.py
|
#!/usr/bin/env python

def configuration(parent_package='',top_path=None):
    """Build the numpy.distutils configuration for scipy.signal."""
    from numpy.distutils.misc_util import Configuration
    config = Configuration('signal', parent_package, top_path)

    config.add_data_dir('tests')

    # C extension implementing the low-level filtering routines.
    config.add_extension('sigtools',
                         sources=['sigtoolsmodule.c',
                                  'firfilter.c','medianfilter.c'],
                         depends = ['sigtools.h']
                         )

    # B-spline evaluation routines, one source per numeric type (S/D/C/Z).
    config.add_extension('spline',
                         sources = ['splinemodule.c','S_bspline_util.c','D_bspline_util.c',
                                    'C_bspline_util.c','Z_bspline_util.c','bspline_util.c'],
                         )

    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
|
#!/usr/bin/env python

def configuration(parent_package='',top_path=None):
    """Build the numpy.distutils configuration for scipy.signal."""
    from numpy.distutils.misc_util import Configuration
    config = Configuration('signal', parent_package, top_path)

    config.add_data_dir('tests')

    # C extension implementing the low-level filtering routines.
    # newsig.c is listed in depends so edits to it trigger a rebuild.
    config.add_extension('sigtools',
                         sources=['sigtoolsmodule.c',
                                  'firfilter.c','medianfilter.c'],
                         depends = ['sigtools.h', 'newsig.c']
                         )

    # B-spline evaluation routines, one source per numeric type (S/D/C/Z).
    config.add_extension('spline',
                         sources = ['splinemodule.c','S_bspline_util.c','D_bspline_util.c',
                                    'C_bspline_util.c','Z_bspline_util.c','bspline_util.c'],
                         )

    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
|
Add newsig.c as a dependency to sigtools module.
|
Add newsig.c as a dependency to sigtools module.
|
Python
|
bsd-3-clause
|
andyfaff/scipy,sauliusl/scipy,newemailjdm/scipy,jor-/scipy,mikebenfield/scipy,jsilter/scipy,mortada/scipy,josephcslater/scipy,jjhelmus/scipy,trankmichael/scipy,larsmans/scipy,jamestwebber/scipy,jonycgn/scipy,haudren/scipy,petebachant/scipy,endolith/scipy,vigna/scipy,e-q/scipy,raoulbq/scipy,aeklant/scipy,fredrikw/scipy,josephcslater/scipy,efiring/scipy,scipy/scipy,nonhermitian/scipy,sriki18/scipy,dch312/scipy,nvoron23/scipy,gfyoung/scipy,ales-erjavec/scipy,woodscn/scipy,kalvdans/scipy,nvoron23/scipy,gef756/scipy,richardotis/scipy,pyramania/scipy,Newman101/scipy,gfyoung/scipy,person142/scipy,teoliphant/scipy,njwilson23/scipy,aman-iitj/scipy,Gillu13/scipy,pbrod/scipy,gef756/scipy,futurulus/scipy,sonnyhu/scipy,ChanderG/scipy,tylerjereddy/scipy,jjhelmus/scipy,woodscn/scipy,witcxc/scipy,bkendzior/scipy,haudren/scipy,aman-iitj/scipy,jjhelmus/scipy,WarrenWeckesser/scipy,vhaasteren/scipy,pizzathief/scipy,zerothi/scipy,apbard/scipy,nonhermitian/scipy,haudren/scipy,andyfaff/scipy,endolith/scipy,gdooper/scipy,pschella/scipy,vanpact/scipy,dch312/scipy,anntzer/scipy,dch312/scipy,anielsen001/scipy,zxsted/scipy,niknow/scipy,newemailjdm/scipy,Srisai85/scipy,anntzer/scipy,person142/scipy,raoulbq/scipy,nonhermitian/scipy,richardotis/scipy,Newman101/scipy,rgommers/scipy,aarchiba/scipy,hainm/scipy,jonycgn/scipy,jsilter/scipy,matthew-brett/scipy,kalvdans/scipy,juliantaylor/scipy,pschella/scipy,Srisai85/scipy,Srisai85/scipy,Shaswat27/scipy,apbard/scipy,sargas/scipy,sriki18/scipy,mortonjt/scipy,ales-erjavec/scipy,Newman101/scipy,jakevdp/scipy,Dapid/scipy,maciejkula/scipy,jonycgn/scipy,Kamp9/scipy,lukauskas/scipy,trankmichael/scipy,endolith/scipy,andim/scipy,befelix/scipy,raoulbq/scipy,dch312/scipy,andim/scipy,grlee77/scipy,nmayorov/scipy,pschella/scipy,fernand/scipy,mtrbean/scipy,mgaitan/scipy,zxsted/scipy,vberaudi/scipy,scipy/scipy,vberaudi/scipy,andim/scipy,rmcgibbo/scipy,newemailjdm/scipy,hainm/scipy,aarchiba/scipy,behzadnouri/scipy,niknow/scipy,bkendzior/scipy,Eric89GXL/scipy,apbard/sc
ipy,mgaitan/scipy,Shaswat27/scipy,e-q/scipy,vberaudi/scipy,ilayn/scipy,woodscn/scipy,felipebetancur/scipy,kalvdans/scipy,witcxc/scipy,jseabold/scipy,anielsen001/scipy,mdhaber/scipy,rmcgibbo/scipy,Stefan-Endres/scipy,zerothi/scipy,vanpact/scipy,anntzer/scipy,Kamp9/scipy,Kamp9/scipy,ChanderG/scipy,tylerjereddy/scipy,minhlongdo/scipy,mikebenfield/scipy,piyush0609/scipy,WarrenWeckesser/scipy,richardotis/scipy,ogrisel/scipy,scipy/scipy,zaxliu/scipy,dominicelse/scipy,jjhelmus/scipy,lukauskas/scipy,andyfaff/scipy,fernand/scipy,gertingold/scipy,scipy/scipy,mhogg/scipy,mingwpy/scipy,arokem/scipy,aeklant/scipy,jakevdp/scipy,mhogg/scipy,WillieMaddox/scipy,zxsted/scipy,perimosocordiae/scipy,befelix/scipy,sauliusl/scipy,ales-erjavec/scipy,jseabold/scipy,rgommers/scipy,aarchiba/scipy,newemailjdm/scipy,Shaswat27/scipy,pnedunuri/scipy,pyramania/scipy,mingwpy/scipy,mortonjt/scipy,niknow/scipy,befelix/scipy,ales-erjavec/scipy,Newman101/scipy,ChanderG/scipy,jonycgn/scipy,trankmichael/scipy,lukauskas/scipy,Dapid/scipy,pnedunuri/scipy,Eric89GXL/scipy,andyfaff/scipy,cpaulik/scipy,Eric89GXL/scipy,gertingold/scipy,anntzer/scipy,giorgiop/scipy,vhaasteren/scipy,zerothi/scipy,behzadnouri/scipy,futurulus/scipy,surhudm/scipy,sargas/scipy,rgommers/scipy,lhilt/scipy,sargas/scipy,Newman101/scipy,Dapid/scipy,pizzathief/scipy,maniteja123/scipy,FRidh/scipy,matthewalbani/scipy,niknow/scipy,WarrenWeckesser/scipy,Srisai85/scipy,rmcgibbo/scipy,ndchorley/scipy,gfyoung/scipy,chatcannon/scipy,larsmans/scipy,zerothi/scipy,Shaswat27/scipy,sonnyhu/scipy,pizzathief/scipy,ilayn/scipy,gdooper/scipy,aeklant/scipy,efiring/scipy,gfyoung/scipy,jor-/scipy,nmayorov/scipy,petebachant/scipy,sonnyhu/scipy,gdooper/scipy,maniteja123/scipy,jsilter/scipy,mortada/scipy,witcxc/scipy,mtrbean/scipy,andyfaff/scipy,njwilson23/scipy,mdhaber/scipy,hainm/scipy,Stefan-Endres/scipy,ogrisel/scipy,minhlongdo/scipy,gef756/scipy,jor-/scipy,vanpact/scipy,pbrod/scipy,mdhaber/scipy,gertingold/scipy,vberaudi/scipy,felipebetancur/scipy,pnedunuri
/scipy,mdhaber/scipy,richardotis/scipy,dominicelse/scipy,cpaulik/scipy,mtrbean/scipy,mtrbean/scipy,giorgiop/scipy,nvoron23/scipy,gertingold/scipy,FRidh/scipy,ortylp/scipy,mikebenfield/scipy,Stefan-Endres/scipy,Stefan-Endres/scipy,felipebetancur/scipy,kleskjr/scipy,gef756/scipy,pizzathief/scipy,sauliusl/scipy,sauliusl/scipy,mingwpy/scipy,jor-/scipy,cpaulik/scipy,rmcgibbo/scipy,behzadnouri/scipy,kleskjr/scipy,hainm/scipy,pschella/scipy,larsmans/scipy,petebachant/scipy,mortada/scipy,vigna/scipy,Newman101/scipy,minhlongdo/scipy,perimosocordiae/scipy,bkendzior/scipy,zaxliu/scipy,vigna/scipy,Dapid/scipy,Shaswat27/scipy,maniteja123/scipy,njwilson23/scipy,pyramania/scipy,jonycgn/scipy,raoulbq/scipy,josephcslater/scipy,anntzer/scipy,befelix/scipy,ortylp/scipy,nmayorov/scipy,gdooper/scipy,anielsen001/scipy,fernand/scipy,haudren/scipy,ortylp/scipy,jsilter/scipy,niknow/scipy,ilayn/scipy,sonnyhu/scipy,jseabold/scipy,felipebetancur/scipy,surhudm/scipy,mortonjt/scipy,pnedunuri/scipy,mortada/scipy,zaxliu/scipy,josephcslater/scipy,grlee77/scipy,felipebetancur/scipy,matthew-brett/scipy,teoliphant/scipy,ortylp/scipy,vanpact/scipy,larsmans/scipy,argriffing/scipy,fernand/scipy,mortonjt/scipy,zxsted/scipy,zxsted/scipy,cpaulik/scipy,vigna/scipy,petebachant/scipy,maniteja123/scipy,teoliphant/scipy,Gillu13/scipy,larsmans/scipy,jamestwebber/scipy,njwilson23/scipy,mgaitan/scipy,vhaasteren/scipy,fernand/scipy,chatcannon/scipy,endolith/scipy,hainm/scipy,andim/scipy,grlee77/scipy,tylerjereddy/scipy,matthewalbani/scipy,minhlongdo/scipy,sargas/scipy,mhogg/scipy,Gillu13/scipy,vigna/scipy,jamestwebber/scipy,jsilter/scipy,richardotis/scipy,gef756/scipy,FRidh/scipy,mgaitan/scipy,zaxliu/scipy,mdhaber/scipy,piyush0609/scipy,matthewalbani/scipy,arokem/scipy,josephcslater/scipy,maciejkula/scipy,person142/scipy,ogrisel/scipy,mikebenfield/scipy,Srisai85/scipy,mingwpy/scipy,matthew-brett/scipy,piyush0609/scipy,Stefan-Endres/scipy,nonhermitian/scipy,arokem/scipy,maciejkula/scipy,argriffing/scipy,zerothi/scipy
,lhilt/scipy,ogrisel/scipy,lukauskas/scipy,efiring/scipy,bkendzior/scipy,grlee77/scipy,arokem/scipy,matthew-brett/scipy,nonhermitian/scipy,rmcgibbo/scipy,fredrikw/scipy,sonnyhu/scipy,pnedunuri/scipy,sriki18/scipy,andim/scipy,niknow/scipy,zxsted/scipy,kalvdans/scipy,sauliusl/scipy,WillieMaddox/scipy,richardotis/scipy,tylerjereddy/scipy,vhaasteren/scipy,mtrbean/scipy,dch312/scipy,piyush0609/scipy,aarchiba/scipy,gdooper/scipy,andyfaff/scipy,raoulbq/scipy,aeklant/scipy,newemailjdm/scipy,Kamp9/scipy,Gillu13/scipy,newemailjdm/scipy,kleskjr/scipy,andim/scipy,Dapid/scipy,giorgiop/scipy,fredrikw/scipy,maciejkula/scipy,Eric89GXL/scipy,mgaitan/scipy,larsmans/scipy,apbard/scipy,behzadnouri/scipy,gertingold/scipy,rgommers/scipy,pbrod/scipy,haudren/scipy,petebachant/scipy,futurulus/scipy,ndchorley/scipy,endolith/scipy,ChanderG/scipy,aarchiba/scipy,vhaasteren/scipy,petebachant/scipy,WarrenWeckesser/scipy,jseabold/scipy,woodscn/scipy,surhudm/scipy,aman-iitj/scipy,apbard/scipy,mhogg/scipy,efiring/scipy,pbrod/scipy,surhudm/scipy,WarrenWeckesser/scipy,trankmichael/scipy,giorgiop/scipy,efiring/scipy,witcxc/scipy,raoulbq/scipy,fernand/scipy,cpaulik/scipy,zaxliu/scipy,woodscn/scipy,teoliphant/scipy,maciejkula/scipy,lukauskas/scipy,matthewalbani/scipy,perimosocordiae/scipy,mortonjt/scipy,scipy/scipy,aman-iitj/scipy,fredrikw/scipy,person142/scipy,ndchorley/scipy,njwilson23/scipy,pschella/scipy,matthewalbani/scipy,Shaswat27/scipy,argriffing/scipy,Gillu13/scipy,Kamp9/scipy,nvoron23/scipy,Kamp9/scipy,jakevdp/scipy,nvoron23/scipy,woodscn/scipy,bkendzior/scipy,surhudm/scipy,e-q/scipy,matthew-brett/scipy,argriffing/scipy,argriffing/scipy,vhaasteren/scipy,njwilson23/scipy,WillieMaddox/scipy,cpaulik/scipy,FRidh/scipy,dominicelse/scipy,WillieMaddox/scipy,haudren/scipy,Gillu13/scipy,sonnyhu/scipy,jor-/scipy,ales-erjavec/scipy,FRidh/scipy,jseabold/scipy,ortylp/scipy,ChanderG/scipy,juliantaylor/scipy,hainm/scipy,Eric89GXL/scipy,sriki18/scipy,pyramania/scipy,behzadnouri/scipy,rgommers/scipy,juliantaylo
r/scipy,aman-iitj/scipy,pbrod/scipy,pbrod/scipy,jjhelmus/scipy,ilayn/scipy,ilayn/scipy,ortylp/scipy,argriffing/scipy,sriki18/scipy,mtrbean/scipy,trankmichael/scipy,e-q/scipy,futurulus/scipy,vberaudi/scipy,teoliphant/scipy,sargas/scipy,tylerjereddy/scipy,grlee77/scipy,juliantaylor/scipy,endolith/scipy,mgaitan/scipy,scipy/scipy,ogrisel/scipy,surhudm/scipy,ilayn/scipy,ales-erjavec/scipy,mdhaber/scipy,jamestwebber/scipy,ndchorley/scipy,ndchorley/scipy,mortonjt/scipy,witcxc/scipy,anntzer/scipy,futurulus/scipy,pnedunuri/scipy,Stefan-Endres/scipy,futurulus/scipy,anielsen001/scipy,perimosocordiae/scipy,Eric89GXL/scipy,nmayorov/scipy,nmayorov/scipy,FRidh/scipy,chatcannon/scipy,jamestwebber/scipy,person142/scipy,gef756/scipy,kalvdans/scipy,mingwpy/scipy,giorgiop/scipy,minhlongdo/scipy,kleskjr/scipy,piyush0609/scipy,rmcgibbo/scipy,arokem/scipy,Dapid/scipy,mortada/scipy,WarrenWeckesser/scipy,dominicelse/scipy,WillieMaddox/scipy,anielsen001/scipy,piyush0609/scipy,jonycgn/scipy,ChanderG/scipy,zerothi/scipy,kleskjr/scipy,fredrikw/scipy,mortada/scipy,gfyoung/scipy,aman-iitj/scipy,vberaudi/scipy,anielsen001/scipy,lhilt/scipy,minhlongdo/scipy,chatcannon/scipy,jseabold/scipy,dominicelse/scipy,felipebetancur/scipy,lukauskas/scipy,mingwpy/scipy,fredrikw/scipy,nvoron23/scipy,perimosocordiae/scipy,befelix/scipy,trankmichael/scipy,mhogg/scipy,ndchorley/scipy,sauliusl/scipy,vanpact/scipy,behzadnouri/scipy,giorgiop/scipy,efiring/scipy,sriki18/scipy,lhilt/scipy,jakevdp/scipy,pizzathief/scipy,jakevdp/scipy,chatcannon/scipy,kleskjr/scipy,pyramania/scipy,vanpact/scipy,perimosocordiae/scipy,zaxliu/scipy,chatcannon/scipy,maniteja123/scipy,WillieMaddox/scipy,juliantaylor/scipy,mikebenfield/scipy,e-q/scipy,maniteja123/scipy,aeklant/scipy,mhogg/scipy,Srisai85/scipy,lhilt/scipy
|
73e8864e745ca75c2ea327b53244c9f2f4183e1a
|
lambda_function.py
|
lambda_function.py
|
#!/usr/bin/env python2
"""AWS Lambda entry point that publishes DMR contact lists to S3."""

from StringIO import StringIO

import boto3

from dmr_marc_users_cs750 import (
    get_users, get_groups,
    write_contacts_csv,
    write_contacts_xlsx
    )


def s3_contacts(contacts, bucket, key):
    """Serialize *contacts* (CSV or XLSX, chosen by the key's extension)
    and upload the result to s3://bucket/key with a public-read ACL."""
    s3 = boto3.client('s3')

    o = StringIO()

    if key.endswith('.csv'):
        t = 'text/csv'
        write_contacts_csv(contacts, o)
    elif key.endswith('.xlsx'):
        t = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        write_contacts_xlsx(contacts, o)

    s3.put_object(
        Bucket=bucket, Key=key,
        Body=o.getvalue(), ContentType=t, ACL='public-read')
    o.close()


def lambda_handler(event=None, context=None):
    """Lambda handler: regenerate and upload the contact exports."""
    users = get_users()
    groups = get_groups()

    s3_contacts(contacts=users, bucket='dmr-contacts', key='DMR_contacts.csv')
    s3_contacts(contacts=groups+users,
                bucket='dmr-contacts', key='contacts-dci.xlsx')


if __name__ == '__main__':
    lambda_handler()
|
#!/usr/bin/env python2
"""AWS Lambda entry point that publishes DMR contact lists to S3."""

from StringIO import StringIO

import boto3

from dmr_marc_users_cs750 import (
    get_users, get_groups,
    write_contacts_csv,
    write_contacts_xlsx,
    )
from dmrx_most_heard_n0gsg import (
    get_users as get_most_heard,
    write_n0gsg_csv,
    )


def s3_contacts(contacts, bucket, key):
    """Serialize *contacts* and upload to s3://bucket/key (public-read).

    The format is chosen from the key: ``.csv`` vs ``.xlsx``, with
    ``N0GSG/``-prefixed keys using the N0GSG CSV layout.
    """
    s3 = boto3.client('s3')

    o = StringIO()

    if key.endswith('.csv'):
        t = 'text/csv'
        if key.startswith('N0GSG/'):
            write_n0gsg_csv(contacts, o)
        else:
            write_contacts_csv(contacts, o)
    elif key.endswith('.xlsx'):
        t = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        write_contacts_xlsx(contacts, o)

    s3.put_object(
        Bucket=bucket, Key=key,
        Body=o.getvalue(), ContentType=t, ACL='public-read')
    o.close()


def lambda_handler(event=None, context=None):
    """Lambda handler: regenerate and upload all contact exports."""
    marc = get_users()
    dmrx = get_most_heard()
    groups = get_groups()

    s3_contacts(contacts=marc, bucket='dmr-contacts',
                key='CS750/DMR_contacts.csv')
    s3_contacts(contacts=groups+marc, bucket='dmr-contacts',
                key='CS750/dci-bm-marc.xlsx')
    s3_contacts(contacts=dmrx, bucket='dmr-contacts',
                key='N0GSG/dmrx-most-heard.csv')


if __name__ == '__main__':
    lambda_handler()
|
Add N0GSG DMRX MostHeard to AWS Lambda function
|
Add N0GSG DMRX MostHeard to AWS Lambda function
|
Python
|
apache-2.0
|
ajorg/DMR_contacts
|
6dfb0c1ea4fb3d12d14a07d0e831eb32f3b2f340
|
yaml_argparse.py
|
yaml_argparse.py
|
import argparse

import yaml


def parse_arguments_based_on_yaml(yaml_file):
    """Build argparse arguments from the key/value pairs in *yaml_file*.

    The first top-level YAML key becomes a ``-<key>`` option whose default
    is the corresponding YAML value; returns the parsed namespace.
    """
    with open(yaml_file) as f:
        # SECURITY: yaml.load without an explicit Loader can construct
        # arbitrary objects; prefer yaml.safe_load for untrusted files.
        yaml_data = yaml.load(f)

    # to start with, support only a single parameter
    key = list(yaml_data.keys())[0]
    value = yaml_data[key]

    parser = argparse.ArgumentParser()
    parser.add_argument("-{}".format(key), default=value)
    args = parser.parse_args()
    return args
|
import argparse

import yaml


def parse_arguments_based_on_yaml(yaml_file):
    """Build argparse arguments from the key/value pairs in *yaml_file*.

    Every top-level YAML key becomes a ``-<key>`` option whose default is
    the corresponding YAML value; returns the parsed namespace.
    """
    with open(yaml_file) as f:
        # SECURITY: yaml.load without an explicit Loader can construct
        # arbitrary objects; prefer yaml.safe_load for untrusted files.
        yaml_data = yaml.load(f)

    parser = argparse.ArgumentParser()
    for key, value in yaml_data.items():
        parser.add_argument("-{}".format(key), default=value)
    args = parser.parse_args()
    return args
|
Implement creating arguments for multiple strings
|
Implement creating arguments for multiple strings
|
Python
|
mit
|
krasch/yaml_argparse,krasch/quickargs
|
fbcdd58775be1b6a72e1d1415f62a7bfade3dbd1
|
pages/views.py
|
pages/views.py
|
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.contrib.sites.models import SITE_CACHE
from pages import settings
from pages.models import Page, Content
from pages.utils import auto_render, get_language_from_request
def details(request, page_id=None, slug=None,
template_name=settings.DEFAULT_PAGE_TEMPLATE):
lang = get_language_from_request(request)
site = request.site
pages = Page.objects.navigation(site).order_by("tree_id")
if pages:
if page_id:
current_page = get_object_or_404(
Page.objects.published(site), pk=page_id)
elif slug:
slug_content = Content.objects.get_page_slug(slug, site)
if slug_content and \
slug_content.page.calculated_status in (
Page.PUBLISHED, Page.HIDDEN):
current_page = slug_content.page
else:
raise Http404
else:
current_page = pages[0]
template_name = current_page.get_template()
else:
raise Http404
return template_name, locals()
details = auto_render(details)
|
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.contrib.sites.models import SITE_CACHE
from pages import settings
from pages.models import Page, Content
from pages.utils import auto_render, get_language_from_request
def details(request, page_id=None, slug=None,
template_name=settings.DEFAULT_PAGE_TEMPLATE):
"""
Example view that get the root pages for navigation,
and the current page if there is any root page.
All is rendered with the current page's template.
"""
lang = get_language_from_request(request)
site = request.site
pages = Page.objects.navigation(site).order_by("tree_id")
if pages:
if page_id:
current_page = get_object_or_404(
Page.objects.published(site), pk=page_id)
elif slug:
slug_content = Content.objects.get_page_slug(slug, site)
if slug_content and \
slug_content.page.calculated_status in (
Page.PUBLISHED, Page.HIDDEN):
current_page = slug_content.page
else:
raise Http404
else:
current_page = pages[0]
template_name = current_page.get_template()
else:
raise Http404
return template_name, locals()
details = auto_render(details)
|
Add documentation to the default view
|
Add documentation to the default view
git-svn-id: 54fea250f97f2a4e12c6f7a610b8f07cb4c107b4@292 439a9e5f-3f3e-0410-bc46-71226ad0111b
|
Python
|
bsd-3-clause
|
remik/django-page-cms,akaihola/django-page-cms,oliciv/django-page-cms,akaihola/django-page-cms,akaihola/django-page-cms,remik/django-page-cms,oliciv/django-page-cms,batiste/django-page-cms,pombredanne/django-page-cms-1,pombredanne/django-page-cms-1,remik/django-page-cms,pombredanne/django-page-cms-1,batiste/django-page-cms,oliciv/django-page-cms,batiste/django-page-cms,remik/django-page-cms
|
eae4b06bd798eab3a46bdd5b7452411bb7fb02e1
|
dashcam.py
|
dashcam.py
|
# dashcam.py
# A Raspberry Pi powered, GPS enabled, 3D printed bicycle dashcam
# By Matthew Timmons-Brown, The Raspberry Pi Guy
import pygame
import picamera
import os
os.putenv('SDL_VIDEODRIVER', 'fbcon')
os.putenv('SDL_FBDEV' , '/dev/fb1')
os.putenv('SDL_MOUSEDRV' , 'TSLIB')
os.putenv('SDL_MOUSEDEV' , '/dev/input/touchscreen')
pygame.init()
pygame.mouse.set_visible(False)
screen = pygame.display.set_mode((0,0), pygame.FULLSCREEN)
|
# dashcam.py
# A Raspberry Pi powered, GPS enabled, 3D printed bicycle dashcam
# By Matthew Timmons-Brown, The Raspberry Pi Guy
import pygame
import picamera
import os
import sys
import io
os.putenv('SDL_VIDEODRIVER', 'fbcon')
os.putenv('SDL_FBDEV' , '/dev/fb1')
os.putenv('SDL_MOUSEDRV' , 'TSLIB')
os.putenv('SDL_MOUSEDEV' , '/dev/input/touchscreen')
size = width, height = 320, 240
pygame.init()
pygame.mouse.set_visible(False)
screen = pygame.display.set_mode(size)
go_button = pygame.image.load("/home/pi/bike_dashcam/media/go.bmp")
|
Update dascham with pygame GO button load
|
Update dascham with pygame GO button load
|
Python
|
mit
|
the-raspberry-pi-guy/bike_dashcam,the-raspberry-pi-guy/bike_dashcam
|
b35d4292e50e8a8dc56635bddeac5a1fc42a5d19
|
tveebot_tracker/source.py
|
tveebot_tracker/source.py
|
from abc import ABC, abstractmethod
class TVShowNotFound(Exception):
""" Raised when a reference does not match any TV Show available """
class EpisodeSource(ABC):
"""
Abstract base class to define the interface for and episode source.
An episode source is used by the tracker to obtain episode files. A
source is usually based on a feed that provides links to TV Show's
episodes.
Every source has its own protocol to obtain the information and it uses
its own format to present that information. Implementations of this
interface are responsible for implementing the details of how to obtain
the episode files' information and present them to the tracker.
"""
# Called by the tracker when it wants to get the episodes available for
# a specific TVShow
@abstractmethod
def get_episodes_for(self, tvshow_reference: str) -> list:
"""
Retrieve all available episode files corresponding to the specified
TV show. Multiple files for the same episode may be retrieved.
The TV show to obtain the episodes from is identified by some reference
that uniquely identifies it within the episode source in question.
:param tvshow_reference: reference that uniquely identifies the TV show
to get the episodes for
:return: a list containing all episode files available for the specified
TV Show. An empty list if none is found.
:raise TVShowNotFound: if the specified reference does not match to any
TV show available
"""
|
from abc import ABC, abstractmethod
class TVShowNotFound(Exception):
""" Raised when a reference does not match any TV Show available """
class EpisodeSource(ABC):
"""
Abstract base class to define the interface for and episode source.
An episode source is used by the tracker to obtain episode files. A
source is usually based on a feed that provides links to TV Show's
episodes.
Every source has its own protocol to obtain the information and it uses
its own format to present that information. Implementations of this
interface are responsible for implementing the details of how to obtain
the episode files' information and present them to the tracker.
"""
# Called by the tracker when it wants to get the episodes available for
# a specific TVShow
@abstractmethod
def fetch(self, tvshow_reference: str) -> list:
"""
Fetches all available episode files, corresponding to the specified
TV show. Multiple files for the same episode may be retrieved.
The TV show to obtain the episodes from is identified by some reference
that uniquely identifies it within the episode source in question.
:param tvshow_reference: reference that uniquely identifies the TV show
to get the episodes for
:return: a list containing all episode files available for the specified
TV Show. An empty list if none is found.
:raise TVShowNotFound: if the specified reference does not match to any
TV show available
"""
|
Rename Source's get_episodes_for() method to fetch()
|
Rename Source's get_episodes_for() method to fetch()
|
Python
|
mit
|
tveebot/tracker
|
c7601ed4144b12717f536f2fc2fc0ddb5745ec27
|
opentaxii/auth/sqldb/models.py
|
opentaxii/auth/sqldb/models.py
|
import hmac
import bcrypt
from sqlalchemy.schema import Column
from sqlalchemy.types import Integer, String
from sqlalchemy.ext.declarative import declarative_base
__all__ = ['Base', 'Account']
Base = declarative_base()
MAX_STR_LEN = 256
class Account(Base):
__tablename__ = 'accounts'
id = Column(Integer, primary_key=True)
username = Column(String(MAX_STR_LEN), unique=True)
password_hash = Column(String(MAX_STR_LEN))
def set_password(self, password):
if isinstance(password, unicode):
password = password.encode('utf-8')
self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt())
def is_password_valid(self, password):
if isinstance(password, unicode):
password = password.encode('utf-8')
hashed = self.password_hash.encode('utf-8')
return hmac.compare_digest(bcrypt.hashpw(password, hashed), hashed)
|
import bcrypt
from sqlalchemy.schema import Column
from sqlalchemy.types import Integer, String
from sqlalchemy.ext.declarative import declarative_base
from werkzeug.security import safe_str_cmp
__all__ = ['Base', 'Account']
Base = declarative_base()
MAX_STR_LEN = 256
class Account(Base):
__tablename__ = 'accounts'
id = Column(Integer, primary_key=True)
username = Column(String(MAX_STR_LEN), unique=True)
password_hash = Column(String(MAX_STR_LEN))
def set_password(self, password):
if isinstance(password, unicode):
password = password.encode('utf-8')
self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt())
def is_password_valid(self, password):
if isinstance(password, unicode):
password = password.encode('utf-8')
hashed = self.password_hash.encode('utf-8')
return safe_str_cmp(bcrypt.hashpw(password, hashed), hashed)
|
Use Werkzeug's safe_str_cmp() instead of hmac.compare_digest()
|
Use Werkzeug's safe_str_cmp() instead of hmac.compare_digest()
Werkzeug will use the latter on Python >2.7.7, and provides a fallback
for older Python versions.
|
Python
|
bsd-3-clause
|
EclecticIQ/OpenTAXII,Intelworks/OpenTAXII,EclecticIQ/OpenTAXII,Intelworks/OpenTAXII
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.