commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
6384e6a23f73eddf1099e01ed0d8c067141651a5
|
tcelery/__init__.py
|
tcelery/__init__.py
|
from __future__ import absolute_import
import celery
from tornado import ioloop
from .connection import ConnectionPool
from .producer import NonBlockingTaskProducer
from .result import AsyncResult
VERSION = (0, 4, 0)
__version__ = '.'.join(map(str, VERSION)) + '-dev'
def setup_nonblocking_producer(celery_app=None, io_loop=None,
on_ready=None, result_cls=AsyncResult,
limit=1):
celery_app = celery_app or celery.current_app
io_loop = io_loop or ioloop.IOLoop.instance()
NonBlockingTaskProducer.app = celery_app
NonBlockingTaskProducer.conn_pool = ConnectionPool(limit, io_loop)
NonBlockingTaskProducer.result_cls = result_cls
if celery_app.conf['BROKER_URL'] and celery_app.conf['BROKER_URL'].startswith('amqp'):
celery.app.amqp.AMQP.producer_cls = NonBlockingTaskProducer
def connect():
broker_url = celery_app.connection().as_uri(include_password=True)
options = celery_app.conf.get('CELERYT_PIKA_OPTIONS', {})
NonBlockingTaskProducer.conn_pool.connect(broker_url,
options=options,
callback=on_ready)
io_loop.add_callback(connect)
|
from __future__ import absolute_import
import celery
from tornado import ioloop
from .connection import ConnectionPool
from .producer import NonBlockingTaskProducer
from .result import AsyncResult
VERSION = (0, 3, 4)
__version__ = '.'.join(map(str, VERSION))
def setup_nonblocking_producer(celery_app=None, io_loop=None,
on_ready=None, result_cls=AsyncResult,
limit=1):
celery_app = celery_app or celery.current_app
io_loop = io_loop or ioloop.IOLoop.instance()
NonBlockingTaskProducer.app = celery_app
NonBlockingTaskProducer.conn_pool = ConnectionPool(limit, io_loop)
NonBlockingTaskProducer.result_cls = result_cls
if celery_app.conf['BROKER_URL'] and celery_app.conf['BROKER_URL'].startswith('amqp'):
celery.app.amqp.AMQP.producer_cls = NonBlockingTaskProducer
def connect():
broker_url = celery_app.connection().as_uri(include_password=True)
options = celery_app.conf.get('CELERYT_PIKA_OPTIONS', {})
NonBlockingTaskProducer.conn_pool.connect(broker_url,
options=options,
callback=on_ready)
io_loop.add_callback(connect)
|
Set release version to 0.3.4
|
Set release version to 0.3.4
|
Python
|
bsd-3-clause
|
shnjp/tornado-celery,qudos-com/tornado-celery,mher/tornado-celery,sangwonl/tornado-celery
|
eb25c6900b307792821f7db6bcfa92cc62a80298
|
lims/pricebook/views.py
|
lims/pricebook/views.py
|
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from lims.permissions.permissions import IsInAdminGroupOrRO
from lims.shared.mixins import AuditTrailViewMixin
from .models import PriceBook
from .serializers import PriceBookSerializer
from lims.pricebook.management.commands.getpricebooks import get_pricebooks
class PriceBookViewSet(AuditTrailViewMixin, viewsets.ModelViewSet):
queryset = PriceBook.objects.all()
serializer_class = PriceBookSerializer
permission_classes = (IsInAdminGroupOrRO,)
filter_fields = ('name', 'identifier',)
@list_route()
def updateall(self, request):
get_pricebooks()
return Response({'message': 'Pricebooks updated'})
|
from django.conf import settings
from simple_salesforce import Salesforce
from rest_framework import viewsets
from rest_framework.decorators import list_route
from rest_framework.response import Response
from lims.permissions.permissions import IsInAdminGroupOrRO
from lims.shared.mixins import AuditTrailViewMixin
from .models import PriceBook
from .serializers import PriceBookSerializer
from lims.pricebook.management.commands.getpricebooks import get_pricebooks
class PriceBookViewSet(AuditTrailViewMixin, viewsets.ModelViewSet):
queryset = PriceBook.objects.all()
serializer_class = PriceBookSerializer
permission_classes = (IsInAdminGroupOrRO,)
filter_fields = ('name', 'identifier',)
def perform_create(self, serializer):
serializer.save()
get_pricebooks()
@list_route(methods=['POST'])
def updateall(self, request):
get_pricebooks()
return Response({'message': 'Pricebooks updated'})
@list_route()
def on_crm(self, request):
"""
List of all pricebooks available on thr CRM
"""
sf = Salesforce(instance_url=settings.SALESFORCE_URL,
username=settings.SALESFORCE_USERNAME,
password=settings.SALESFORCE_PASSWORD,
security_token=settings.SALESFORCE_TOKEN)
pricebooks = sf.query("SELECT id,name FROM Pricebook2")
return Response(pricebooks['records'])
|
Add list crm pricebooks endpoint and update pricebook fetching
|
Add list crm pricebooks endpoint and update pricebook fetching
|
Python
|
mit
|
GETLIMS/LIMS-Backend,GETLIMS/LIMS-Backend
|
07dc719807a6d890fa33338746caca61704de0a1
|
src/genbank-gff-to-nquads.py
|
src/genbank-gff-to-nquads.py
|
#!/usr/bin/env python
import jargparse
#################
### CONSTANTS ###
#################
metadataPrefix = '#'
accessionKey = '#!genome-build-accession NCBI_Assembly:'
locusTagAttributeKey = 'locus_tag'
#################
### FUNCTIONS ###
#################
def parseRecord(record, locusTags):
components = record.split()
type = components[2]
rawAttributes = components[8]
if type == 'gene':
attributes = rawAttributes.split(';')
for a in attributes:
(key, value) = a.split('=')
# print a
if key == locusTagAttributeKey:
locusTags.append(value)
parser = jargparse.ArgParser('Convert Genbank GFF into an n-quad file')
parser.add_argument('gffPath', help='path to the GFF')
parser.add_argument('outPath', help='path to output the n-quads')
args = parser.parse_args()
accessionIdentifier = 'NONE FOUND'
locusTags = []
with open(args.gffPath) as f:
for line in f:
line = line.strip()
if line.startswith(metadataPrefix):
if line.startswith(accessionKey):
accessionIdentifier = line[len(accessionKey):]
else:
parseRecord(line, locusTags)
with open(args.outPath, 'w') as f:
for locusTag in locusTags:
f.write('<%s> <locus> "%s" .\n' % (accessionIdentifier, locusTag))
|
#!/usr/bin/env python
import jargparse
#################
### CONSTANTS ###
#################
metadataPrefix = '#'
accessionKey = '#!genome-build-accession NCBI_Assembly:'
#################
### FUNCTIONS ###
#################
def parseRecord(record, locusTags):
locusTagAttributeKey = 'locus_tag'
components = record.split()
type = components[2]
rawAttributes = components[8]
if type == 'gene':
attributes = rawAttributes.split(';')
for a in attributes:
(key, value) = a.split('=')
# print a
if key == locusTagAttributeKey:
locusTags.append(value)
parser = jargparse.ArgParser('Convert Genbank GFF into an n-quad file')
parser.add_argument('gffPath', help='path to the GFF')
parser.add_argument('outPath', help='path to output the n-quads')
args = parser.parse_args()
accessionIdentifier = 'NONE FOUND'
locusTags = []
with open(args.gffPath) as f:
for line in f:
line = line.strip()
if line.startswith(metadataPrefix):
if line.startswith(accessionKey):
accessionIdentifier = line[len(accessionKey):]
else:
parseRecord(line, locusTags)
with open(args.outPath, 'w') as f:
for locusTag in locusTags:
f.write('<%s> <locus> "%s" .\n' % (accessionIdentifier, locusTag))
|
Move locus tag attribute key name into the function that uses it
|
Move locus tag attribute key name into the function that uses it
|
Python
|
apache-2.0
|
justinccdev/biolta
|
af85d44d9a6f7cf65fe504816bcf4a10ba603d51
|
pdfdocument/utils.py
|
pdfdocument/utils.py
|
import re
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename, as_attachment=True, **kwargs):
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = '%s; filename="%s.pdf"' % (
'attachment' if as_attachment else 'inline',
FILENAME_RE.sub('-', filename),
)
return PDFDocument(response, **kwargs), response
|
import re
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename, as_attachment=True, pdfdocument=PDFDocument,
**kwargs):
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = '%s; filename="%s.pdf"' % (
'attachment' if as_attachment else 'inline',
FILENAME_RE.sub('-', filename),
)
return pdfdocument(response, **kwargs), response
|
Make the PDFDocument class used in pdf_response configurable
|
Make the PDFDocument class used in pdf_response configurable
|
Python
|
bsd-3-clause
|
matthiask/pdfdocument,dongguangming/pdfdocument
|
d4e8839ac02935b86c1634848476a9a8512c376d
|
delivery_transsmart/models/res_partner.py
|
delivery_transsmart/models/res_partner.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Delivery Transsmart Ingegration
# © 2016 - 1200 Web Development <http://1200wd.com/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import Warning
class ProductProduct(models.Model):
_inherit = 'product.product'
service_level_id = fields.Many2one(
'delivery.service.level',
string='Service Level')
service_level_time_id = fields.Many2one(
'delivery.service.level.time',
string='Service Level Time')
class ResPartner(models.Model):
_inherit = 'res.partner'
transsmart_code = fields.Char(
size=128,
string="Transsmart Code")
transsmart_id = fields.Integer(
"Transsmart ID")
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Delivery Transsmart Ingegration
# © 2016 - 1200 Web Development <http://1200wd.com/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import Warning
class ResPartner(models.Model):
_inherit = 'res.partner'
transsmart_code = fields.Char(
size=128,
string="Transsmart Code")
transsmart_id = fields.Integer(
"Transsmart ID")
|
Remove double product field definitions
|
[DEL] Remove double product field definitions
|
Python
|
agpl-3.0
|
1200wd/1200wd_addons,1200wd/1200wd_addons
|
b8d73fb12fa91a6f0aa33ed985dd5521843e05b8
|
src/zeit/content/dynamicfolder/browser/tests/test_folder.py
|
src/zeit/content/dynamicfolder/browser/tests/test_folder.py
|
import zeit.cms.interfaces
import zeit.cms.testing
import zeit.content.dynamicfolder.testing
class EditDynamicFolder(zeit.cms.testing.BrowserTestCase):
layer = zeit.content.dynamicfolder.testing.DYNAMIC_LAYER
def test_check_out_and_edit_folder(self):
b = self.browser
b.open('http://localhost/++skin++vivi/repository/dynamicfolder')
b.getLink('Checkout').click()
b.getControl(
'Configuration file').value = 'http://xml.zeit.de/testcontent'
b.getControl('Apply').click()
self.assertEllipsis('...Updated on...', b.contents)
b.getLink('Checkin').click()
self.assertIn('repository', b.url)
with zeit.cms.testing.site(self.getRootFolder()):
folder = zeit.cms.interfaces.ICMSContent(
'http://xml.zeit.de/dynamicfolder')
self.assertEqual(
'http://xml.zeit.de/testcontent', folder.config_file.uniqueId)
|
import zeit.cms.interfaces
import zeit.cms.testing
import zeit.content.dynamicfolder.testing
class EditDynamicFolder(zeit.cms.testing.BrowserTestCase):
layer = zeit.content.dynamicfolder.testing.DYNAMIC_LAYER
def test_check_out_and_edit_folder(self):
b = self.browser
b.open('http://localhost/++skin++vivi/repository/dynamicfolder')
b.getLink('Checkout').click()
b.getControl(
'Configuration file').value = 'http://xml.zeit.de/testcontent'
b.getControl('Apply').click()
self.assertEllipsis('...Updated on...', b.contents)
b.getLink('Checkin').click()
self.assertIn('repository', b.url)
folder = zeit.cms.interfaces.ICMSContent(
'http://xml.zeit.de/dynamicfolder')
self.assertEqual(
'http://xml.zeit.de/testcontent', folder.config_file.uniqueId)
|
Remove superfluous test setup after zeit.cms got smarter
|
MAINT: Remove superfluous test setup after zeit.cms got smarter
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.content.dynamicfolder
|
1a1a45fe5175d002c239610be487607dbb7cdde1
|
thinc/neural/_classes/feed_forward.py
|
thinc/neural/_classes/feed_forward.py
|
from .model import Model
from ... import describe
def _run_child_hooks(model, X, y):
for layer in model._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
X = layer(X[:1000])
@describe.on_data(_run_child_hooks)
class FeedForward(Model):
'''A feed-forward network, that chains multiple Model instances together.'''
def __init__(self, layers, **kwargs):
self._layers = layers
Model.__init__(self, **kwargs)
@property
def input_shape(self):
return self._layers[0].input_shape
@property
def output_shape(self):
return self._layers[-1].output_shape
def begin_update(self, X, drop=0.):
callbacks = []
for layer in self._layers:
X, inc_layer_grad = layer.begin_update(X, drop=drop)
callbacks.append(inc_layer_grad)
def continue_update(gradient, sgd=None):
for callback in reversed(callbacks):
if gradient is None or callback == None:
break
gradient = callback(gradient, sgd)
return gradient
return X, continue_update
|
from .model import Model
from ... import describe
def _run_child_hooks(model, X, y):
for layer in model._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
X = layer(X)
if hasattr(X, 'shape'):
X = model.ops.xp.ascontiguousarray(X)
@describe.on_data(_run_child_hooks)
class FeedForward(Model):
'''A feed-forward network, that chains multiple Model instances together.'''
def __init__(self, layers, **kwargs):
self._layers = layers
Model.__init__(self, **kwargs)
@property
def input_shape(self):
return self._layers[0].input_shape
@property
def output_shape(self):
return self._layers[-1].output_shape
def begin_update(self, X, drop=0.):
callbacks = []
for layer in self._layers:
X, inc_layer_grad = layer.begin_update(X, drop=drop)
callbacks.append(inc_layer_grad)
def continue_update(gradient, sgd=None):
for callback in reversed(callbacks):
if gradient is None or callback == None:
break
gradient = callback(gradient, sgd)
return gradient
return X, continue_update
|
Make copy of X in feed-forward
|
Make copy of X in feed-forward
|
Python
|
mit
|
spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc
|
c862a4c40f17040017e9bb6f67f5b9fa293c23e5
|
mcb_interface/driver.py
|
mcb_interface/driver.py
|
#!/usr/bin/env python3
from romi import Romi
romi = Romi()
from time import sleep
# from math import pi
while True:
battery_millivolts = romi.read_battery_millivolts()
v_x, v_theta, x, y, theta = romi.read_odometry()
romi.velocity_command(0.1, 0)
print "Battery Voltage: ", battery_millivolts[0], " Volts."
print "Vx: ", v_x, " m/s"
print "Vtheta: ", v_theta, "rad/s"
sleep(0.01)
|
#!/usr/bin/env python3
from romi import Romi
romi = Romi()
from time import sleep
# from math import pi
while True:
battery_millivolts = romi.read_battery_millivolts()
v_x, v_theta, x, y, theta = romi.read_odometry()
romi.velocity_command(1.0, 0)
print "Battery Voltage: ", battery_millivolts[0], " Volts."
print "Vx: ", v_x, " m/s"
print "Vtheta: ", v_theta, "rad/s"
print "X: ", x, " Y: ", y, " Theta: ", theta
sleep(0.01)
|
Add more output printing, increase velocity command.
|
Add more output printing, increase velocity command.
|
Python
|
mit
|
waddletown/sw
|
5f888f5ee388efa046bc9e0de0622e5c8b66d712
|
src/viewsapp/views.py
|
src/viewsapp/views.py
|
from django.shortcuts import (
get_object_or_404, render)
from django.views.decorators.http import \
require_http_methods
from .models import ExampleModel
@require_http_methods(['GET', 'HEAD'])
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
|
from django.shortcuts import (
get_object_or_404, render)
from django.views.decorators.http import \
require_safe
from .models import ExampleModel
@require_safe
def model_detail(request, *args, **kwargs):
request_slug = kwargs.get('slug')
example_obj = get_object_or_404(
ExampleModel, slug=request_slug)
return render(
request,
'viewsapp/detail.html',
{'object': example_obj})
|
Switch HTTP restriction decorator to require_safe.
|
Switch HTTP restriction decorator to require_safe.
|
Python
|
bsd-2-clause
|
jambonrose/djangocon2015-views,jambonrose/djangocon2015-views
|
b2df5972bcc9f3367c3832719d1590410317bbba
|
swift/obj/dedupe/fp_index.py
|
swift/obj/dedupe/fp_index.py
|
__author__ = 'mjwtom'
import sqlite3
import unittest
class fp_index:
def __init__(self, name):
if name.endswith('.db'):
self.name = name
else:
self.name = name + '.db'
self.conn = sqlite3.connect(name)
self.c = self.conn.cursor()
self.c.execute('''CREATE TABLE IF NOT EXISTS fp_index (key text, value text)''')
def insert(self, key, value):
data = (key, value)
self.c.execute('INSERT INTO fp_index VALUES (?, ?)', data)
self.conn.commit()
def lookup(self, key):
data = (key,)
self.c.execute('SELECT value FROM fp_index WHERE key=?', data)
return self.c.fetchone()
def testinsert():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
fp.insert(str, str)
def testselect():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
c = fp.lookup(str)
for row in c:
print row
if __name__ == '__main__':
unittest.main()
|
__author__ = 'mjwtom'
import sqlite3
import unittest
class Fp_Index(object):
def __init__(self, name):
if name.endswith('.db'):
self.name = name
else:
self.name = name + '.db'
self.conn = sqlite3.connect(name)
self.c = self.conn.cursor()
self.c.execute('''CREATE TABLE IF NOT EXISTS fp_index (key text, value text)''')
def insert(self, key, value):
data = (key, value)
self.c.execute('INSERT INTO fp_index VALUES (?, ?)', data)
self.conn.commit()
def lookup(self, key):
data = (key,)
self.c.execute('SELECT value FROM fp_index WHERE key=?', data)
return self.c.fetchone()
'''
def testinsert():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
fp.insert(str, str)
def testselect():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
c = fp.lookup(str)
for row in c:
print row
if __name__ == '__main__':
unittest.main()
'''
|
Use database to detect the duplication. But the md5 value does not match. Need to add some code here
|
Use database to detect the duplication. But the md5 value does not match. Need to add some code here
|
Python
|
apache-2.0
|
mjwtom/swift,mjwtom/swift
|
bdee8b95429a6ac96cb0577e7eddbd25b764ebfc
|
mirrit/web/models.py
|
mirrit/web/models.py
|
from humbledb import Mongo, Document
class User(Document):
username = ''
password = ''
email = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
@staticmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update({'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
|
from bson.objectid import ObjectId
from humbledb import Mongo, Document
class ClassProperty (property):
"""Subclass property to make classmethod properties possible"""
def __get__(self, cls, owner):
return self.fget.__get__(None, owner)()
class User(Document):
username = ''
password = ''
email = ''
config_database = 'mirrit'
config_collection = 'users'
@property
def id(self):
return unicode(self._id)
@property
def user_id(self):
return unicode(self._id)
@staticmethod
def get_by_login(cls, username, password):
with Mongo:
return cls.find({'username': username,
'password': password})
def persist(self):
with Mongo:
if self._id:
super(User, self).__self_class__.update(
{'_id': self._id}, self, w=1)
else:
super(User, self).__self_class__.insert(self, w=1)
class Wrapper(object):
def get(self, id):
with Mongo:
return User.find({'_id': ObjectId(id)})
wrapper = Wrapper()
User.query = wrapper
|
Fix stupid pseudo-django model crap in signup
|
Fix stupid pseudo-django model crap in signup
|
Python
|
bsd-3-clause
|
1stvamp/mirrit
|
f76bba08c1a8cfd3c821f641adb2b10e3cfa47b9
|
tests/test_base_os.py
|
tests/test_base_os.py
|
from .fixtures import elasticsearch
def test_base_os(host):
assert host.system_info.distribution == 'centos'
assert host.system_info.release == '7'
def test_java_home_env_var(host):
java_path_cmdline = '$JAVA_HOME/bin/java -version'
assert host.run(java_path_cmdline).exit_status == 0
|
from .fixtures import elasticsearch
def test_base_os(host):
assert host.system_info.distribution == 'centos'
assert host.system_info.release == '7'
def test_java_home_env_var(host):
java_path_cmdline = '$JAVA_HOME/bin/java -version'
assert host.run(java_path_cmdline).exit_status == 0
def test_no_core_files_exist_in_root(host):
core_file_check_cmdline = 'ls -l /core*'
assert host.run(core_file_check_cmdline).exit_status != 0
|
Add acceptance test to ensure image doesn't contain core files in /
|
Add acceptance test to ensure image doesn't contain core files in /
In some occasions, depending on the build platform (noticed with aufs with old docker-ce versions) may create a /corefile.<pid>.
Fail a build if the produced image containers any /core* files.
Relates #97
|
Python
|
apache-2.0
|
jarpy/elasticsearch-docker,jarpy/elasticsearch-docker
|
5fc80b347191761d848f6bf736358ec1ec351f33
|
fbmsgbot/bot.py
|
fbmsgbot/bot.py
|
from http_client import HttpClient
class Bot():
"""
@brief Facebook messenger bot
"""
def __init__(self, token):
self.api_token = token
self.client = HttpClient(token)
def send_message(self, message, completion):
def _completion(response, error):
print error
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
print response
completion(response)
self.client.submit_request(
'/me/messages',
'POST',
message.to_json(),
_completion)
def set_welcome(self, message, completion):
def _completion(response, error):
print error
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
print response
completion(response)
self.client.submit_request(
'/me/thread_settings',
'POST',
message.to_json(),
_completion)
|
from http_client import HttpClient
class Bot():
"""
@brief Facebook messenger bot
"""
def __init__(self, token):
self.api_token = token
self.client = HttpClient(token)
def send_message(self, message, completion):
def _completion(response, error):
if error is not None:
pass
else:
completion(response)
self.client.submit_request(
'/me/messages',
'POST',
message.to_json(),
_completion)
def set_welcome(self, message, completion):
def _completion(response, error):
if error is not None:
pass
else:
completion(response)
self.client.submit_request(
'/me/thread_settings',
'POST',
message.to_json(),
_completion)
|
Remove print statments and fix completion logic
|
Remove print statments and fix completion logic
|
Python
|
mit
|
ben-cunningham/pybot,ben-cunningham/python-messenger-bot
|
a32831dbf6b46b33691a76e43012e9fbbbc80e17
|
superlists/lists/tests.py
|
superlists/lists/tests.py
|
from django.test import TestCase
# Create your tests here.
|
from django.test import TestCase
class SmokeTest(TestCase):
def test_bad_maths(self):
self.assertEqual(1 + 1, 3)
|
Add app for lists, with deliberately failing unit test
|
Add app for lists, with deliberately failing unit test
|
Python
|
mit
|
jrwiegand/tdd-project,jrwiegand/tdd-project,jrwiegand/tdd-project
|
41cf41f501b715902cf180b5a2f62ce16a816f30
|
oscar/core/prices.py
|
oscar/core/prices.py
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Price(object):
"""
Simple price class that encapsulates a price and its tax information
Attributes:
incl_tax (Decimal): Price including taxes
excl_tax (Decimal): Price excluding taxes
tax (Decimal): Tax amount
is_tax_known (bool): Whether tax is known
currency (str): 3 character currency code
"""
def __init__(self, currency, excl_tax, incl_tax=None, tax=None):
self.currency = currency
self.excl_tax = excl_tax
if incl_tax is not None:
self.incl_tax = incl_tax
self.is_tax_known = True
self.tax = incl_tax - excl_tax
elif tax is not None:
self.incl_tax = excl_tax + tax
self.is_tax_known = True
self.tax = tax
else:
self.is_tax_known = False
|
class TaxNotKnown(Exception):
"""
Exception for when a tax-inclusive price is requested but we don't know
what the tax applicable is (yet).
"""
class Price(object):
"""
Simple price class that encapsulates a price and its tax information
Attributes:
incl_tax (Decimal): Price including taxes
excl_tax (Decimal): Price excluding taxes
tax (Decimal): Tax amount
is_tax_known (bool): Whether tax is known
currency (str): 3 character currency code
"""
def __init__(self, currency, excl_tax, incl_tax=None, tax=None):
self.currency = currency
self.excl_tax = excl_tax
if incl_tax is not None:
self.incl_tax = incl_tax
self.is_tax_known = True
self.tax = incl_tax - excl_tax
elif tax is not None:
self.incl_tax = excl_tax + tax
self.is_tax_known = True
self.tax = tax
else:
self.is_tax_known = False
def __repr__(self):
if self.is_tax_known:
return "%s(currency=%r, excl_tax=%r, incl_tax=%r, tax=%r)" % (
self.__class__.__name__, self.currency, self.excl_tax,
self.incl_tax, self.tax)
return "%s(currency=%r, excl_tax=%r)" % (
self.__class__.__name__, self.currency, self.excl_tax)
|
Define __repr__ for the core Price class
|
Define __repr__ for the core Price class
|
Python
|
bsd-3-clause
|
saadatqadri/django-oscar,WillisXChen/django-oscar,adamend/django-oscar,sasha0/django-oscar,faratro/django-oscar,bnprk/django-oscar,jinnykoo/christmas,jinnykoo/wuyisj.com,WillisXChen/django-oscar,WadeYuChen/django-oscar,taedori81/django-oscar,taedori81/django-oscar,bschuon/django-oscar,WadeYuChen/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,dongguangming/django-oscar,QLGu/django-oscar,django-oscar/django-oscar,thechampanurag/django-oscar,pasqualguerrero/django-oscar,solarissmoke/django-oscar,okfish/django-oscar,bschuon/django-oscar,QLGu/django-oscar,sonofatailor/django-oscar,nickpack/django-oscar,lijoantony/django-oscar,ademuk/django-oscar,binarydud/django-oscar,marcoantoniooliveira/labweb,nfletton/django-oscar,ahmetdaglarbas/e-commerce,binarydud/django-oscar,josesanch/django-oscar,nfletton/django-oscar,Bogh/django-oscar,Bogh/django-oscar,john-parton/django-oscar,sasha0/django-oscar,binarydud/django-oscar,machtfit/django-oscar,eddiep1101/django-oscar,mexeniz/django-oscar,lijoantony/django-oscar,taedori81/django-oscar,QLGu/django-oscar,josesanch/django-oscar,dongguangming/django-oscar,solarissmoke/django-oscar,eddiep1101/django-oscar,anentropic/django-oscar,monikasulik/django-oscar,michaelkuty/django-oscar,eddiep1101/django-oscar,okfish/django-oscar,faratro/django-oscar,jlmadurga/django-oscar,pdonadeo/django-oscar,kapari/django-oscar,marcoantoniooliveira/labweb,vovanbo/django-oscar,dongguangming/django-oscar,john-parton/django-oscar,thechampanurag/django-oscar,kapari/django-oscar,solarissmoke/django-oscar,jmt4/django-oscar,pdonadeo/django-oscar,bnprk/django-oscar,ka7eh/django-oscar,machtfit/django-oscar,manevant/django-oscar,saadatqadri/django-oscar,jinnykoo/wuyisj.com,mexeniz/django-oscar,ka7eh/django-oscar,thechampanurag/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,django-oscar/django-oscar,monikasulik/django-oscar,faratro/django-oscar,Jannes123/django-oscar,sonofatailor/django-oscar,WadeYuChen/django-oscar,michaelkuty/django-oscar,kapt/django-oscar,binarydud/django-oscar,Bogh/django-oscar,WillisXChen/django-oscar,saadatqadri/django-oscar,nickpack/django-oscar,MatthewWilkes/django-oscar,django-oscar/django-oscar,anentropic/django-oscar,spartonia/django-oscar,sonofatailor/django-oscar,kapari/django-oscar,nickpack/django-oscar,Bogh/django-oscar,taedori81/django-oscar,jinnykoo/christmas,okfish/django-oscar,manevant/django-oscar,dongguangming/django-oscar,jinnykoo/wuyisj.com,amirrpp/django-oscar,itbabu/django-oscar,DrOctogon/unwash_ecom,jinnykoo/wuyisj,jlmadurga/django-oscar,john-parton/django-oscar,kapari/django-oscar,DrOctogon/unwash_ecom,nfletton/django-oscar,vovanbo/django-oscar,Jannes123/django-oscar,spartonia/django-oscar,jlmadurga/django-oscar,lijoantony/django-oscar,jmt4/django-oscar,jmt4/django-oscar,pdonadeo/django-oscar,rocopartners/django-oscar,sonofatailor/django-oscar,ka7eh/django-oscar,mexeniz/django-oscar,rocopartners/django-oscar,manevant/django-oscar,bschuon/django-oscar,MatthewWilkes/django-oscar,eddiep1101/django-oscar,bnprk/django-oscar,MatthewWilkes/django-oscar,monikasulik/django-oscar,michaelkuty/django-oscar,rocopartners/django-oscar,jinnykoo/wuyisj,itbabu/django-oscar,Jannes123/django-oscar,anentropic/django-oscar,bschuon/django-oscar,jinnykoo/christmas,adamend/django-oscar,marcoantoniooliveira/labweb,amirrpp/django-oscar,jinnykoo/wuyisj,monikasulik/django-oscar,DrOctogon/unwash_ecom,amirrpp/django-oscar,kapt/django-oscar,ahmetdaglarbas/e-commerce,marcoantoniooliveira/labweb,ademuk/django-oscar,jlmadurga/django-oscar,manevant/django-oscar,pasqualguerrero/django-oscar,pasqualguerrero/django-oscar,anentropic/django-oscar,jmt4/django-oscar,WillisXChen/django-oscar,kapt/django-oscar,itbabu/django-oscar,ka7eh/django-oscar,WadeYuChen/django-oscar,michaelkuty/django-oscar,thechampanurag/django-oscar,ahmetdaglarbas/e-commerce,okfish/django-oscar,pdonadeo/django-oscar,sasha0/django-oscar,spartonia/django-oscar,adamend/django-oscar,lijoantony/django-oscar,jinnykoo/wuyisj,MatthewWilkes/django-oscar,solarissmoke/django-oscar,faratro/django-oscar,mexeniz/django-oscar,spartonia/django-oscar,sasha0/django-oscar,josesanch/django-oscar,machtfit/django-oscar,vovanbo/django-oscar,john-parton/django-oscar,vovanbo/django-oscar,saadatqadri/django-oscar,WillisXChen/django-oscar,ademuk/django-oscar,django-oscar/django-oscar,nickpack/django-oscar,adamend/django-oscar,rocopartners/django-oscar,pasqualguerrero/django-oscar,ademuk/django-oscar,QLGu/django-oscar,amirrpp/django-oscar,ahmetdaglarbas/e-commerce,bnprk/django-oscar,Jannes123/django-oscar
|
836e946e5c6bfb6b097622193a4239c7eba1ca9a
|
thinglang/parser/blocks/handle_block.py
|
thinglang/parser/blocks/handle_block.py
|
from thinglang.compiler.buffer import CompilationBuffer
from thinglang.compiler.opcodes import OpcodeJump, OpcodePopLocal
from thinglang.lexer.blocks.exceptions import LexicalHandle
from thinglang.lexer.values.identifier import Identifier
from thinglang.parser.nodes import BaseNode
from thinglang.parser.rule import ParserRule
class HandleBlock(BaseNode):
"""
An exception handling block
"""
def __init__(self, exception_type: Identifier, exception_name: Identifier=None):
super(HandleBlock, self).__init__([exception_type, exception_name])
self.exception_type, self.exception_name = exception_type, exception_name
def compile(self, context: CompilationBuffer):
assert self.parent is None, 'Handle blocks may not be part of the AST after finalization'
buffer = context.optional()
if self.exception_name is not None:
buffer.append(OpcodePopLocal.from_reference(context.resolve(self.exception_name)), self.source_ref)
super(HandleBlock, self).compile(buffer)
buffer.append(OpcodeJump(context.next_index, absolute=True), self.source_ref)
return context.epilogue(buffer)
@staticmethod
@ParserRule.mark
def parse_handle_block_with_value(_: LexicalHandle, exception_type: Identifier, exception_name: Identifier):
return HandleBlock(exception_type, exception_name)
@staticmethod
@ParserRule.mark
def parse_handle_block(_: LexicalHandle, exception_type: Identifier):
return HandleBlock(exception_type)
|
from thinglang.compiler.buffer import CompilationBuffer
from thinglang.compiler.opcodes import OpcodeJump, OpcodePopLocal, OpcodePop
from thinglang.lexer.blocks.exceptions import LexicalHandle
from thinglang.lexer.values.identifier import Identifier
from thinglang.parser.nodes import BaseNode
from thinglang.parser.rule import ParserRule
class HandleBlock(BaseNode):
"""
An exception handling block
"""
def __init__(self, exception_type: Identifier, exception_name: Identifier=None):
super(HandleBlock, self).__init__([exception_type, exception_name])
self.exception_type, self.exception_name = exception_type, exception_name
def compile(self, context: CompilationBuffer):
assert self.parent is None, 'Handle blocks may not be part of the AST after finalization'
buffer = context.optional()
if self.exception_name is not None:
buffer.append(OpcodePopLocal.from_reference(context.resolve(self.exception_name)), self.source_ref)
else:
buffer.append(OpcodePop(), self.source_ref)
super(HandleBlock, self).compile(buffer)
buffer.append(OpcodeJump(context.next_index, absolute=True), self.source_ref)
return context.epilogue(buffer)
@staticmethod
@ParserRule.mark
def parse_handle_block_with_value(_: LexicalHandle, exception_type: Identifier, exception_name: Identifier):
return HandleBlock(exception_type, exception_name)
@staticmethod
@ParserRule.mark
def parse_handle_block(_: LexicalHandle, exception_type: Identifier):
return HandleBlock(exception_type)
|
Add missing void pop in uncaptured exception blocks
|
Add missing void pop in uncaptured exception blocks
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
44110a305b5a23609c5f6366da9d746244807dbb
|
power/__init__.py
|
power/__init__.py
|
# coding=utf-8
"""
Provides crossplatform checking of current power source, battery warning level and battery time remaining estimate.
Allows you to add observer for power notifications if platform supports it.
Usage:
from power import PowerManagement, PowerManagementObserver # Automatically imports platform-specific implementation
class Observer(PowerManagementObserver):
def on_power_sources_change(self, power_management):
print("Power sources did change.")
def on_time_remaining_change(self, power_management):
print("Time remaining did change.")
# class Observer(object):
# ...
# PowerManagementObserver.register(Observer)
"""
from sys import platform
from power.common import *
from power.version import VERSION
__version__ = VERSION
try:
if platform.startswith('darwin'):
from power.darwin import PowerManagement
elif platform.startswith('freebsd'):
from power.freebsd import PowerManagement
elif platform.startswith('win32'):
from power.win32 import PowerManagement
elif platform.startswith('linux'):
from power.linux import PowerManagement
else:
raise RuntimeError("{platform} is not supported.".format(platform=platform))
except RuntimeError as e:
import warnings
warnings.warn("Unable to load PowerManagement for {platform}. No-op PowerManagement class is used: {error}".format(error=str(e), platform=platform))
from power.common import PowerManagementNoop as PowerManagement
|
# coding=utf-8
"""
Provides crossplatform checking of current power source, battery warning level and battery time remaining estimate.
Allows you to add observer for power notifications if platform supports it.
Usage:
from power import PowerManagement, PowerManagementObserver # Automatically imports platform-specific implementation
class Observer(PowerManagementObserver):
def on_power_sources_change(self, power_management):
print("Power sources did change.")
def on_time_remaining_change(self, power_management):
print("Time remaining did change.")
# class Observer(object):
# ...
# PowerManagementObserver.register(Observer)
"""
from sys import platform
from power.common import *
from power.version import VERSION
__version__ = VERSION
try:
if platform.startswith('darwin'):
from power.darwin import PowerManagement
elif platform.startswith('freebsd'):
from power.freebsd import PowerManagement
elif platform.startswith('win32'):
from power.win32 import PowerManagement
elif platform.startswith('linux'):
from power.linux import PowerManagement
else:
raise RuntimeError("{platform} is not supported.".format(platform=platform))
except (RuntimeError, ImportError) as e:
import warnings
warnings.warn("Unable to load PowerManagement for {platform}. No-op PowerManagement class is used: {error}".format(error=str(e), platform=platform))
from power.common import PowerManagementNoop as PowerManagement
|
Use PowerManagementNoop on import errors
|
Use PowerManagementNoop on import errors
Platform implementation can fail to import its dependencies.
|
Python
|
mit
|
Kentzo/Power
|
0f7ba6290696e1ce75e42327fdfc4f9eae8614c3
|
pdfdocument/utils.py
|
pdfdocument/utils.py
|
from datetime import date
import re
from django.db.models import Max, Min
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
def worklog_period(obj):
activity_period = obj.worklogentries.aggregate(Max('date'), Min('date'))
article_period = obj.articleentries.aggregate(Max('date'), Min('date'))
min_date = date(1900, 1, 1)
max_date = date(3000, 1, 1)
if not (activity_period['date__min'] or article_period['date__min']):
return (min_date, max_date)
start = min(activity_period['date__min'] or max_date, article_period['date__min'] or max_date)
end = max(activity_period['date__max'] or min_date, article_period['date__max'] or min_date)
return (start, end)
def worklog_period_string(obj):
start, end = obj.worklog_period()
return u'%s - %s' % (start.strftime('%d.%m.%Y'), end.strftime('%d.%m.%Y'))
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename):
response = HttpResponse(mimetype='application/pdf')
response['Content-Disposition'] = 'attachment; filename=%s.pdf' %\
FILENAME_RE.sub('-', filename)
return PDFDocument(response), response
|
from datetime import date
import re
from django.db.models import Max, Min
from django.http import HttpResponse
from pdfdocument.document import PDFDocument
def worklog_period(obj):
activity_period = obj.worklogentries.aggregate(Max('date'), Min('date'))
article_period = obj.articleentries.aggregate(Max('date'), Min('date'))
min_date = date(1900, 1, 1)
max_date = date(3000, 1, 1)
if not (activity_period['date__min'] or article_period['date__min']):
return (min_date, max_date)
start = min(activity_period['date__min'] or max_date, article_period['date__min'] or max_date)
end = max(activity_period['date__max'] or min_date, article_period['date__max'] or min_date)
return (start, end)
def worklog_period_string(obj):
start, end = obj.worklog_period()
return u'%s - %s' % (start.strftime('%d.%m.%Y'), end.strftime('%d.%m.%Y'))
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
def pdf_response(filename, **kwargs):
response = HttpResponse(mimetype='application/pdf')
response['Content-Disposition'] = 'attachment; filename=%s.pdf' %\
FILENAME_RE.sub('-', filename)
return PDFDocument(response, **kwargs), response
|
Allow passing initialization kwargs to PDFDocument through pdf_response
|
Allow passing initialization kwargs to PDFDocument through pdf_response
|
Python
|
bsd-3-clause
|
matthiask/pdfdocument,dongguangming/pdfdocument
|
edfd2edc5496cb412477b7409f43aa53acf7dea9
|
tests/test_loadproblem.py
|
tests/test_loadproblem.py
|
# -*- coding: utf-8 -*-
import unittest
import os
from mathdeck import loadproblem
class TestMathdeckLoadProblem(unittest.TestCase):
def test_loadproblem_has_answers_attribute(self):
file_name = 'has_answers_attribute.py'
problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'fixtures','loadproblem')
problem = loadproblem.load_file_as_module(problem_dir,file_name)
self.assertTrue(hasattr(problem,'answers'))
def test_loadproblem_has_no_answers_attribute(self):
file_name = 'has_no_answers_attribute.py'
problem_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'fixtures','loadproblem')
self.assertRaises(Exception, loadproblem. \
load_file_as_module(problem_dir,file_name))
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
import unittest
import os
from mathdeck import loadproblem
class TestMathdeckLoadProblem(unittest.TestCase):
def test_loadproblem_has_answers_attribute(self):
file_name = 'has_answers_attribute.py'
file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'fixtures','loadproblem', file_name)
problem = loadproblem.load_file_as_module(file)
self.assertTrue(hasattr(problem,'answers'))
def test_loadproblem_has_no_answers_attribute(self):
file_name = 'has_no_answers_attribute.py'
file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'fixtures','loadproblem', file_name)
self.assertRaises(Exception, loadproblem.load_file_as_module(file))
if __name__ == '__main__':
unittest.main()
|
Fix parameter values for load function
|
Fix parameter values for load function
|
Python
|
apache-2.0
|
patrickspencer/mathdeck,patrickspencer/mathdeck
|
503f92796b9368a78f39c41fb6bb596f32728b8d
|
herana/views.py
|
herana/views.py
|
import json
from django.shortcuts import render
from django.views.generic import View
from models import Institute, ProjectDetail
from forms import SelectInstituteForm, SelectOrgLevelForm
def home(request):
return render(request, 'index.html')
class ResultsView(View):
template_name = 'results.html'
def get(self, request, *args, **kwargs):
projects = ProjectDetail.objects.filter(
record_status=2,
is_rejected=False,
is_deleted=False)
institutes = {proj.institute for proj in projects}
data = {}
data['projects'] = [p.as_dict() for p in projects]
data['institutes'] = [i.as_dict() for i in institutes]
if request.user.is_proj_leader or request.user.is_institute_admin:
data['user_institute'] = request.user.get_user_institute().as_dict()
context = {
"data": json.dumps(data),
}
return render(
request,
self.template_name,
context=context)
|
import json
from django.shortcuts import render
from django.views.generic import View
from models import Institute, ProjectDetail
from forms import SelectInstituteForm, SelectOrgLevelForm
def home(request):
return render(request, 'index.html')
class ResultsView(View):
template_name = 'results.html'
def get(self, request, *args, **kwargs):
projects = ProjectDetail.objects.filter(
record_status=2,
is_rejected=False,
is_deleted=False)
institutes = {proj.institute for proj in projects}
data = {}
data['projects'] = [p.as_dict() for p in projects]
data['institutes'] = [i.as_dict() for i in institutes]
if request.user.is_authenticated():
if request.user.is_proj_leader or request.user.is_institute_admin:
data['user_institute'] = request.user.get_user_institute().as_dict()
context = {
"data": json.dumps(data),
}
return render(
request,
self.template_name,
context=context)
|
Check if user in logged in
|
Check if user in logged in
|
Python
|
mit
|
Code4SA/herana,Code4SA/herana,Code4SA/herana,Code4SA/herana
|
562a0868b3648e3ba40c29289ba7f4ebd4c75800
|
pyinfra/api/__init__.py
|
pyinfra/api/__init__.py
|
# pyinfra
# File: pyinfra/api/__init__.py
# Desc: import some stuff
from .config import Config # noqa: F401
from .deploy import deploy # noqa: F401
from .exceptions import ( # noqa: F401
DeployError,
InventoryError,
OperationError,
)
from .facts import FactBase # noqa: F401
from .inventory import Inventory # noqa: F401
from .operation import operation # noqa: F401
from .state import State # noqa: F401
|
# pyinfra
# File: pyinfra/api/__init__.py
# Desc: import some stuff
from .config import Config # noqa: F401
from .deploy import deploy # noqa: F401
from .exceptions import ( # noqa: F401
DeployError,
InventoryError,
OperationError,
)
from .facts import FactBase, ShortFactBase # noqa: F401
from .inventory import Inventory # noqa: F401
from .operation import operation # noqa: F401
from .state import State # noqa: F401
|
Add `ShortFactBase` import to `pyinfra.api`.
|
Add `ShortFactBase` import to `pyinfra.api`.
|
Python
|
mit
|
Fizzadar/pyinfra,Fizzadar/pyinfra
|
663a61362c30b737f2532de42b5b680795ccf608
|
quran_text/models.py
|
quran_text/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
class Sura(models.Model):
"""
Model to hold the Quran Chapters "Sura"
"""
index = models.PositiveIntegerField(primary_key=True)
name = models.CharField(max_length=20, unique=True, verbose_name=_('Sura'))
def __str__(self):
return self.name
class Meta:
ordering = ['index']
class Ayah(models.Model):
"""
Model to hold chapters' text ot Verse "Ayat"
"""
number = models.PositiveIntegerField(verbose_name=_('Number'))
sura = models.ForeignKey(Sura, related_name='ayat')
text = models.TextField()
def __str__(self):
return '{} - {}'.format(self.sura.index, self.number)
class Meta:
unique_together = ['number', 'sura']
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
class Sura(models.Model):
"""
Model to hold the Quran Chapters "Sura"
"""
index = models.PositiveIntegerField(primary_key=True)
name = models.CharField(max_length=20, unique=True, verbose_name=_('Sura'))
def __str__(self):
return self.name
class Meta:
ordering = ['index']
class Ayah(models.Model):
"""
Model to hold chapters' text ot Verse "Ayat"
"""
number = models.PositiveIntegerField(verbose_name=_('Number'))
sura = models.ForeignKey(Sura, related_name='ayat')
text = models.TextField()
def __str__(self):
return '{} - {}'.format(self.sura.index, self.number)
class Meta:
unique_together = ['number', 'sura']
ordering = ['sura', 'number']
|
Add ordering to Ayah model
|
Add ordering to Ayah model
|
Python
|
mit
|
EmadMokhtar/tafseer_api
|
ddabef55b9dde75af422d4dedb2d5578d7019905
|
tests/test_authentication.py
|
tests/test_authentication.py
|
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def test_registration_with_missing_dredentials(self):
"""Should throw error for missing credentials"""
user = json.dumps({
'name': '',
'email': '',
'password': ''
})
response = self.client.post('/auth/register', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Missing', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def test_registration_with_missing_dredentials(self):
"""Should throw error for missing credentials"""
user = json.dumps({
'name': '',
'email': '',
'password': ''
})
response = self.client.post('/auth/register', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Missing', response.data.decode())
def test_registration_with_invalid_email(self):
"""Should return invalid email"""
user = json.dumps({
'name': 'Patrick',
'email': 'pato',
'password': 'pat'
})
response = self.client.post('/auth/register', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Invalid Email', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
Add test for invalid email
|
Add test for invalid email
|
Python
|
mit
|
patlub/BucketListAPI,patlub/BucketListAPI
|
adcba0285ef700738a63986c7657bd9e5ac85d85
|
wikipendium/user/forms.py
|
wikipendium/user/forms.py
|
from django.forms import Form, CharField, ValidationError
from django.contrib.auth.models import User
class UserChangeForm(Form):
username = CharField(max_length=30, label='New username')
def clean(self):
cleaned_data = super(UserChangeForm, self).clean()
if User.objects.filter(username=cleaned_data['username']).count():
raise ValidationError('Username already taken!')
return cleaned_data
class EmailChangeForm(Form):
email = CharField(max_length=75, label='New email')
|
from django.forms import Form, CharField, EmailField, ValidationError
from django.contrib.auth.models import User
class UserChangeForm(Form):
username = CharField(max_length=30, label='New username')
def clean(self):
cleaned_data = super(UserChangeForm, self).clean()
if User.objects.filter(username=cleaned_data['username']).count():
raise ValidationError('Username already taken!')
return cleaned_data
class EmailChangeForm(Form):
email = EmailField(max_length=75, label='New email')
|
Use EmailField for email validation
|
Use EmailField for email validation
|
Python
|
apache-2.0
|
stianjensen/wikipendium.no,stianjensen/wikipendium.no,stianjensen/wikipendium.no
|
945baec1540ff72b85b3d0563511d93cb33d660e
|
nbgrader/tests/formgrader/fakeuser.py
|
nbgrader/tests/formgrader/fakeuser.py
|
import os
from jupyterhub.auth import LocalAuthenticator
from jupyterhub.spawner import LocalProcessSpawner
from tornado import gen
class FakeUserAuth(LocalAuthenticator):
"""Authenticate fake users"""
@gen.coroutine
def authenticate(self, handler, data):
"""If the user is on the whitelist, authenticate regardless of password.
If not, then don't authenticate.
"""
username = data['username']
if not self.check_whitelist(username):
return
return username
@staticmethod
def system_user_exists(user):
return True
class FakeUserSpawner(LocalProcessSpawner):
def user_env(self, env):
env['USER'] = self.user.name
env['HOME'] = os.getcwd()
env['SHELL'] = '/bin/bash'
return env
def make_preexec_fn(self, name):
home = os.getcwd()
def preexec():
# don't forward signals
os.setpgrp()
# start in the cwd
os.chdir(home)
return preexec
|
import os
from jupyterhub.auth import LocalAuthenticator
from jupyterhub.spawner import LocalProcessSpawner
from tornado import gen
class FakeUserAuth(LocalAuthenticator):
"""Authenticate fake users"""
@gen.coroutine
def authenticate(self, handler, data):
"""If the user is on the whitelist, authenticate regardless of password.
If not, then don't authenticate.
"""
username = data['username']
if not self.check_whitelist(username):
return
return username
@staticmethod
def system_user_exists(user):
return True
class FakeUserSpawner(LocalProcessSpawner):
def user_env(self, env):
env['USER'] = self.user.name
env['HOME'] = os.getcwd()
env['SHELL'] = '/bin/bash'
return env
def make_preexec_fn(self, name):
home = os.getcwd()
def preexec():
# start in the cwd
os.chdir(home)
return preexec
|
Remove os.setpgrp() from fake spawner
|
Remove os.setpgrp() from fake spawner
|
Python
|
bsd-3-clause
|
jhamrick/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,jupyter/nbgrader
|
1bbf986cbde2d0ec8add3ac845cb10fcd061e46d
|
nodeconductor/server/test_settings.py
|
nodeconductor/server/test_settings.py
|
# Django test settings for nodeconductor project.
from nodeconductor.server.doc_settings import *
INSTALLED_APPS += (
'nodeconductor.quotas.tests',
'nodeconductor.structure.tests',
)
ROOT_URLCONF = 'nodeconductor.structure.tests.urls'
|
# Django test settings for nodeconductor project.
from nodeconductor.server.doc_settings import *
INSTALLED_APPS += (
'nodeconductor.quotas.tests',
'nodeconductor.structure.tests',
)
ROOT_URLCONF = 'nodeconductor.structure.tests.urls'
# XXX: This option should be removed after itacloud assembly creation.
NODECONDUCTOR['IS_ITACLOUD'] = True
|
Add "IS_ITACLOUD" flag to settings
|
Add "IS_ITACLOUD" flag to settings
- itacloud-7125
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
cdfd622f4e7017ab1860e1f7420d6f26424a69f1
|
dashboard_app/extension.py
|
dashboard_app/extension.py
|
from lava_server.extension import LavaServerExtension
class DashboardExtension(LavaServerExtension):
@property
def app_name(self):
return "dashboard_app"
@property
def name(self):
return "Dashboard"
@property
def main_view_name(self):
return "dashboard_app.views.bundle_stream_list"
@property
def description(self):
return "Validation Dashboard"
@property
def version(self):
import versiontools
import dashboard_app
return versiontools.format_version(dashboard_app.__version__)
def contribute_to_settings(self, settings):
super(DashboardExtension, self).contribute_to_settings(settings)
settings['INSTALLED_APPS'].extend([
"linaro_django_pagination",
"south",
])
settings['MIDDLEWARE_CLASSES'].append(
'linaro_django_pagination.middleware.PaginationMiddleware')
settings['RESTRUCTUREDTEXT_FILTER_SETTINGS'] = {
"initial_header_level": 4}
def contribute_to_settings_ex(self, settings_module, settings_object):
settings_module['DATAVIEW_DIRS'] = settings_object._settings.get(
"DATAVIEW_DIRS", [])
settings_module['DATAREPORT_DIRS'] = settings_object._settings.get(
"DATAREPORT_DIRS", [])
|
from lava_server.extension import LavaServerExtension
class DashboardExtension(LavaServerExtension):
@property
def app_name(self):
return "dashboard_app"
@property
def name(self):
return "Dashboard"
@property
def main_view_name(self):
return "dashboard_app.views.bundle_stream_list"
@property
def description(self):
return "Validation Dashboard"
@property
def version(self):
import versiontools
import dashboard_app
return versiontools.format_version(dashboard_app.__version__)
def contribute_to_settings(self, settings_module):
super(DashboardExtension, self).contribute_to_settings(settings_module)
settings_module['INSTALLED_APPS'].extend([
"linaro_django_pagination",
"south",
])
settings_module['MIDDLEWARE_CLASSES'].append(
'linaro_django_pagination.middleware.PaginationMiddleware')
def contribute_to_settings_ex(self, settings_module, settings_object):
settings_module['DATAVIEW_DIRS'] = settings_object._settings.get(
"DATAVIEW_DIRS", [])
settings_module['DATAREPORT_DIRS'] = settings_object._settings.get(
"DATAREPORT_DIRS", [])
# Enable constrained dataview database if requested
if settings_object._settings.get("use_dataview_database"):
# Copy everything from the default database and append _dataview to user
# name. The rest is out of scope (making sure it's actually setup
# properly, having permissions to login, permissions to view proper data)
settings_module['DATABASES']['dataview'] = dict(settings_module['DATABASES']['default'])
settings_module['DATABASES']['dataview']['USER'] += "_dataview"
|
Move support for dataview-specific database from lava-server
|
Move support for dataview-specific database from lava-server
|
Python
|
agpl-3.0
|
Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,OSSystems/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server
|
419d2ca4d53e33c58d556b45bcc6910bd28ef91a
|
djangae/apps.py
|
djangae/apps.py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class DjangaeConfig(AppConfig):
name = 'djangae'
verbose_name = _("Djangae")
def ready(self):
from .patches.contenttypes import patch
patch()
from djangae.db.backends.appengine.caching import reset_context
from django.core.signals import request_finished, request_started
request_finished.connect(reset_context)
request_started.connect(reset_context)
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class DjangaeConfig(AppConfig):
name = 'djangae'
verbose_name = _("Djangae")
def ready(self):
from .patches.contenttypes import patch
patch()
from djangae.db.backends.appengine.caching import reset_context
from django.core.signals import request_finished, request_started
request_finished.connect(reset_context, dispatch_uid="request_finished_context_reset")
request_started.connect(reset_context, dispatch_uid="request_started_context_reset")
|
Make sure we only connect to the signals onces
|
Make sure we only connect to the signals onces
|
Python
|
bsd-3-clause
|
kirberich/djangae,asendecka/djangae,asendecka/djangae,SiPiggles/djangae,wangjun/djangae,potatolondon/djangae,kirberich/djangae,SiPiggles/djangae,SiPiggles/djangae,leekchan/djangae,armirusco/djangae,chargrizzle/djangae,trik/djangae,grzes/djangae,armirusco/djangae,jscissr/djangae,trik/djangae,jscissr/djangae,wangjun/djangae,asendecka/djangae,leekchan/djangae,chargrizzle/djangae,wangjun/djangae,grzes/djangae,trik/djangae,potatolondon/djangae,grzes/djangae,jscissr/djangae,chargrizzle/djangae,kirberich/djangae,armirusco/djangae,leekchan/djangae
|
d81a68a46fbdc98f803c94a2123b48cca6f5da31
|
tests/aqdb/test_rebuild.py
|
tests/aqdb/test_rebuild.py
|
#!/ms/dist/python/PROJ/core/2.5.4-0/bin/python
"""Test module for rebuilding the database."""
import os
import __init__
import aquilon.aqdb.depends
import nose
import unittest
from subprocess import Popen, PIPE
class TestRebuild(unittest.TestCase):
def testrebuild(self):
env = {}
for (key, value) in os.environ.items():
env[key] = value
cmd = ['./build_db.py', '--delete', '--populate']
_DIR = os.path.dirname(os.path.realpath(__file__))
p = Popen(cmd, stdout=1, stderr=2, env=env, cwd=_DIR)
(out, err) = p.communicate()
self.assertEqual(p.returncode, 0, "Database rebuild failed:\n%s" % err)
if __name__=='__main__':
nose.runmodule()
# Copyright (C) 2008 Morgan Stanley
# This module is part of Aquilon
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
|
#!/ms/dist/python/PROJ/core/2.5.4-0/bin/python
"""Test module for rebuilding the database."""
import os
import __init__
import aquilon.aqdb.depends
import nose
import unittest
from subprocess import Popen, PIPE
from aquilon.config import Config
class TestRebuild(unittest.TestCase):
def testrebuild(self):
env = {}
for (key, value) in os.environ.items():
env[key] = value
env["AQDCONF"] = Config().baseconfig
cmd = ['./build_db.py', '--delete', '--populate']
_DIR = os.path.dirname(os.path.realpath(__file__))
p = Popen(cmd, stdout=1, stderr=2, env=env, cwd=_DIR)
(out, err) = p.communicate()
self.assertEqual(p.returncode, 0, "Database rebuild failed:\n%s" % err)
if __name__=='__main__':
nose.runmodule()
# Copyright (C) 2008 Morgan Stanley
# This module is part of Aquilon
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
|
Fix aqdb rebuild to work when not using AQDCONF env variable.
|
Fix aqdb rebuild to work when not using AQDCONF env variable.
|
Python
|
apache-2.0
|
guillaume-philippon/aquilon,quattor/aquilon,quattor/aquilon,stdweird/aquilon,stdweird/aquilon,guillaume-philippon/aquilon,quattor/aquilon,stdweird/aquilon,guillaume-philippon/aquilon
|
2db81321de1c506d6b61d8851de9ad4794deba3e
|
lmj/sim/base.py
|
lmj/sim/base.py
|
# Copyright (c) 2013 Leif Johnson <leif@leifjohnson.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
'''Base classes for simulations.'''
class World(object):
'''World is a small base class for simulation worlds.'''
def needs_reset(self):
'''Return True iff the world needs to be reset.'''
return False
def reset(self):
'''Reset the world state.'''
pass
def trace(self):
'''Return a string containing world state for later analysis.'''
return None
def step(self):
'''Advance the world simulation by one time step.'''
raise NotImplementedError
|
# Copyright (c) 2013 Leif Johnson <leif@leifjohnson.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
'''Base classes for simulations.'''
class World(object):
'''World is a small base class for simulation worlds.'''
def needs_reset(self):
'''Return True iff the world needs to be reset.'''
return False
def reset(self):
'''Reset the world state.'''
pass
def trace(self):
'''Return a string containing world state for later analysis.'''
return None
def step(self):
'''Advance the world simulation by one time step.'''
raise NotImplementedError
def on_key_press(self, key, keys):
'''Handle an otherwise-unhandled keypress event.'''
self.reset()
|
Allow for World to handle key presses.
|
Allow for World to handle key presses.
|
Python
|
mit
|
EmbodiedCognition/pagoda,EmbodiedCognition/pagoda
|
df58b36b6f62c39030d6ff28c6fb67c11f112df0
|
pyxrf/gui_module/main_window.py
|
pyxrf/gui_module/main_window.py
|
from PyQt5.QtWidgets import QMainWindow
_main_window_geometry = {
"initial_height": 800,
"initial_width": 1000,
"min_height": 400,
"min_width": 500,
}
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.initialize()
def initialize(self):
self.resize(_main_window_geometry["initial_width"],
_main_window_geometry["initial_height"])
self.setMinimumWidth(_main_window_geometry["min_width"])
self.setMinimumHeight(_main_window_geometry["min_height"])
|
from PyQt5.QtWidgets import QMainWindow
_main_window_geometry = {
"initial_height": 800,
"initial_width": 1000,
"min_height": 400,
"min_width": 500,
}
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.initialize()
def initialize(self):
self.resize(_main_window_geometry["initial_width"],
_main_window_geometry["initial_height"])
self.setMinimumWidth(_main_window_geometry["min_width"])
self.setMinimumHeight(_main_window_geometry["min_height"])
self.setWindowTitle("PyXRF window title")
|
Test window title on Mac
|
Test window title on Mac
|
Python
|
bsd-3-clause
|
NSLS-II-HXN/PyXRF,NSLS-II/PyXRF,NSLS-II-HXN/PyXRF
|
3005b947312c0219c6754e662496c876e46aafc4
|
model/openacademy_session.py
|
model/openacademy_session.py
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="instructor",
domain=['|', ('instructor', '=', True),
('category_id.name', 'ilike', "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
Add domain or and ilike
|
[REF] openacademy: Add domain or and ilike
|
Python
|
apache-2.0
|
luisll-vauxoo/openacademy
|
7371244c00c94fdc552c5d146ab1a245b643427e
|
reeprotocol/ip.py
|
reeprotocol/ip.py
|
"""IP Physical Layer
"""
from __future__ import absolute_import
import socket
from .protocol import PhysicalLayer
class Ip(PhysicalLayer):
"""IP Physical Layer"""
def __init__(self, addr):
"""Create an IP Physical Layer.
:addr tuple: Address tuple (host, port)
"""
self.addr = addr
self.connection = None
def connect(self):
"""Connect to `self.addr`
"""
self.connection = socket.create_connection(self.addr)
def disconnect(self):
"""Disconnects
"""
if self.connection:
self.connection.close()
def send_byte(self, byte):
"""Send a byte"""
assert isinstance(self.connection, socket.socket)
self.connection.send(byte)
def get_byte(self, timeout):
"""Read a byte"""
assert isinstance(self.connection, socket.socket)
self.connection.recv(1)
|
"""IP Physical Layer
"""
from __future__ import absolute_import
import socket
import queue
import threading
from .protocol import PhysicalLayer
class Ip(PhysicalLayer):
"""IP Physical Layer"""
def __init__(self, addr):
"""Create an IP Physical Layer.
:addr tuple: Address tuple (host, port)
"""
self.addr = addr
self.connection = None
self.connected = False
self.queue = queue.Queue()
self.thread = threading.Thread(target=self.read_port)
def connect(self):
"""Connect to `self.addr`
"""
self.connection = socket.create_connection(self.addr)
self.connected = True
self.thread.start()
def disconnect(self):
"""Disconnects
"""
if self.connection:
self.connection.close()
self.connected = False
def read_port(self):
"""Read bytes from socket
"""
while self.connected:
response = self.connection.recv(16)
if not response:
continue
for byte_resp in response:
self.queue.put(byte_resp)
def send_byte(self, byte):
"""Send a byte"""
assert isinstance(self.connection, socket.socket)
self.connection.send(byte)
def get_byte(self, timeout=60):
"""Read a byte"""
return self.queue.get(True, timeout=timeout)
|
Implement reading socket with threading
|
Implement reading socket with threading
|
Python
|
agpl-3.0
|
javierdelapuente/reeprotocol
|
e2b691810f9d9a33f054bf245f1429d6999338a6
|
dataproperty/_interface.py
|
dataproperty/_interface.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
import abc
import six
from ._function import is_nan
from ._typecode import Typecode
@six.add_metaclass(abc.ABCMeta)
class DataPeropertyInterface(object):
__slots__ = ()
@abc.abstractproperty
def align(self): # pragma: no cover
pass
@abc.abstractproperty
def decimal_places(self): # pragma: no cover
pass
@abc.abstractproperty
def typecode(self): # pragma: no cover
pass
@property
def format_str(self):
if self.typecode == Typecode.INT:
return "d"
if self.typecode == Typecode.FLOAT:
if is_nan(self.decimal_places):
return "f"
return ".%df" % (self.decimal_places)
return "s"
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
import abc
import six
from ._function import is_nan
from ._typecode import Typecode
@six.add_metaclass(abc.ABCMeta)
class DataPeropertyInterface(object):
__slots__ = ()
@abc.abstractproperty
def align(self): # pragma: no cover
pass
@abc.abstractproperty
def decimal_places(self): # pragma: no cover
pass
@abc.abstractproperty
def typecode(self): # pragma: no cover
pass
|
Delete property from the interface class
|
Delete property from the interface class
|
Python
|
mit
|
thombashi/DataProperty
|
367d8415773a44356ce604ecfc839117798f7d3a
|
tests/test_pytestplugin.py
|
tests/test_pytestplugin.py
|
from io import FileIO
from six import next
from pkg_resources import resource_filename, working_set
from wex.readable import EXT_WEXIN
from wex.output import EXT_WEXOUT
from wex import pytestplugin
def pytest_funcarg__parent(request):
return request.session
response = b"""HTTP/1.1 200 OK\r
Content-type: application/json\r
\r
{"args":{"this":"that"}}"""
def setup_module():
entry = resource_filename(__name__, 'fixtures/TestMe.egg')
working_set.add_entry(entry)
def test_pytest_collect_file(tmpdir, parent):
# FTM just to see how to coverage test the plugin
r0_wexin = tmpdir.join('0' + EXT_WEXIN)
r0_wexout = tmpdir.join('0' + EXT_WEXOUT)
with FileIO(r0_wexin.strpath, 'w') as fp:
fp.write(response)
with FileIO(r0_wexout.strpath, 'w') as fp:
fp.write(b'this\t"that"\n')
fileobj = pytestplugin.pytest_collect_file(parent, r0_wexin)
item = next(fileobj.collect())
item.runtest()
|
from io import FileIO
from six import next
import pytest
from pkg_resources import resource_filename, working_set
from wex.readable import EXT_WEXIN
from wex.output import EXT_WEXOUT
from wex import pytestplugin
@pytest.fixture
def parent(request):
return request.session
response = b"""HTTP/1.1 200 OK\r
Content-type: application/json\r
\r
{"args":{"this":"that"}}"""
def setup_module():
entry = resource_filename(__name__, 'fixtures/TestMe.egg')
working_set.add_entry(entry)
def test_pytest_collect_file(tmpdir, parent):
# FTM just to see how to coverage test the plugin
r0_wexin = tmpdir.join('0' + EXT_WEXIN)
r0_wexout = tmpdir.join('0' + EXT_WEXOUT)
with FileIO(r0_wexin.strpath, 'w') as fp:
fp.write(response)
with FileIO(r0_wexout.strpath, 'w') as fp:
fp.write(b'this\t"that"\n')
fileobj = pytestplugin.pytest_collect_file(parent, r0_wexin)
item = next(fileobj.collect())
item.runtest()
|
Replace funcarg with fixture for pytestplugin
|
Replace funcarg with fixture for pytestplugin
|
Python
|
bsd-3-clause
|
gilessbrown/wextracto,eBay/wextracto,eBay/wextracto,gilessbrown/wextracto
|
26eaaffb872d7046be9417ae53302e59dbc7b808
|
TrainingDataGenerator/Scripts/generateNumberImage.py
|
TrainingDataGenerator/Scripts/generateNumberImage.py
|
# -*- coding: utf-8 -*-
import threading
import os
import shutil
from PIL import Image, ImageDraw2, ImageDraw, ImageFont
import random
count = range(0, 200)
path = './generatedNumberImages'
text = '0123456789X'
def start():
if os.path.exists(path):
shutil.rmtree(path)
os.mkdir(path)
for idx in count:
t = threading.Thread(target=create_image, args=([idx]))
t.start()
def create_image(idx):
o_image = Image.open('background.png')
drawBrush = ImageDraw.Draw(o_image)
drawBrush.text((100 + random.randint(-30, 30), 20 + random.randint(-5, 5)), text, fill='black', font=ImageFont.truetype('./OCR-B 10 BT.ttf', 20 + random.randint(-5, 5)))
o_image.rotate(random.randint(-2, 2)).save(path + '/%d.png' % idx)
if __name__ == '__main__':
start()
|
# -*- coding: utf-8 -*-
import threading
import os
import shutil
from PIL import Image, ImageDraw2, ImageDraw, ImageFont, ImageEnhance
import random
count = range(0, 200)
path = './generatedNumberImages'
text = '0123456789X'
def start():
if os.path.exists(path):
shutil.rmtree(path)
os.mkdir(path)
for idx in count:
t = threading.Thread(target=create_image, args=([idx]))
t.start()
def create_image(idx):
o_image = Image.open('background.png')
drawBrush = ImageDraw.Draw(o_image)
drawBrush.text((100 + random.randint(-30, 30), 20 + random.randint(-5, 5)), text, fill='black', font=ImageFont.truetype('./OCR-B 10 BT.ttf', 20 + random.randint(-5, 5)))
o_image = ImageEnhance.Color(o_image).enhance(random.uniform(0, 2)) # 着色
o_image = ImageEnhance.Brightness(o_image).enhance(random.uniform(0.3, 2)) #亮度
o_image = ImageEnhance.Contrast(o_image).enhance(random.uniform(0.2, 2)) # 对比度
o_image = ImageEnhance.Sharpness(o_image).enhance(random.uniform(0.2, 3.0)) #对比度
o_image = o_image.rotate(random.randint(-2, 2))
o_image.save(path + '/%d.png' % idx)
if __name__ == '__main__':
start()
|
Add Image Enhance for generated image.
|
Add Image Enhance for generated image.
|
Python
|
apache-2.0
|
KevinGong2013/ChineseIDCardOCR,KevinGong2013/ChineseIDCardOCR,KevinGong2013/ChineseIDCardOCR
|
4f84482803049b40d7b7da26d9d624a6a63b4820
|
core/utils.py
|
core/utils.py
|
# -*- coding: utf-8 -*-
from django.utils import timezone
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = '{} hour{}'.format(h, 's' if h > 1 else '')
if m > 0 and precision != 'h':
duration += '{}{} minute{}'.format(
'' if duration == '' else ', ', m, 's' if m > 1 else '')
if s > 0 and precision != 'h' and precision != 'm':
duration += '{}{} second{}'.format(
'' if duration == '' else ', ', s, 's' if s > 1 else '')
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from django.utils.translation import ngettext
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = ngettext('%(hours)s hour', '%(hours)s hours', h) % {
'hours': h
}
if m > 0 and precision != 'h':
if duration != '':
duration += ', '
duration += ngettext('%(minutes)s minute', '%(minutes)s minutes', m) % {
'minutes': m
}
if s > 0 and precision != 'h' and precision != 'm':
if duration != '':
duration += ', '
duration += ngettext('%(seconds)s second', '%(seconds)s seconds', s) % {
'seconds': s
}
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
|
Add translation support to `duration_string` utility
|
Add translation support to `duration_string` utility
|
Python
|
bsd-2-clause
|
cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy
|
786ebc992ac09cd4b25e90ee2a243447e39c237f
|
director/accounts/forms.py
|
director/accounts/forms.py
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit
from django import forms
from accounts.models import Account, Team
from lib.data_cleaning import clean_slug, SlugType
from lib.forms import ModelFormWithCreate
class CleanNameMixin:
def clean_name(self):
return clean_slug(self.cleaned_data["name"], SlugType.ACCOUNT)
class AccountSettingsForm(CleanNameMixin, forms.ModelForm):
helper = FormHelper()
helper.layout = Layout(
"name", "logo", Submit("submit", "Update", css_class="button is-primary")
)
class Meta:
model = Account
fields = ("name", "logo")
class AccountCreateForm(CleanNameMixin, ModelFormWithCreate):
helper = FormHelper()
helper.layout = Layout("name", "logo",)
class Meta:
model = Account
fields = ("name", "logo")
class TeamForm(forms.ModelForm):
helper = FormHelper()
helper.form_tag = False
class Meta:
model = Team
fields = ("name", "description")
widgets = {"name": forms.TextInput}
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, HTML, Submit
from django import forms
from accounts.models import Account, Team
from lib.data_cleaning import clean_slug, SlugType
from lib.forms import ModelFormWithCreate
from assets.thema import themes
class CleanNameMixin:
def clean_name(self):
return clean_slug(self.cleaned_data["name"], SlugType.ACCOUNT)
class AccountCreateForm(CleanNameMixin, ModelFormWithCreate):
helper = FormHelper()
helper.layout = Layout("name", "logo",)
class Meta:
model = Account
fields = ("name", "logo")
class AccountSettingsForm(CleanNameMixin, forms.ModelForm):
helper = FormHelper()
helper.layout = Layout(
Div(
HTML(
'<p class="title is-4">Identity</p>'
'<p class="subtitle is-5">Settings for your account\'s public profile.</p>'
),
"name",
"logo",
css_class="section",
),
Div(
HTML(
'<p class="title is-4">Content</p>'
'<p class="subtitle is-5">Settings affecting how content is served for your projects.</p>'
),
"theme",
"hosts",
css_class="section",
),
Submit("submit", "Update", css_class="button is-primary"),
)
class Meta:
model = Account
fields = ("name", "logo", "theme", "hosts")
widgets = {
"theme": forms.Select(choices=[(theme, theme) for theme in themes]),
"hosts": forms.TextInput(),
}
class TeamForm(forms.ModelForm):
helper = FormHelper()
helper.form_tag = False
class Meta:
model = Team
fields = ("name", "description")
widgets = {"name": forms.TextInput}
|
Add theme and hosts to settings
|
feat(Accounts): Add theme and hosts to settings
|
Python
|
apache-2.0
|
stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub
|
c3c1b9c6a1d13f38cd50762b451ca19eb0a05ff2
|
run_deploy_job_wr.py
|
run_deploy_job_wr.py
|
#!/usr/bin/env python
import json
import os
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
os.environ['revision_build'],
os.environ['JOB_NAME'],
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({'command': command, 'install': {}}, config_file)
config_file.flush()
subprocess.check_call(['workspace-run', config_file.name, sys.argv[1]])
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import json
import os
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix='juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': ['*']},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config,
])
if __name__ == '__main__':
main()
|
Update for more artifact support.
|
Update for more artifact support.
|
Python
|
agpl-3.0
|
mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju
|
41236c2be66b6f790308cba321cb482807814323
|
ubersmith/calls/device.py
|
ubersmith/calls/device.py
|
"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
|
"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')
|
Make module graph call return a file.
|
Make module graph call return a file.
|
Python
|
mit
|
jasonkeene/python-ubersmith,jasonkeene/python-ubersmith,hivelocity/python-ubersmith,hivelocity/python-ubersmith
|
61a6d057302767aa49633d6d010f7da583035533
|
web/templatetags/getattribute.py
|
web/templatetags/getattribute.py
|
import re
from django import template
from django.conf import settings
numeric_test = re.compile("^\d+$")
register = template.Library()
def getattribute(value, arg):
"""Gets an attribute of an object dynamically from a string name"""
if hasattr(value, str(arg)):
return getattr(value, arg)
elif hasattr(value, 'has_key') and value.has_key(arg):
return value[arg]
elif numeric_test.match(str(arg)) and len(value) > int(arg):
return value[int(arg)]
else:
return settings.TEMPLATE_STRING_IF_INVALID
register.filter('getattribute', getattribute)
# Then, in template:
# {% load getattribute %}
# {{ object|getattribute:dynamic_string_var }}
|
import re
from django import template
from django.conf import settings
numeric_test = re.compile("^\d+$")
register = template.Library()
def getattribute(value, arg):
"""Gets an attribute of an object dynamically from a string name"""
if hasattr(value, str(arg)):
if callable(getattr(value, arg)):
return getattr(value, arg)()
return getattr(value, arg)
elif hasattr(value, 'has_key') and value.has_key(arg):
return value[arg]
elif numeric_test.match(str(arg)) and len(value) > int(arg):
return value[int(arg)]
else:
return settings.TEMPLATE_STRING_IF_INVALID
register.filter('getattribute', getattribute)
# Then, in template:
# {% load getattribute %}
# {{ object|getattribute:dynamic_string_var }}
|
Call objects methods directly from the templates yay
|
web: Call objects methods directly from the templates yay
|
Python
|
apache-2.0
|
SchoolIdolTomodachi/SchoolIdolAPI,laurenor/SchoolIdolAPI,dburr/SchoolIdolAPI,laurenor/SchoolIdolAPI,rdsathene/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,rdsathene/SchoolIdolAPI,rdsathene/SchoolIdolAPI,dburr/SchoolIdolAPI,dburr/SchoolIdolAPI,laurenor/SchoolIdolAPI
|
f4c989567fa77002541c5e5199f2fc3f8e53d6da
|
test_htmlgen/image.py
|
test_htmlgen/image.py
|
from unittest import TestCase
from asserts import assert_equal
from htmlgen import Image
from test_htmlgen.util import parse_short_tag
class ImageTest(TestCase):
def test_attributes(self):
image = Image("my-image.png", "Alternate text")
assert_equal("my-image.png", image.url)
assert_equal("Alternate text", image.alternate_text)
def test_attributes_default_alt(self):
image = Image("my-image.png")
assert_equal("", image.alternate_text)
def test_with_alt(self):
image = Image("my-image.png", "Alternate text")
tag = parse_short_tag(str(image))
assert_equal("img", tag.name)
assert_equal("my-image.png", image._attributes["src"])
assert_equal("Alternate text", image._attributes["alt"])
def test_without_alt(self):
image = Image("my-image.png")
tag = parse_short_tag(str(image))
assert_equal("img", tag.name)
assert_equal("my-image.png", image._attributes["src"])
assert_equal("", image._attributes["alt"])
|
from unittest import TestCase
from asserts import assert_equal
from htmlgen import Image
from test_htmlgen.util import parse_short_tag
class ImageTest(TestCase):
def test_attributes(self):
image = Image("my-image.png", "Alternate text")
assert_equal("my-image.png", image.url)
assert_equal("Alternate text", image.alternate_text)
def test_attributes_default_alt(self):
image = Image("my-image.png")
assert_equal("", image.alternate_text)
def test_with_alt(self):
image = Image("my-image.png", "Alternate text")
tag = parse_short_tag(str(image))
assert_equal("img", tag.name)
assert_equal("my-image.png", image.get_attribute("src"))
assert_equal("Alternate text", image.get_attribute("alt"))
def test_without_alt(self):
image = Image("my-image.png")
tag = parse_short_tag(str(image))
assert_equal("img", tag.name)
assert_equal("my-image.png", image.get_attribute("src"))
assert_equal("", image.get_attribute("alt"))
|
Use public API in tests
|
[tests] Use public API in tests
|
Python
|
mit
|
srittau/python-htmlgen
|
8e2b9e1c80e3a91df7e2cce775c19208aa9d4839
|
exam/asserts.py
|
exam/asserts.py
|
IRRELEVANT = object()
class ChangeWatcher(object):
def __init__(self, thing, *args, **kwargs):
self.thing = thing
self.args = args
self.kwargs = kwargs
self.expected_before = kwargs.pop('before', IRRELEVANT)
self.expected_after = kwargs.pop('after', IRRELEVANT)
def __enter__(self):
self.before = self.__apply()
if not self.expected_before is IRRELEVANT:
check = self.before == self.expected_before
assert check, self.__precondition_failure_msg_for('before')
def __exit__(self, type, value, traceback):
self.after = self.__apply()
if not self.expected_after is IRRELEVANT:
check = self.after == self.expected_after
assert check, self.__precondition_failure_msg_for('after')
assert self.before != self.after, self.__equality_failure_message
def __apply(self):
return self.thing(*self.args, **self.kwargs)
@property
def __equality_failure_message(self):
return 'Expected before %r != %r after' % (self.before, self.after)
def __precondition_failure_msg_for(self, condition):
return '%s value did not change (%s)' % (
condition,
getattr(self, condition)
)
class AssertsMixin(object):
assertChanges = ChangeWatcher
|
IRRELEVANT = object()
class ChangeWatcher(object):
def __init__(self, thing, *args, **kwargs):
self.thing = thing
self.args = args
self.kwargs = kwargs
self.expected_before = kwargs.pop('before', IRRELEVANT)
self.expected_after = kwargs.pop('after', IRRELEVANT)
def __enter__(self):
self.before = self.__apply()
if not self.expected_before is IRRELEVANT:
check = self.before == self.expected_before
assert check, self.__precondition_failure_msg_for('before')
def __exit__(self, exec_type, exac_value, traceback):
self.after = self.__apply()
if not self.expected_after is IRRELEVANT:
check = self.after == self.expected_after
assert check, self.__precondition_failure_msg_for('after')
assert self.before != self.after, self.__equality_failure_message
def __apply(self):
return self.thing(*self.args, **self.kwargs)
@property
def __equality_failure_message(self):
return 'Expected before %r != %r after' % (self.before, self.after)
def __precondition_failure_msg_for(self, condition):
return '%s value did not change (%s)' % (
condition,
getattr(self, condition)
)
class AssertsMixin(object):
assertChanges = ChangeWatcher
|
Change __exit__arg names to not be built ins
|
Change __exit__arg names to not be built ins
|
Python
|
mit
|
gterzian/exam,Fluxx/exam,Fluxx/exam,gterzian/exam
|
24d67552f1ae16179fb1aa21a06c191c6d596fb1
|
akhet/urlgenerator.py
|
akhet/urlgenerator.py
|
"""
Contributed by Michael Mericikel.
"""
from pyramid.decorator import reify
import pyramid.url as url
class URLGenerator(object):
def __init__(self, context, request):
self.context = context
self.request = request
@reify
def context(self):
return url.resource_url(self.context, self.request)
@reify
def app(self):
return self.request.application_url
def route(self, route_name, *elements, **kw):
return url.route_url(route_name, self.request, *elements, **kw)
# sugar for calling url('home')
__call__ = route
def current(self, *elements, **kw):
return url.current_route_url(self.request, *elements, **kw)
@reify
def static(self):
return url.static_url('baseline:static/', self.request)
@reify
def deform(self):
return url.static_url('deform:static/', self.request)
|
"""
Contributed by Michael Mericikel.
"""
from pyramid.decorator import reify
import pyramid.url as url
class URLGenerator(object):
def __init__(self, context, request):
self.context = context
self.request = request
@reify
def context(self):
return url.resource_url(self.context, self.request)
@reify
def app(self):
return self.request.application_url
def route(self, route_name, *elements, **kw):
return url.route_url(route_name, self.request, *elements, **kw)
# sugar for calling url('home')
__call__ = route
def current(self, *elements, **kw):
return url.current_route_url(self.request, *elements, **kw)
## Commented because I'm unsure of the long-term API.
## If you want to use this, or a more particular one for your
## static package(s), define it in a subclass.
##
# A future version might make 'path' optional, defaulting to
# a value passed to the constructor ("myapp:static/").
#
#def static(self, path, **kw):
# return url.static_url(path, self.request, **kw)
## If you're using the Deform package you may find this useful.
#
#@reify
#def deform(self):
# return url.static_url("deform:static/", self.request)
|
Comment out 'static' and 'deform' methods; disagreements on long-term API.
|
Comment out 'static' and 'deform' methods; disagreements on long-term API.
|
Python
|
mit
|
hlwsmith/akhet,hlwsmith/akhet,Pylons/akhet,hlwsmith/akhet,Pylons/akhet
|
d7bec88009b73a57124dbfacc91446927328abf9
|
src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py
|
src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py
|
# pylint: disable=no-self-use,too-many-arguments
from azure.mgmt.network.models import Subnet, SecurityRule
from ._factory import _network_client_factory
def create_update_subnet(resource_group_name, subnet_name, virtual_network_name, address_prefix):
'''Create or update a virtual network (VNet) subnet'''
subnet_settings = Subnet(name=subnet_name, address_prefix=address_prefix)
ncf = _network_client_factory()
return ncf.subnets.create_or_update(
resource_group_name, virtual_network_name, subnet_name, subnet_settings)
def create_update_nsg_rule(resource_group_name, network_security_group_name, security_rule_name,
protocol, source_address_prefix, destination_address_prefix,
access, direction, source_port_range, destination_port_range,
description=None, priority=None, name=None):
settings = SecurityRule(protocol=protocol, source_address_prefix=source_address_prefix,
destination_address_prefix=destination_address_prefix, access=access,
direction=direction,
description=description, source_port_range=source_port_range,
destination_port_range=destination_port_range, priority=priority,
name=name)
ncf = _network_client_factory()
return ncf.security_rules.create_or_update(
resource_group_name, network_security_group_name, security_rule_name, settings)
create_update_nsg_rule.__doc__ = SecurityRule.__doc__
|
# pylint: disable=no-self-use,too-many-arguments
from azure.mgmt.network.models import Subnet, SecurityRule
from ._factory import _network_client_factory
def create_update_subnet(resource_group_name, subnet_name, virtual_network_name, address_prefix):
'''Create or update a virtual network (VNet) subnet'''
subnet_settings = Subnet(name=subnet_name, address_prefix=address_prefix)
ncf = _network_client_factory()
return ncf.subnets.create_or_update(
resource_group_name, virtual_network_name, subnet_name, subnet_settings)
def create_update_nsg_rule(resource_group_name, network_security_group_name, security_rule_name,
protocol, source_address_prefix, destination_address_prefix,
access, direction, source_port_range, destination_port_range,
description=None, priority=None):
settings = SecurityRule(protocol=protocol, source_address_prefix=source_address_prefix,
destination_address_prefix=destination_address_prefix, access=access,
direction=direction,
description=description, source_port_range=source_port_range,
destination_port_range=destination_port_range, priority=priority,
name=security_rule_name)
ncf = _network_client_factory()
return ncf.security_rules.create_or_update(
resource_group_name, network_security_group_name, security_rule_name, settings)
create_update_nsg_rule.__doc__ = SecurityRule.__doc__
|
Fix broken NSG create command (duplicate --name parameter)
|
Fix broken NSG create command (duplicate --name parameter)
|
Python
|
mit
|
QingChenmsft/azure-cli,samedder/azure-cli,BurtBiel/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,yugangw-msft/azure-cli,BurtBiel/azure-cli
|
cde815fd3c87cbe000620060f41bf2b29976555d
|
kindergarten-garden/kindergarten_garden.py
|
kindergarten-garden/kindergarten_garden.py
|
CHILDREN = ["Alice", "Bob", "Charlie", "David", "Eve", "Fred",
"Ginny", "Harriet", "Ileana", "Joseph", "Kincaid", "Larry"]
PLANTS = {"C": "Clover", "G": "Grass", "R": "Radishes", "V": "Violets"}
class Garden(object):
def __init__(self, garden, students=CHILDREN):
self.students = sorted(students)
rows = garden.split()
patches = [rows[0][i:i+2] + rows[1][i:i+2]
for i in range(0,2*len(self.students),2)]
self._garden = {s: [PLANTS[ch] for ch in p]
for s, p in zip(self.students, patches)}
def plants(self, student):
return self._garden[student]
|
CHILDREN = ["Alice", "Bob", "Charlie", "David", "Eve", "Fred",
"Ginny", "Harriet", "Ileana", "Joseph", "Kincaid", "Larry"]
PLANTS = {"C": "Clover", "G": "Grass", "R": "Radishes", "V": "Violets"}
class Garden(object):
def __init__(self, garden, students=CHILDREN):
self.students = sorted(students)
row1, row2 = garden.split()
patches = [row1[i:i+2] + row2[i:i+2]
for i in range(0,2*len(self.students),2)]
self._garden = {s: [PLANTS[ch] for ch in p]
for s, p in zip(self.students, patches)}
def plants(self, student):
return self._garden[student]
|
Use unpacking for simpler code
|
Use unpacking for simpler code
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
31f6cc777054f4b48a37bb93453bcf405a9101a3
|
examples/example_import.py
|
examples/example_import.py
|
import xkcdpass.xkcd_password as xp
import random
def random_capitalisation(s, chance):
new_str = []
for i, c in enumerate(s):
new_str.append(c.upper() if random.random() < chance else c)
return "".join(new_str)
words = xp.locate_wordfile()
mywords = xp.generate_wordlist(wordfile=words, min_length=5, max_length=8)
raw_password = xp.generate_xkcdpassword(mywords)
for i in range(5):
print(random_capitalisation(raw_password, i/10.0))
|
import xkcdpass.xkcd_password as xp
import random
def random_capitalisation(s, chance):
new_str = []
for i, c in enumerate(s):
new_str.append(c.upper() if random.random() < chance else c)
return "".join(new_str)
def capitalize_first_letter(s):
new_str = []
s = s.split(" ")
for i, c in enumerate(s):
new_str.append(c.capitalize())
return "".join(new_str)
words = xp.locate_wordfile()
mywords = xp.generate_wordlist(wordfile=words, min_length=5, max_length=8)
raw_password = xp.generate_xkcdpassword(mywords)
for i in range(5):
print(random_capitalisation(raw_password, i/10.0))
print(capitalize_first_letter(raw_password))
|
Add "capitalize first letter" function to examples
|
Add "capitalize first letter" function to examples
|
Python
|
bsd-3-clause
|
amiryal/XKCD-password-generator,amiryal/XKCD-password-generator
|
520487df7b9612e18dc06764ba8632b0ef28aad2
|
solvent/bring.py
|
solvent/bring.py
|
from solvent import config
from solvent import run
from solvent import requirementlabel
import logging
import os
class Bring:
def __init__(self, repositoryBasename, product, hash, destination):
self._repositoryBasename = repositoryBasename
self._product = product
self._hash = hash
self._destination = destination
def go(self):
requirementLabel = requirementlabel.RequirementLabel(
basename=self._repositoryBasename, product=self._product, hash=self._hash)
label = requirementLabel.matching()
self.label(label=label, destination=self._destination)
@classmethod
def label(cls, label, destination):
logging.info("Checking out '%(label)s'", dict(label=label))
if not os.path.isdir(destination):
os.makedirs(destination)
run.run([
"osmosis", "checkout", destination, label,
"--MD5", "--removeUnknownFiles", "--putIfMissing",
"--objectStores=" + config.objectStoresOsmosisParameter()])
|
from solvent import config
from solvent import run
from solvent import requirementlabel
import logging
import os
class Bring:
def __init__(self, repositoryBasename, product, hash, destination):
self._repositoryBasename = repositoryBasename
self._product = product
self._hash = hash
self._destination = destination
def go(self):
requirementLabel = requirementlabel.RequirementLabel(
basename=self._repositoryBasename, product=self._product, hash=self._hash)
label = requirementLabel.matching()
self.label(label=label, destination=self._destination)
@classmethod
def label(cls, label, destination):
logging.info("Checking out '%(label)s'", dict(label=label))
if not os.path.isdir(destination):
os.makedirs(destination)
myUIDandGID = ["--myUIDandGIDcheckout"] if os.getuid() != 0 else []
run.run([
"osmosis", "checkout", destination, label,
"--MD5", "--removeUnknownFiles", "--putIfMissing",
"--objectStores=" + config.objectStoresOsmosisParameter()] + myUIDandGID)
|
Bring now uses --myUIDandGIDCheckout if root is not the invoker
|
Bring now uses --myUIDandGIDCheckout if root is not the invoker
|
Python
|
apache-2.0
|
Stratoscale/solvent,Stratoscale/solvent
|
1be7ac84b951a1e5803bd46de931235e44e40d9a
|
2018/covar/covar-typecheck.py
|
2018/covar/covar-typecheck.py
|
from typing import TypeVar, List
class Mammal:
pass
class Cat(Mammal):
pass
T = TypeVar('T')
def count_mammals(seq : List[Mammal]) -> int:
return len(seq)
lst = [1, 2, 3]
mlst = [Mammal(), Mammal()]
clst = [Cat(), Cat()]
print(count_mammals(clst))
|
# Sample of using typing.TypeVar with covariant settings.
# Run with python3.6+
#
# For type-checking with mypy:
#
# > mypy covar-typecheck.py
#
# Eli Bendersky [https://eli.thegreenplace.net]
# This code is in the public domain.
from typing import List, TypeVar, Iterable, Generic
class Mammal:
pass
class Cat(Mammal):
pass
def count_mammals_list(seq : List[Mammal]) -> int:
return len(seq)
mlst = [Mammal(), Mammal()]
print(count_mammals_list(mlst))
# This will fail a mypy check, because List is not covariant.
clst = [Cat(), Cat()]
print(count_mammals_list(clst))
# Now we define a simplistic immutable list wrapper with a covariant type
# parameter. This will pass type checking
T_co = TypeVar('T_co', covariant=True)
class ImmutableList(Generic[T_co]):
def __init__(self, items: Iterable[T_co]) -> None:
self.lst = list(items)
def __len__(self) -> int:
return len(self.lst)
def count_mammals_ilist(seq : ImmutableList[Mammal]) -> int:
return len(seq)
mimmlst = ImmutableList([Mammal(), Mammal()])
print(count_mammals_ilist(mimmlst))
cimmlst = ImmutableList([Cat(), Cat()])
print(count_mammals_ilist(cimmlst))
|
Update the sample with covariant markings
|
Update the sample with covariant markings
|
Python
|
unlicense
|
eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog
|
7405bda939632a5f8cac93413b4e99939ef716c2
|
ideas/models.py
|
ideas/models.py
|
from __future__ import unicode_literals
from django.db import models
class Idea(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
votes = models.IntegerField(default=0)
def __unicode__(self):
return self.name
|
from __future__ import unicode_literals
from django.db import models
class Idea(models.Model):
name = models.CharField(max_length=200, unique=True)
description = models.TextField()
votes = models.IntegerField(default=0)
def __unicode__(self):
return self.name
|
Add unique parameter to idea name
|
Add unique parameter to idea name
|
Python
|
mit
|
neosergio/vote_hackatrix_backend
|
7f79645182de6fed4d7f09302cbc31351defe467
|
snippet_parser/fr.py
|
snippet_parser/fr.py
|
#-*- encoding: utf-8 -*-
import base
def handle_date(template):
year = None
if len(template.params) >= 3:
try:
year = int(unicode(template.params[2]))
except ValueError:
pass
if isinstance(year, int):
# assume {{date|d|m|y|...}}
return ' '.join(map(unicode, template.params[:3]))
else:
# assume {{date|d m y|...}}
return unicode(template.params[0])
def handle_s(template):
ret = template.params[0]
if len(template.params) == 2:
ret += template.params[1]
if template.name.matches('-s'):
ret += ' av. J.-C'
return ret
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif template.name.matches('date'):
return handle_date(template)
elif template.name.matches('s') or template.name.matches('-s'):
return handle_s(template)
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
|
#-*- encoding: utf-8 -*-
import base
def handle_date(template):
year = None
if len(template.params) >= 3:
try:
year = int(unicode(template.params[2]))
except ValueError:
pass
if isinstance(year, int):
# assume {{date|d|m|y|...}}
return ' '.join(map(unicode, template.params[:3]))
else:
# assume {{date|d m y|...}}
return unicode(template.params[0])
def handle_s(template):
ret = unicode(template.params[0])
if len(template.params) == 2:
ret += unicode(template.params[1])
if template.name.matches('-s'):
ret += ' av. J.-C'
return ret
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif template.name.matches('date'):
return handle_date(template)
elif template.name.matches('s') or template.name.matches('-s'):
return handle_s(template)
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
|
Fix params handling in {{s}}.
|
Fix params handling in {{s}}.
Former-commit-id: 15eae70c91cd08f9028944f8b6a3990d3170aa28
|
Python
|
mit
|
guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt
|
53bb27bd88cb59424e231e7cadbbabcc91cc44e2
|
pywikibot/families/commons_family.py
|
pywikibot/families/commons_family.py
|
# -*- coding: utf-8 -*-
"""Family module for Wikimedia Commons."""
#
# (C) Pywikibot team, 2005-2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
"""Family class for Wikimedia Commons."""
name = 'commons'
def __init__(self):
"""Constructor."""
super(Family, self).__init__()
self.langs = {
'commons': 'commons.wikimedia.org',
'beta': 'commons.wikimedia.beta.wmflabs.org'
}
self.interwiki_forward = 'wikipedia'
# Templates that indicate a category redirect
# Redirects to these templates are automatically included
self.category_redirect_templates = {
'_default': (
u'Category redirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
),
}
# Subpages for documentation.
self.doc_subpages = {
'_default': ((u'/doc', ), ['commons']),
}
|
# -*- coding: utf-8 -*-
"""Family module for Wikimedia Commons."""
#
# (C) Pywikibot team, 2005-2018
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
"""Family class for Wikimedia Commons."""
name = 'commons'
def __init__(self):
"""Constructor."""
super(Family, self).__init__()
self.langs = {
'commons': 'commons.wikimedia.org',
'beta': 'commons.wikimedia.beta.wmflabs.org'
}
self.interwiki_forward = 'wikipedia'
# Templates that indicate a category redirect
# Redirects to these templates are automatically included
self.category_redirect_templates = {
'_default': (
u'Category redirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
'Endashcatredirect',
),
}
# Subpages for documentation.
self.doc_subpages = {
'_default': ((u'/doc', ), ['commons']),
}
|
Add Endashcatredirect as a new item in list of category redirect templates
|
Add Endashcatredirect as a new item in list of category redirect templates
Bug: T183987
Change-Id: I72b6ded1ccab48d5d905f4edd4ce6b9485703563
|
Python
|
mit
|
magul/pywikibot-core,PersianWikipedia/pywikibot-core,wikimedia/pywikibot-core,wikimedia/pywikibot-core,magul/pywikibot-core
|
17fbd2f3fa24da128cb5cabef4a8c94b59b50b0c
|
sqrl/client/crypt.py
|
sqrl/client/crypt.py
|
#!/usr/bin/env python
import ed25519
import hmac
from sqrl.utils import baseconv
class Crypt:
"""
Crypt
- Creating site specific key pair
- Signing SRQL response
- Providing public key
"""
def __init__(self, masterkey):
self.masterkey = masterkey
def _site_key_pair(self, domain):
seed = self._site_seed(domain)
sk = ed25519.SigningKey(seed)
vk = sk.get_verifying_key()
return sk, vk
def _site_seed(self, domain):
"""
Generates a seed to based on the masterkey
and the current site you authenicating with
The seed is used to generate the key pair
used for signing the request body
"""
key = self.masterkey
local_hmac = hmac.new(key)
local_hmac.update(domain)
return local_hmac.hexdigest()
def sign(self, value):
signed = self.sk.sign(value)
return baseconv.encode(signed)
def getPublicKey(self, domain):
self.sk, self.vk = self._site_key_pair(domain)
key = self.vk.to_bytes()
return baseconv.encode(key)
|
#!/usr/bin/env python
import ed25519
import hmac
import baseconv
class Crypt:
"""
Crypt
- Creating site specific key pair
- Signing SRQL response
- Providing public key
"""
def __init__(self, masterkey):
self.masterkey = masterkey
def _site_key_pair(self, domain):
seed = self._site_seed(domain)
sk = ed25519.SigningKey(seed)
vk = sk.get_verifying_key()
return sk, vk
def _site_seed(self, domain):
"""
Generates a seed to based on the masterkey
and the current site you authenicating with
The seed is used to generate the key pair
used for signing the request body
"""
key = self.masterkey
local_hmac = hmac.new(key)
local_hmac.update(domain)
return local_hmac.hexdigest()
def sign(self, value):
signed = self.sk.sign(value)
return baseconv.encode(signed)
def getPublicKey(self, domain):
self.sk, self.vk = self._site_key_pair(domain)
key = self.vk.to_bytes()
return baseconv.encode(key)
|
Fix up imports after module has moved
|
Fix up imports after module has moved
|
Python
|
mit
|
vegarwe/sqrl,vegarwe/sqrl,vegarwe/sqrl,vegarwe/sqrl
|
a5ce8febd35795a06288291ae67df1a92b4ba664
|
test_knot.py
|
test_knot.py
|
# -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot, get_container
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
def test_container_is_shared(self):
app1 = create_app()
app2 = create_app()
dic = Knot()
dic.init_app(app1)
dic.init_app(app2)
dic1 = get_container(app1)
dic2 = get_container(app2)
assert dic1 is dic2
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot, get_container
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
def test_container_is_shared(self):
app1 = create_app()
app2 = create_app()
dic = Knot()
dic.init_app(app1)
dic.init_app(app2)
dic1 = get_container(app1)
dic2 = get_container(app2)
assert dic1 is dic2
def test_registration_is_required(self):
app = create_app()
self.assertRaises(RuntimeError, lambda: get_container(app))
if __name__ == '__main__':
unittest.main()
|
Add test for required registration.
|
Add test for required registration.
|
Python
|
mit
|
jaapverloop/flask-knot
|
3e1f5adf1402d6e9ddd4ef6a08f4a667be950e1d
|
src/ansible/admin.py
|
src/ansible/admin.py
|
from django.contrib import admin
from .models import Playbook, Registry, Repository
admin.site.register(Playbook)
admin.site.register(Registry)
admin.site.register(Repository)
admin.site.site_header = 'Ansible Admin'
|
from django.contrib import admin
from .models import Playbook, Registry, Repository
admin.site.register(Playbook)
admin.site.register(Registry)
admin.site.register(Repository)
admin.site.site_header = 'Ansible Admin'
admin.site.site_title = 'Ansible Admin'
admin.site.index_title = 'Admin Tool'
|
Add ansible app site title
|
Add ansible app site title
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
98f7c1080765e00954d0c38a98ab1bb3e207c059
|
podcoder.py
|
podcoder.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) Ubuntu Podcast
# http://www.ubuntupodcast.org
# See the file "LICENSE" for the full license governing this code.
from podpublish import configuration
from podpublish import encoder
from podpublish import uploader
def main():
config = configuration.Configuration('podcoder.ini')
encoder.audio_encode(config, 'mp3')
encoder.mp3_tag(config)
encoder.mp3_coverart(config)
encoder.audio_encode(config, 'ogg')
encoder.ogg_tag(config)
encoder.ogg_coverart(config)
encoder.png_header(config)
encoder.png_poster(config)
encoder.mkv_encode(config)
uploader.youtube_upload(config)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) Ubuntu Podcast
# http://www.ubuntupodcast.org
# See the file "LICENSE" for the full license governing this code.
from podpublish import configuration
from podpublish import encoder
def main():
config = configuration.Configuration('podcoder.ini')
if not config.mp3['skip']:
encoder.audio_encode(config, 'mp3')
encoder.mp3_tag(config)
encoder.mp3_coverart(config)
if not config.ogg['skip']:
encoder.audio_encode(config, 'ogg')
encoder.ogg_tag(config)
encoder.ogg_coverart(config)
if not config.youtube['skip']:
encoder.png_header(config)
encoder.png_poster(config)
encoder.mkv_encode(config)
if __name__ == '__main__':
main()
|
Determine what to encode based in skip options.
|
Determine what to encode based in skip options.
|
Python
|
lgpl-2.1
|
rikai/podpublish
|
1ea51baec10ebc76bfb2be88270df2050a29fbb5
|
http-error-static-pages/5xx-static-html-generator.py
|
http-error-static-pages/5xx-static-html-generator.py
|
import os, errno
# Create build folder if it doesn't exist
try:
os.makedirs('build')
except OSError as e:
if e.errno != errno.EEXIST:
raise
template = open('./5xx.template.html', 'r')
templateString = template.read()
template.close()
# We only use 0-11 according to
# https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#5xx_Server_error
for i in range(12):
numString = str(i) if i >= 10 else '0{}'.format(i)
fileName = './build/5{}.html'.format(numString)
outputFile = open(fileName, 'w')
htmlString = templateString.replace('{ERROR_CODE_PLACEHOLDER}', '5{}'.format(numString))
outputFile.write(htmlString)
outputFile.close()
|
import os, errno
# Create build folder if it doesn't exist
def get_path(relative_path):
cur_dir = os.path.dirname(__file__)
return os.path.join(cur_dir, relative_path)
try:
os.makedirs(get_path('build'))
except OSError as e:
if e.errno != errno.EEXIST:
raise
template = open(get_path('./5xx.template.html'), 'r')
templateString = template.read()
template.close()
# We only use 0-11 according to
# https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#5xx_Server_error
for i in range(12):
numString = str(i) if i >= 10 else '0{}'.format(i)
fileName = './build/5{}.html'.format(numString)
outputFile = open(get_path(fileName), 'w')
htmlString = templateString.replace('{ERROR_CODE_PLACEHOLDER}', '5{}'.format(numString))
outputFile.write(htmlString)
outputFile.close()
|
Make static http error code generator directory agnostic
|
Make static http error code generator directory agnostic
|
Python
|
mit
|
thegazelle-ad/gazelle-server,thegazelle-ad/gazelle-server,thegazelle-ad/gazelle-front-end,thegazelle-ad/gazelle-server,thegazelle-ad/gazelle-server,thegazelle-ad/gazelle-server,thegazelle-ad/gazelle-front-end
|
c7ef5d2c049beba4bd1b12ec2e62a61446746a8a
|
unsubscribe/views.py
|
unsubscribe/views.py
|
from django import http
from mailgun import utils
import models as unsubscribe_model
def unsubscribe_webhook(request):
verified = utils.verify_webhook(
request.POST.get('token'),
request.POST.get('timestamp'),
request.POST.get('signature')
)
if not verified:
return http.HttpResponseForbidden()
address = request.POST.get('recipient')
if request.POST.get('mailing-list'):
unsubscribe_model.unsubscribe_from_sequence(address)
else:
unsubscribe_model.unsubscribe_completely(address)
|
from django import http
from django.views.decorators.csrf import csrf_exempt
from mailgun import utils
import models as unsubscribe_model
@csrf_exempt
def unsubscribe_webhook(request):
verified = utils.verify_webhook(
request.POST.get('token'),
request.POST.get('timestamp'),
request.POST.get('signature')
)
if not verified:
return http.HttpResponseForbidden()
address = request.POST.get('recipient')
try:
if request.POST.get('mailing-list'):
unsubscribe_model.unsubscribe_from_sequence(address)
else:
unsubscribe_model.unsubscribe_user(address)
except:
raise
return http.HttpResponse('')
|
Return http 200 for webhooks
|
Return http 200 for webhooks
|
Python
|
mit
|
p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc
|
c3479ba8d8486ae9a274367b4601e9e4b6699a1a
|
prj/urls.py
|
prj/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^djadmin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^djadmin/', include(admin.site.urls)),
# Root
url( r'^$', 'wishlist.views.index' ),
)
|
Add root URL (to serve public wishlist)
|
Add root URL (to serve public wishlist)
|
Python
|
mit
|
cgarvey/django-mywishlist,cgarvey/django-mywishlist
|
465fd8892c177925d8da3080d08676daad866195
|
core/urls.py
|
core/urls.py
|
from django.conf.urls import url
from core import views
urlpatterns = [
url(r'^sensors/$', views.SensorList.as_view()),
url(r'^sensors/(?P<pk>[0-9]+)/$', views.SensorDetail.as_view()),
url(r'^stations/$', views.StationList.as_view()),
url(r'^stations/(?P<pk>[0-9]+)/$', views.StationDetail.as_view()),
url(r'^readings/$', views.ReadingList.as_view()),
url(r'^readings/(?P<pk>[0-9]+)/$', views.ReadingDetail.as_view()),
]
|
from django.conf.urls import url
from core import views
urlpatterns = [
url(r'^$', views.api_root),
url(r'^sensors/$', views.SensorList.as_view(), name='sensor-list'),
url(r'^sensors/(?P<pk>[0-9]+)/$', views.SensorDetail.as_view(), name='sensor-detail'),
url(r'^sensors/(?P<pk>[0-9]+)/data/$', views.SensorData.as_view(), name='sensor-data'),
url(r'^stations/$', views.StationList.as_view(), name='station-list'),
url(r'^stations/(?P<pk>[0-9]+)/$', views.StationDetail.as_view(), name='station-detail'),
url(r'^readings/$', views.ReadingList.as_view(), name='reading-list'),
url(r'^readings/(?P<pk>[0-9]+)/$', views.ReadingDetail.as_view(), name='reading-detail'),
url(r'^messages/$', views.MessageList.as_view(), name='message-list'),
url(r'^messages/(?P<pk>[0-9]+)/$', views.MessageDetail.as_view(), name='message-detail'),
url(r'^users/$', views.UserList.as_view(), name='user-list'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view(), name='user-detail'),
]
|
Add URLs for previous views.
|
Add URLs for previous views.
|
Python
|
apache-2.0
|
qubs/climate-data-api,qubs/data-centre,qubs/climate-data-api,qubs/data-centre
|
8255fd2fdee3a7d6b96859eb7b8d1297431c730b
|
utils/00-cinspect.py
|
utils/00-cinspect.py
|
import inspect
from cinspect import getsource, getfile
import IPython.core.oinspect as OI
from IPython.utils.py3compat import cast_unicode
old_find_file = OI.find_file
old_getsource = inspect.getsource
inspect.getsource = getsource
def patch_find_file(obj):
fname = old_find_file(obj)
if fname is None:
try:
fname = cast_unicode(getfile(obj))
except:
pass
return fname
OI.find_file = patch_find_file
|
""" A startup script for IPython to patch it to 'inspect' using cinspect. """
# Place this file in ~/.ipython/<PROFILE_DIR>/startup to patch your IPython to
# use cinspect for the code inspection.
import inspect
from cinspect import getsource, getfile
import IPython.core.oinspect as OI
from IPython.utils.py3compat import cast_unicode
old_find_file = OI.find_file
old_getsource = inspect.getsource
inspect.getsource = getsource
def patch_find_file(obj):
fname = old_find_file(obj)
if fname is None:
try:
fname = cast_unicode(getfile(obj))
except:
pass
return fname
OI.find_file = patch_find_file
|
Add a note for the utility script.
|
Add a note for the utility script.
|
Python
|
bsd-3-clause
|
punchagan/cinspect,punchagan/cinspect
|
41c7d60556dff4be1c5f39cf694470d3af4869f0
|
qual/iso.py
|
qual/iso.py
|
from datetime import date, timedelta
def iso_to_gregorian(year, week, weekday):
jan_8 = date(year, 1, 8).isocalendar()
offset = (week - jan_8[1]) * 7 + (weekday - jan_8[2])
return date(year, 1, 8) + timedelta(days=offset)
|
from datetime import date, timedelta
def iso_to_gregorian(year, week, weekday):
if week < 1 or week > 54:
raise ValueError("Week number %d is invalid for an ISO calendar." % (week, ))
jan_8 = date(year, 1, 8).isocalendar()
offset = (week - jan_8[1]) * 7 + (weekday - jan_8[2])
d = date(year, 1, 8) + timedelta(days=offset)
if d.isocalendar()[0] != year:
raise ValueError("Week number %d is invalid for ISO year %d." % (week, year))
return d
|
Add checks for a reasonable week number.
|
Add checks for a reasonable week number.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
09dd2e16ef29b6c79ee344a55bea5bd0e59c7a59
|
fireplace/cards/gvg/shaman.py
|
fireplace/cards/gvg/shaman.py
|
from ..utils import *
##
# Spells
# Ancestor's Call
class GVG_029:
action = [
ForcePlay(CONTROLLER, RANDOM(CONTROLLER_HAND + MINION)),
ForcePlay(OPPONENT, RANDOM(OPPONENT_HAND + MINION)),
]
|
from ..utils import *
##
# Minions
# Vitality Totem
class GVG_039:
OWN_TURN_END = [Heal(FRIENDLY_HERO, 4)]
# Siltfin Spiritwalker
class GVG_040:
def OWN_MINION_DESTROY(self, minion):
if minion.race == Race.MURLOC:
return [Draw(CONTROLLER, 1)]
##
# Spells
# Ancestor's Call
class GVG_029:
action = [
ForcePlay(CONTROLLER, RANDOM(CONTROLLER_HAND + MINION)),
ForcePlay(OPPONENT, RANDOM(OPPONENT_HAND + MINION)),
]
# Crackle
class GVG_038:
def action(self, target):
return [Hit(TARGET, random.randint(3, 6))]
##
# Weapons
# Powermace
class GVG_036:
action = [Buff(RANDOM(FRIENDLY_MINIONS + MECH), "GVG_036e")]
|
Implement Powermace, Crackle, Vitality Totem and Siltfin Spiritwalker
|
Implement Powermace, Crackle, Vitality Totem and Siltfin Spiritwalker
|
Python
|
agpl-3.0
|
oftc-ftw/fireplace,Meerkov/fireplace,Ragowit/fireplace,butozerca/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,amw2104/fireplace,oftc-ftw/fireplace,NightKev/fireplace,butozerca/fireplace,amw2104/fireplace,beheh/fireplace,liujimj/fireplace,jleclanche/fireplace,Ragowit/fireplace,Meerkov/fireplace,liujimj/fireplace
|
0d37a94593a7749dca4b2553334f1b67c946d3f8
|
ambassador/tests/t_lua_scripts.py
|
ambassador/tests/t_lua_scripts.py
|
from kat.harness import Query
from abstract_tests import AmbassadorTest, ServiceType, HTTP
class LuaTest(AmbassadorTest):
target: ServiceType
def init(self):
self.target = HTTP()
def manifests(self) -> str:
return super().manifests() + self.format('''
---
apiVersion: getambassador.io/v1
kind: Module
metadata:
name: ambassador
spec:
ambassador_id: {self.ambassador_id}
config:
lua_scripts: |
function envoy_on_response(response_handle)
response_handle: headers():add("Lua-Scripts-Enabled", "Processed")
end
---
apiVersion: getambassador.io/v1
kind: Mapping
metadata:
name: lua-target-mapping
spec:
ambassador_id: {self.ambassador_id}
prefix: /target/
service: {self.target.path.fqdn}
''')
def queries(self):
yield Query(self.url("target/"))
def check(self):
for r in self.results:
assert r.headers.get('Lua-Scripts-Enabled', None) == ['Processed']
|
from kat.harness import Query
from abstract_tests import AmbassadorTest, ServiceType, HTTP
class LuaTest(AmbassadorTest):
target: ServiceType
def init(self):
self.target = HTTP()
self.env = ["LUA_SCRIPTS_ENABLED=Processed"]
def manifests(self) -> str:
return super().manifests() + self.format('''
---
apiVersion: getambassador.io/v1
kind: Module
metadata:
name: ambassador
spec:
ambassador_id: {self.ambassador_id}
config:
lua_scripts: |
function envoy_on_response(response_handle)
response_handle: headers():add("Lua-Scripts-Enabled", "${LUA_SCRIPTS_ENABLED}")
end
---
apiVersion: getambassador.io/v1
kind: Mapping
metadata:
name: lua-target-mapping
spec:
ambassador_id: {self.ambassador_id}
prefix: /target/
service: {self.target.path.fqdn}
''')
def queries(self):
yield Query(self.url("target/"))
def check(self):
for r in self.results:
assert r.headers.get('Lua-Scripts-Enabled', None) == ['Processed']
|
Update LUA test to perform interpolation
|
Update LUA test to perform interpolation
|
Python
|
apache-2.0
|
datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador
|
5c7c1155a6bfe0e1dda8def877bcef9d8c528ee3
|
vaux/api/__init__.py
|
vaux/api/__init__.py
|
import os
from flask import Flask, abort, request
from flask.ext import restful
from vaux.storage import LibreDB
from datetime import datetime
from werkzeug import secure_filename
from cors import crossdomain
app = Flask(__name__)
database = LibreDB('../data', 'localhost', 28015, 'docliber')
from peer import PeerResource, PeerInstance
from document import DocumentResource, DocumentInstance
api = restful.Api(app)
api.decorators=[crossdomain(origin='*')]
api.add_resource(PeerResource, '/peers/')
api.add_resource(PeerInstance, '/peers/<string:id>/')
api.add_resource(DocumentResource, '/documents/')
api.add_resource(DocumentInstance, '/documents/<string:id>/')
|
import os
import ConfigParser
from flask import Flask, abort, request
from flask.ext import restful
from vaux.storage import LibreDB
from datetime import datetime
from werkzeug import secure_filename
from cors import crossdomain
app = Flask(__name__)
config = ConfigParser.SafeConfigParser()
config.read('/etc/vaux.ini')
database = LibreDB(
config.get('data', 'path'),
config.get('database', 'host'),
config.getint('database', 'port'),
config.get('database', 'database'))
from peer import PeerResource, PeerInstance
from document import DocumentResource, DocumentInstance
api = restful.Api(app)
api.decorators=[crossdomain(origin='*')]
api.add_resource(PeerResource, '/peers/')
api.add_resource(PeerInstance, '/peers/<string:id>/')
api.add_resource(DocumentResource, '/documents/')
api.add_resource(DocumentInstance, '/documents/<string:id>/')
|
Read in the database and data options from a config file
|
Read in the database and data options from a config file
I hope this works.
|
Python
|
mit
|
VauxIo/core
|
0948eced6cd551df7f136614b136378e9864b4eb
|
forms.py
|
forms.py
|
from flask import flash
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email, Length
def flash_errors(form):
""" Universal interface to handle form error.
Handles form error with the help of flash message
"""
for field, errors in form.errors.items():
for error in errors:
flash(u'Error in the %s field - %s' % (
getattr(form, field).label.text,
error
))
class Login(FlaskForm):
username = StringField('Username',
validators=[DataRequired(), Length(max=16)])
password = PasswordField('Password',
validators=[DataRequired()])
|
from flask import flash
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email, Length, EqualTo
def flash_errors(form):
""" Universal interface to handle form error.
Handles form error with the help of flash message
"""
for field, errors in form.errors.items():
for error in errors:
flash(u'Error in the %s field - %s' % (
getattr(form, field).label.text,
error
))
class Login(FlaskForm):
username = StringField('Username',
validators=[DataRequired(), Length(max=16)])
password = PasswordField('Password',
validators=[DataRequired()])
class AddEmployee(FlaskForm):
username = StringField(
'Username',
validators=[
DataRequired(),
Length(max=16)
]
)
fullname = StringField(
'Full name',
validators=[DataRequired()]
)
nip = StringField(
'Nip',
validators=[DataRequired()]
)
password = PasswordField(
'password',
validators=[
DataRequired(),
EqualTo('verifyPassword', message='Password must match.')
]
)
verifyPassword = PasswordField('Verify password')
|
Add input rule for adding employee
|
Add input rule for adding employee
|
Python
|
mit
|
openedoo/module_employee,openedoo/module_employee,openedoo/module_employee
|
848384b0283538556a231a32e4128d52ba9e1407
|
direlog.py
|
direlog.py
|
#!/usr/bin/env python
# encoding: utf-8
import sys
import re
import argparse
import fileinput
from argparse import RawDescriptionHelpFormatter
from patterns import pre_patterns
def prepare(infile, outfile=sys.stdout):
"""
Apply pre_patterns from patterns to infile
:infile: input file
"""
try:
for line in infile:
result = line
for pattern in pre_patterns:
result = re.sub(pattern[0], pattern[1], result, re.VERBOSE)
outfile.write(result)
except (KeyboardInterrupt):
pass
except:
raise
def main():
parser = argparse.ArgumentParser(description=\
"""
Parse file[s]\n\n
examlpe: cat error_log | tail -n 1000 | ./direlog.py
""", formatter_class=RawDescriptionHelpFormatter)
parser.add_argument('file', nargs='*', default=[],
help='file[s] to do some work')
parser.add_argument('-s', '--stat', action='store_const', const=True,
help='get statistics')
args = parser.parse_args()
input_stream = fileinput.input(args.stat)
prepare(input_stream)
pass
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# encoding: utf-8
import sys
import re
import argparse
import fileinput
from argparse import RawDescriptionHelpFormatter
from patterns import pre_patterns
def prepare(infile, outfile=sys.stdout):
"""
Apply pre_patterns from patterns to infile
:infile: input file
"""
try:
for line in infile:
result = line
for pattern in pre_patterns:
result = re.sub(pattern[0], pattern[1], result, re.VERBOSE)
outfile.write(result)
except (KeyboardInterrupt):
pass
except:
raise
def main():
parser = argparse.ArgumentParser(description=\
"""
Parse file[s]\n\n
examlpe: cat error_log | tail -n 1000 | ./direlog.py
""", formatter_class=RawDescriptionHelpFormatter)
parser.add_argument('file', nargs='*', default=[],
help='file[s] to do some work')
args = parser.parse_args()
input_stream = fileinput.input(args.file)
prepare(input_stream)
pass
if __name__ == '__main__':
main()
|
Fix args.file and remove stat from argparse
|
Fix args.file and remove stat from argparse
|
Python
|
mit
|
abcdw/direlog,abcdw/direlog
|
ef89d3608b9ab54aef105528f2c15fa9cc437bcd
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from os.path import abspath, dirname
from django.conf import settings
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
#!/usr/bin/env python
import sys
from os.path import abspath, dirname
from django.conf import settings
import django
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
if hasattr(django, 'setup'):
django.setup()
try:
from django.test.runner import DiscoverRunner
runner_class = DiscoverRunner
test_args = ['email_log.tests']
except ImportError:
from django.test.simple import DjangoTestSuiteRunner
runner_class = DjangoTestSuiteRunner
test_args = ['tests']
failures = runner_class(failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
Fix tests for Django 1.7
|
Fix tests for Django 1.7
|
Python
|
mit
|
treyhunner/django-email-log,treyhunner/django-email-log
|
da39bc268e3fe94af348690262fc116e3e0b2c9c
|
attachments/admin.py
|
attachments/admin.py
|
from attachments.models import Attachment
from django.contrib.contenttypes import generic
class AttachmentInlines(generic.GenericStackedInline):
model = Attachment
extra = 1
|
from attachments.models import Attachment
from django.contrib.contenttypes import admin
class AttachmentInlines(admin.GenericStackedInline):
model = Attachment
extra = 1
|
Fix deprecated modules for content types
|
Fix deprecated modules for content types
|
Python
|
bsd-3-clause
|
leotrubach/django-attachments,leotrubach/django-attachments
|
969334fec0822a30d1e5f10a458f79556053836a
|
fabfile.py
|
fabfile.py
|
from fabric.api import *
from fabric.colors import *
env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.virtualenv = 'source /usr/local/bin/virtualenvwrapper.sh'
env.project_root = '/opt/sana.protocol_builder'
def test():
local('python sana_builder/manage.py syncdb')
local('python sana_builder/manage.py test')
def deploy():
local('git push origin master')
with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
print(green('Pulling latest revision...'))
run('git pull origin master')
print(green('Installing dependencies...'))
run('pip install -qr requirements.txt')
print(green('Creating database tables...'))
run('python sana_builder/manage.py syncdb --noinput')
print(green('Importing fixtures...'))
run('python sana_builder/manage.py loaddata sana_builder/fixtures/pages.json')
print(green('Collecting static files...'))
run('python sana_builder/manage.py collectstatic --noinput')
print(green('Restarting gunicorn...'))
run('supervisorctl restart gunicorn')
|
from fabric.api import *
from fabric.colors import *
env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.virtualenv = 'source /usr/local/bin/virtualenvwrapper.sh'
env.project_root = '/opt/sana.protocol_builder'
def test():
local('python sana_builder/manage.py syncdb')
local('python sana_builder/manage.py test')
def update_host():
with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
print(green('Pulling latest revision...'))
run('git pull origin master')
print(green('Installing dependencies...'))
run('pip install -qr requirements.txt')
print(green('Creating database tables...'))
run('python sana_builder/manage.py syncdb --noinput')
print(green('Importing fixtures...'))
run('python sana_builder/manage.py loaddata sana_builder/fixtures/pages.json')
print(green('Collecting static files...'))
run('python sana_builder/manage.py collectstatic --noinput')
print(green('Restarting gunicorn...'))
run('supervisorctl restart gunicorn')
def travis_deploy():
update_host()
def local_deploy():
local('git push origin master')
update_host()
|
Update deploy script to support Travis.
|
Update deploy script to support Travis.
|
Python
|
bsd-3-clause
|
SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder
|
8e8370a76c67d7905c73bcb808f89e3cd4b994a3
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import django
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ALLOWED_HOSTS=[
'testserver',
],
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'permissions',
'permissions.tests',
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='permissions.tests.urls',
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
}],
TEST_RUNNER='django.test.runner.DiscoverRunner',
)
if django.VERSION[:2] >= (1, 7):
from django import setup
else:
setup = lambda: None
setup()
call_command("test")
|
#!/usr/bin/env python
import django
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ALLOWED_HOSTS=[
'testserver',
],
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'permissions',
'permissions.tests',
],
MIDDLEWARE_CLASSES=[],
PERMISSIONS={
'allow_staff': False,
},
ROOT_URLCONF='permissions.tests.urls',
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
}],
TEST_RUNNER='django.test.runner.DiscoverRunner',
)
if django.VERSION[:2] >= (1, 7):
from django import setup
else:
setup = lambda: None
setup()
call_command("test")
|
Add PERMISSIONS setting to test settings
|
Add PERMISSIONS setting to test settings
|
Python
|
mit
|
wylee/django-perms,PSU-OIT-ARC/django-perms
|
926731b05f22566e98a02737d673cca3fc0b28ec
|
docs/conf.py
|
docs/conf.py
|
# -*- coding: utf-8 -*-
### General settings
extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Firebase Admin SDK for PHP'
author = u'Jérôme Gamez'
copyright = u'Jérôme Gamez'
version = u'4.x'
html_title = u'Firebase Admin SDK for PHP Documentation'
html_short_title = u'Firebase Admin SDK for PHP'
exclude_patterns = ['_build']
html_static_path = ['_static']
suppress_warnings = ['image.nonlocal_uri']
### Theme settings
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
### Syntax Highlighting
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
lexers['php'] = PhpLexer(startinline=True, linenos=1)
lexers['php-annotations'] = PhpLexer(startinline=True, linenos=1)
### Integrations
html_context = {
"display_github": True,
"github_user": "kreait",
"github_repo": "firebase-php",
"github_version": "master",
"conf_py_path": "/docs/",
"source_suffix": ".rst",
}
|
# -*- coding: utf-8 -*-
### General settings
extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Firebase Admin SDK for PHP'
author = u'Jérôme Gamez'
copyright = u'Jérôme Gamez'
version = u'4.x'
html_title = u'Firebase Admin SDK for PHP Documentation'
html_short_title = u'Firebase Admin SDK for PHP'
exclude_patterns = ['_build']
html_static_path = ['_static']
suppress_warnings = ['image.nonlocal_uri']
### Theme settings
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'canonical_url': 'https://firebase-php.readthedocs.io',
'analytics_id': 'UA-82654714-3'
}
### Syntax Highlighting
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
lexers['php'] = PhpLexer(startinline=True, linenos=1)
lexers['php-annotations'] = PhpLexer(startinline=True, linenos=1)
### Integrations
html_context = {
"display_github": True,
"github_user": "kreait",
"github_repo": "firebase-php",
"github_version": "",
"conf_py_path": "/docs/",
"source_suffix": ".rst",
}
|
Fix "Edit on GitHub" links
|
Fix "Edit on GitHub" links
Using "master" seems to mess it up, see
https://github.com/readthedocs/readthedocs.org/issues/5518
|
Python
|
mit
|
kreait/firebase-php
|
7aaa385da78bef57c8b6339f6db04044ace08807
|
api/taxonomies/serializers.py
|
api/taxonomies/serializers.py
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, JSONAPIListField
class TaxonomyField(ser.Field):
def to_representation(self, obj):
if obj is not None:
return {'id': obj._id,
'text': obj.text}
return None
def to_internal_value(self, data):
return data
class TaxonomySerializer(JSONAPISerializer):
filterable_fields = frozenset([
'text',
'parents',
'id'
])
id = ser.CharField(source='_id', required=True)
text = ser.CharField(max_length=200)
parents = JSONAPIListField(child=TaxonomyField())
links = LinksField({
'parents': 'get_parent_urls',
'self': 'get_absolute_url',
})
def get_parent_urls(self, obj):
return [p.get_absolute_url() for p in obj.parents]
def get_absolute_url(self, obj):
return obj.get_absolute_url()
class Meta:
type_ = 'taxonomies'
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, JSONAPIListField
class TaxonomyField(ser.Field):
def to_representation(self, obj):
if obj is not None:
return {'id': obj._id,
'text': obj.text}
return None
def to_internal_value(self, data):
return data
class TaxonomySerializer(JSONAPISerializer):
filterable_fields = frozenset([
'text',
'parents',
'id'
])
id = ser.CharField(source='_id', required=True)
text = ser.CharField(max_length=200)
parents = JSONAPIListField(child=TaxonomyField())
child_count = ser.IntegerField()
links = LinksField({
'parents': 'get_parent_urls',
'self': 'get_absolute_url',
})
def get_parent_urls(self, obj):
return [p.get_absolute_url() for p in obj.parents]
def get_absolute_url(self, obj):
return obj.get_absolute_url()
class Meta:
type_ = 'taxonomies'
|
Add child_count taken from new Subject property
|
Add child_count taken from new Subject property
|
Python
|
apache-2.0
|
adlius/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,rdhyee/osf.io,sloria/osf.io,brianjgeiger/osf.io,sloria/osf.io,binoculars/osf.io,mattclark/osf.io,saradbowman/osf.io,aaxelb/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,laurenrevere/osf.io,samchrisinger/osf.io,binoculars/osf.io,aaxelb/osf.io,rdhyee/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,cslzchen/osf.io,mattclark/osf.io,crcresearch/osf.io,TomBaxter/osf.io,acshi/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,adlius/osf.io,acshi/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,felliott/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,felliott/osf.io,mattclark/osf.io,saradbowman/osf.io,erinspace/osf.io,hmoco/osf.io,emetsger/osf.io,alexschiller/osf.io,icereval/osf.io,mfraezz/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,chrisseto/osf.io,erinspace/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,acshi/osf.io,cslzchen/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,emetsger/osf.io,adlius/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,mluo613/osf.io,Nesiehr/osf.io,sloria/osf.io,cslzchen/osf.io,icereval/osf.io,caseyrollins/osf.io,binoculars/osf.io,aaxelb/osf.io,caseyrollins/osf.io,cslzchen/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,acshi/osf.io,brianjgeiger/osf.io,adlius/osf.io,caneruguz/osf.io,pattisdr/osf.io,cwisecarver/osf.io,mluo613/osf.io,chrisseto/osf.io,leb2dg/osf.io,mluo613/osf.io,alexschiller/osf.io,samchrisinger/osf.io,chennan47/osf.io,caneruguz/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,rdhyee/osf.io,chrisseto/osf.io,mluo613/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,felliott/osf.io,mfraezz/osf.io,aaxelb/osf.io,chennan47/osf.io,crcresearch/osf.io,alexschiller/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,emetsger/osf.io,leb2dg/osf.io,emetsger/osf.io,cwisecarver/osf.io,alexschiller/osf.io,Nesiehr/osf.io,hmoco/osf.io,hmoco/osf.io,felliott/osf.io,chennan47/osf.io,samchrisinger/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,TomBaxter/osf.io,baylee-d/osf.io
|
6c349621dd3331bf92f803d2d66c96868f8e94c6
|
src/geelweb/django/editos/runtests.py
|
src/geelweb/django/editos/runtests.py
|
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
test_dir = os.path.dirname(__file__)
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(['geelweb.django.editos'])
sys.exit(bool(failures))
|
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
test_dir = os.path.dirname(__file__)
sys.path.insert(0, test_dir)
import django
from django.test.utils import get_runner
from django.conf import settings
def runtests():
if django.VERSION[0] == 1 and django.VERSION[1] < 7:
from django.test.utils import setup_test_environment
setup_test_environment()
if django.VERSION[0] == 1 and django.VERSION[1] >= 7:
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner()
failures = test_runner.run_tests(['geelweb.django.editos'])
sys.exit(bool(failures))
|
Upgrade to test using django 1.7 and 1.8
|
Upgrade to test using django 1.7 and 1.8
|
Python
|
mit
|
geelweb/django-editos,geelweb/django-editos
|
de441445dbdade4d937783626f1beeb9f439ee11
|
helpers.py
|
helpers.py
|
import feedparser
import datetime
from .models import RssEntry
class RssSyncHelper(object):
def __init__(self, feed):
self.feed = feed
def save_entry(self, result):
pub_date = result.updated_parsed
published = datetime.date(pub_date[0], pub_date[1], pub_date[2])
return RssEntry.objects.get_or_create(
title=result.title,
feed=self.feed,
summary=result.content[0]['value'],
link=result.link,
date=published,
)
def sync(self):
feed = feedparser.parse(self.feed.url)
for entry in feed.entries:
self.save_entry(entry)
def sync_wordpress_paginated(self, page):
"""Sync a Wordpress paginated feed"""
feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page))
for entry in feed.entries:
self.save_entry(entry)
|
import feedparser
import datetime
from .models import RssEntry
def add_custom_acceptable_elements(elements):
"""
Add custom acceptable elements so iframes and other potential video
elements will get synched.
"""
elements += list(feedparser._HTMLSanitizer.acceptable_elements)
feedparser._HTMLSanitizer.acceptable_elements = set(elements)
custom_acceptable_elements = ['iframe', 'embed', 'object',]
add_custom_acceptable_elements(custom_acceptable_elements)
class RssSyncHelper(object):
def __init__(self, feed):
self.feed = feed
def save_entry(self, result):
pub_date = result.updated_parsed
published = datetime.date(pub_date[0], pub_date[1], pub_date[2])
return RssEntry.objects.get_or_create(
title=result.title,
feed=self.feed,
summary=result.content[0]['value'],
link=result.link,
date=published,
)
def sync(self):
feed = feedparser.parse(self.feed.url)
for entry in feed.entries:
self.save_entry(entry)
def sync_wordpress_paginated(self, page):
"""Sync a Wordpress paginated feed"""
feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page))
for entry in feed.entries:
self.save_entry(entry)
|
Allow iframes to be synched
|
Allow iframes to be synched
|
Python
|
bsd-3-clause
|
ebrelsford/django-rsssync
|
fe5eb7db52725f8d136cbeba4341f5c3a33cf199
|
tensorflow_model_optimization/python/core/api/quantization/keras/quantizers/__init__.py
|
tensorflow_model_optimization/python/core/api/quantization/keras/quantizers/__init__.py
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module containing Quantization abstraction and quantizers."""
# quantize with custom quantization parameterization or implementation, or
# handle custom Keras layers.
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import LastValueQuantizer
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import MovingAverageQuantizer
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import Quantizer
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module containing Quantization abstraction and quantizers."""
# quantize with custom quantization parameterization or implementation, or
# handle custom Keras layers.
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import AllValuesQuantizer
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import LastValueQuantizer
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import MovingAverageQuantizer
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import Quantizer
|
Include AllValuesQuantizer in external APIs
|
Include AllValuesQuantizer in external APIs
PiperOrigin-RevId: 320104499
|
Python
|
apache-2.0
|
tensorflow/model-optimization,tensorflow/model-optimization
|
589e2df8c9af8ce8102904c9cfebbf87ee2df744
|
ckanext/orgdashboards/tests/helpers.py
|
ckanext/orgdashboards/tests/helpers.py
|
from ckan.tests import factories
def create_mock_data(**kwargs):
mock_data = {}
mock_data['organization'] = factories.Organization()
mock_data['organization_name'] = mock_data['organization']['name']
mock_data['organization_id'] = mock_data['organization']['id']
mock_data['dataset'] = factories.Dataset(owner_org=mock_data['organization_id'])
mock_data['dataset_name'] = mock_data['dataset']['name']
mock_data['package_id'] = mock_data['dataset']['id']
mock_data['resource'] = factories.Resource(package_id=mock_data['package_id'])
mock_data['resource_name'] = mock_data['resource']['name']
mock_data['resource_id'] = mock_data['resource']['id']
mock_data['resource_view'] = factories.ResourceView(
resource_id=mock_data['resource_id'])
mock_data['resource_view_title'] = mock_data['resource_view']['title']
mock_data['context'] = {
'user': factories._get_action_user_name(kwargs)
}
return mock_data
|
''' Helper methods for tests '''
import string
import random
from ckan.tests import factories
def create_mock_data(**kwargs):
mock_data = {}
mock_data['organization'] = factories.Organization()
mock_data['organization_name'] = mock_data['organization']['name']
mock_data['organization_id'] = mock_data['organization']['id']
mock_data['dataset'] = factories.Dataset(owner_org=mock_data['organization_id'])
mock_data['dataset_name'] = mock_data['dataset']['name']
mock_data['package_id'] = mock_data['dataset']['id']
mock_data['resource'] = factories.Resource(package_id=mock_data['package_id'])
mock_data['resource_name'] = mock_data['resource']['name']
mock_data['resource_id'] = mock_data['resource']['id']
mock_data['resource_view'] = factories.ResourceView(
resource_id=mock_data['resource_id'])
mock_data['resource_view_title'] = mock_data['resource_view']['title']
mock_data['context'] = {
'user': factories._get_action_user_name(kwargs)
}
return mock_data
def id_generator(size=6, chars=string.ascii_lowercase + string.digits):
''' Create random id which is a combination of letters and numbers '''
return ''.join(random.choice(chars) for _ in range(size))
|
Add function for generating random id
|
Add function for generating random id
|
Python
|
agpl-3.0
|
ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards
|
b8abb57f7a0e822ba32be2d379bb967f4abfbc21
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='Zuice',
version='0.2-dev',
description='A dependency injection framework for Python',
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://gitorious.org/zuice',
packages=['zuice'],
)
|
from distutils.core import setup
setup(
name='Zuice',
version='0.2-dev',
description='A dependency injection framework for Python',
author='Michael Williamson',
author_email='mike@zwobble.org',
url='https://github.com/mwilliamson/zuice',
packages=['zuice'],
)
|
Update package URL to use GitHub
|
Update package URL to use GitHub
|
Python
|
bsd-2-clause
|
mwilliamson/zuice
|
18bd0bcc0d892aef4ea9babfc6ec2af6e40cea62
|
manager/urls.py
|
manager/urls.py
|
from django.conf.urls import url
from manager import views
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/$', views.package_detail, name='package_detail'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/build/$', views.package_build, name='package_build'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/(?P<build_number>\d+)/$', views.build_detail, name='build_detail')
]
|
from django.conf.urls import url
from manager import views
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^packages/$', views.package_list, name='package_list'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/$', views.package_detail, name='package_detail'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/build/$', views.package_build, name='package_build'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/(?P<build_number>\d+)/$', views.build_detail, name='build_detail')
]
|
Add alternative package list url
|
Add alternative package list url
|
Python
|
mit
|
colajam93/aurpackager,colajam93/aurpackager,colajam93/aurpackager,colajam93/aurpackager
|
646b0f8babf346f3ec21ae688453deee24fb410f
|
tests/core/tests/base_formats_tests.py
|
tests/core/tests/base_formats_tests.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.test import TestCase
from django.utils.text import force_text
from import_export.formats import base_formats
class XLSTest(TestCase):
def test_binary_format(self):
self.assertTrue(base_formats.XLS().is_binary())
class CSVTest(TestCase):
def setUp(self):
self.format = base_formats.CSV()
def test_import_dos(self):
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-dos.csv')
in_stream = open(filename, self.format.get_read_mode()).read()
expected = 'id,name,author_email\n1,Some book,test@example.com\n'
self.assertEqual(in_stream, expected)
def test_import_unicode(self):
# importing csv UnicodeEncodeError 347
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-unicode.csv')
in_stream = open(filename, self.format.get_read_mode())
data = force_text(in_stream.read())
base_formats.CSV().create_dataset(data)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.test import TestCase
try:
from django.utils.encoding import force_text
except ImportError:
from django.utils.encoding import force_unicode as force_text
from import_export.formats import base_formats
class XLSTest(TestCase):
def test_binary_format(self):
self.assertTrue(base_formats.XLS().is_binary())
class CSVTest(TestCase):
def setUp(self):
self.format = base_formats.CSV()
def test_import_dos(self):
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-dos.csv')
in_stream = open(filename, self.format.get_read_mode()).read()
expected = 'id,name,author_email\n1,Some book,test@example.com\n'
self.assertEqual(in_stream, expected)
def test_import_unicode(self):
# importing csv UnicodeEncodeError 347
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-unicode.csv')
in_stream = open(filename, self.format.get_read_mode())
data = force_text(in_stream.read())
base_formats.CSV().create_dataset(data)
|
Fix importing force_text tests for 1.4 compatibility
|
Fix importing force_text tests for 1.4 compatibility
use 1.4 compat code
|
Python
|
bsd-2-clause
|
copperleaftech/django-import-export,PetrDlouhy/django-import-export,PetrDlouhy/django-import-export,rhunwicks/django-import-export,copperleaftech/django-import-export,Apkawa/django-import-export,jnns/django-import-export,PetrDlouhy/django-import-export,daniell/django-import-export,django-import-export/django-import-export,django-import-export/django-import-export,pajod/django-import-export,daniell/django-import-export,brillgen/django-import-export,pajod/django-import-export,bmihelac/django-import-export,manelclos/django-import-export,jnns/django-import-export,brillgen/django-import-export,jnns/django-import-export,jnns/django-import-export,copperleaftech/django-import-export,PetrDlouhy/django-import-export,bmihelac/django-import-export,pajod/django-import-export,bmihelac/django-import-export,Apkawa/django-import-export,bmihelac/django-import-export,Apkawa/django-import-export,daniell/django-import-export,daniell/django-import-export,copperleaftech/django-import-export,manelclos/django-import-export,brillgen/django-import-export,django-import-export/django-import-export,rhunwicks/django-import-export,pajod/django-import-export,django-import-export/django-import-export,manelclos/django-import-export,rhunwicks/django-import-export,brillgen/django-import-export
|
a3d655bd311161679bafbcad66f678d412e158f0
|
colour/examples/volume/examples_rgb.py
|
colour/examples/volume/examples_rgb.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Showcases RGB colourspace volume computations.
"""
from __future__ import division, unicode_literals
import colour
from colour.utilities.verbose import message_box
message_box('RGB Colourspace Volume Computations')
message_box('Computing "ProPhoto RGB" RGB colourspace limits.')
limits = colour.RGB_colourspace_limits(colour.PROPHOTO_RGB_COLOURSPACE)
print(limits)
print('\n')
samples = 10e4
message_box(('Computing "ProPhoto RGB" RGB colourspace volume using '
'{0} samples.'.format(samples)))
print(colour.RGB_colourspace_volume_MonteCarlo(
colour.PROPHOTO_RGB_COLOURSPACE,
samples=samples,
limits=limits * 1.1))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Showcases RGB colourspace volume computations.
"""
from __future__ import division, unicode_literals
import colour
from colour.utilities.verbose import message_box
message_box('RGB Colourspace Volume Computations')
message_box('Computing "ProPhoto RGB" RGB colourspace limits.')
limits = colour.RGB_colourspace_limits(colour.PROPHOTO_RGB_COLOURSPACE)
print(limits)
print('\n')
samples = 10e4
message_box(('Computing "ProPhoto RGB" RGB colourspace volume using '
'{0} samples.'.format(samples)))
print(colour.RGB_colourspace_volume_MonteCarlo(
colour.PROPHOTO_RGB_COLOURSPACE,
samples=samples,
limits=limits * 1.1))
print('\n')
message_box(('Computing "ProPhoto RGB" RGB colourspace coverage of Pointer\'s '
'Gamut using {0} samples.'.format(samples)))
print(colour.RGB_colourspace_pointer_gamut_coverage_MonteCarlo(
colour.PROPHOTO_RGB_COLOURSPACE,
samples=samples))
|
Add "Pointer's Gamut" coverage computation example.
|
Add "Pointer's Gamut" coverage computation example.
|
Python
|
bsd-3-clause
|
colour-science/colour
|
160f29d42086a10bc38d255d8e03a30b1eb01deb
|
medical_prescription_sale/__openerp__.py
|
medical_prescription_sale/__openerp__.py
|
# -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Medical Prescription Sales',
'summary': 'Create Sale Orders from Prescriptions',
'version': '9.0.0.1.0',
'author': "LasLabs, Odoo Community Association (OCA)",
'category': 'Medical',
'depends': [
'sale',
'medical_prescription',
'medical_pharmacy',
'medical_prescription_thread',
],
"website": "https://laslabs.com",
"license": "AGPL-3",
"data": [
'data/ir_sequence.xml',
'data/product_category_data.xml',
'wizards/medical_sale_wizard_view.xml',
'wizards/medical_sale_temp_view.xml',
'views/prescription_order_line_view.xml',
'views/prescription_order_view.xml',
'views/sale_order_view.xml',
'views/medical_physician_view.xml',
'views/medical_patient_view.xml',
],
'demo': [
'demo/medical_medicament_demo.xml',
'demo/medical_medication_demo.xml',
'demo/medical_prescription_order_demo.xml',
'demo/medical_prescription_order_line_demo.xml',
],
'installable': True,
'auto_install': False,
}
|
# -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Medical Prescription Sales',
'summary': 'Create Sale Orders from Prescriptions',
'version': '9.0.0.1.0',
'author': "LasLabs, Odoo Community Association (OCA)",
'category': 'Medical',
'depends': [
'sale',
'stock',
'medical_prescription',
'medical_pharmacy',
'medical_prescription_thread',
],
"website": "https://laslabs.com",
"license": "AGPL-3",
"data": [
'data/ir_sequence.xml',
'data/product_category_data.xml',
'wizards/medical_sale_wizard_view.xml',
'wizards/medical_sale_temp_view.xml',
'views/prescription_order_line_view.xml',
'views/prescription_order_view.xml',
'views/sale_order_view.xml',
'views/medical_physician_view.xml',
'views/medical_patient_view.xml',
],
'demo': [
'demo/medical_medicament_demo.xml',
'demo/medical_medication_demo.xml',
'demo/medical_prescription_order_demo.xml',
'demo/medical_prescription_order_line_demo.xml',
],
'installable': True,
'auto_install': False,
}
|
Add dependency * Add dependency on stock to manifest file. This is needed by some of the demo data in the module, which was not installing due to its absence.
|
[FIX] medical_prescription_sale: Add dependency
* Add dependency on stock to manifest file. This is needed by some of the demo
data in the module, which was not installing due to its absence.
|
Python
|
agpl-3.0
|
laslabs/vertical-medical,laslabs/vertical-medical
|
81b77db1a455a976a5c516decb5fdd141f10bc31
|
Lib/test/test_fork1.py
|
Lib/test/test_fork1.py
|
"""This test checks for correct fork() behavior.
"""
import os
import time
import unittest
from test.fork_wait import ForkWait
from test.test_support import run_unittest, reap_children
try:
os.fork
except AttributeError:
raise unittest.SkipTest, "os.fork not defined -- skipping test_fork1"
class ForkTest(ForkWait):
def wait_impl(self, cpid):
for i in range(10):
# waitpid() shouldn't hang, but some of the buildbots seem to hang
# in the forking tests. This is an attempt to fix the problem.
spid, status = os.waitpid(cpid, os.WNOHANG)
if spid == cpid:
break
time.sleep(1.0)
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
def test_main():
run_unittest(ForkTest)
reap_children()
if __name__ == "__main__":
test_main()
|
"""This test checks for correct fork() behavior.
"""
import os
import time
from test.fork_wait import ForkWait
from test.test_support import run_unittest, reap_children, import_module
import_module('os.fork')
class ForkTest(ForkWait):
def wait_impl(self, cpid):
for i in range(10):
# waitpid() shouldn't hang, but some of the buildbots seem to hang
# in the forking tests. This is an attempt to fix the problem.
spid, status = os.waitpid(cpid, os.WNOHANG)
if spid == cpid:
break
time.sleep(1.0)
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
def test_main():
run_unittest(ForkTest)
reap_children()
if __name__ == "__main__":
test_main()
|
Convert import try/except to use test_support.import_module().
|
Convert import try/except to use test_support.import_module().
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
9ea9d111c8b6a20015f9ad6149f690c9e8c0774d
|
tools/tiny-test-fw/Utility/__init__.py
|
tools/tiny-test-fw/Utility/__init__.py
|
from __future__ import print_function
import sys
_COLOR_CODES = {
"white": '\033[0m',
"red": '\033[31m',
"green": '\033[32m',
"orange": '\033[33m',
"blue": '\033[34m',
"purple": '\033[35m',
"W": '\033[0m',
"R": '\033[31m',
"G": '\033[32m',
"O": '\033[33m',
"B": '\033[34m',
"P": '\033[35m'
}
def console_log(data, color="white", end="\n"):
"""
log data to console.
(if not flush console log, Gitlab-CI won't update logs during job execution)
:param data: data content
:param color: color
"""
if color not in _COLOR_CODES:
color = "white"
color_codes = _COLOR_CODES[color]
print(color_codes + data, end=end)
if color not in ["white", "W"]:
# reset color to white for later logs
print(_COLOR_CODES["white"] + "\r")
sys.stdout.flush()
|
from __future__ import print_function
import sys
_COLOR_CODES = {
"white": u'\033[0m',
"red": u'\033[31m',
"green": u'\033[32m',
"orange": u'\033[33m',
"blue": u'\033[34m',
"purple": u'\033[35m',
"W": u'\033[0m',
"R": u'\033[31m',
"G": u'\033[32m',
"O": u'\033[33m',
"B": u'\033[34m',
"P": u'\033[35m'
}
def console_log(data, color="white", end="\n"):
"""
log data to console.
(if not flush console log, Gitlab-CI won't update logs during job execution)
:param data: data content
:param color: color
"""
if color not in _COLOR_CODES:
color = "white"
color_codes = _COLOR_CODES[color]
if type(data) is type(b''):
data = data.decode('utf-8', 'replace')
print(color_codes + data, end=end)
if color not in ["white", "W"]:
# reset color to white for later logs
print(_COLOR_CODES["white"] + u"\r")
sys.stdout.flush()
|
Make Utility.console_log accept Unicode and byte strings as well
|
tools: Make Utility.console_log accept Unicode and byte strings as well
|
Python
|
apache-2.0
|
mashaoze/esp-idf,espressif/esp-idf,armada-ai/esp-idf,www220/esp-idf,www220/esp-idf,mashaoze/esp-idf,www220/esp-idf,www220/esp-idf,mashaoze/esp-idf,espressif/esp-idf,www220/esp-idf,espressif/esp-idf,espressif/esp-idf,armada-ai/esp-idf,mashaoze/esp-idf,armada-ai/esp-idf,armada-ai/esp-idf,mashaoze/esp-idf
|
7baaac652f74ea44817cd48eb1a4b3aa36f94e23
|
armstrong/hatband/sites.py
|
armstrong/hatband/sites.py
|
from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = django_site._registry
|
from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
class AdminSite(DjangoAdminSite):
def __init__(self, default_site=None, *args, **kwargs):
if default_site is None:
default_site = django_site
super(AdminSite, self).__init__(*args, **kwargs)
self._registry = HatbandAndDjangoRegistry(self,
default_site=default_site)
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
|
Revert "Simplify this code and make sure AdminSite doesn't act like a singleton"
|
Revert "Simplify this code and make sure AdminSite doesn't act like a singleton"
Unfortunately, it's not that simple. Without the runtime merging from
inside hatband.AdminSite, this doesn't seem to pick up everything else.
This reverts commit 122b4e6982fe7a74ee668c1b146c32a61c72ec7b.
|
Python
|
apache-2.0
|
texastribune/armstrong.hatband,armstrong/armstrong.hatband,armstrong/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband,texastribune/armstrong.hatband
|
812d456599e1540e329a4ddc05a7541b5bfdc149
|
labonneboite/conf/__init__.py
|
labonneboite/conf/__init__.py
|
import imp
import os
from labonneboite.conf.common import settings_common
# Settings
# --------
# Default settings of the application are defined in `labonneboite/conf/common/settings_common.py`.
# A specific environment (staging, production...) can define its custom settings by:
# - creating a specific `settings` file, e.g. `lbb_staging_settings.py`
# - defining an environment variable containing the path to this specific `settings` file
#
# Specific and default settings will be merged, and values found in specific settings will take precedence.
# When no specific settings are found, `labonneboite/conf/local_settings.py` is used.
# Dynamically import LBB_SETTINGS environment variable as the `settings`
# module, or import `local_settings.py` as the `settings` module if it does not
# exist.
settings = settings_common
if settings_common.get_current_env() != settings_common.ENV_TEST:
# Don't override settings in tests
settings_module = os.path.join(os.path.dirname(__file__), 'local_settings.py')
settings_module = os.environ.get('LBB_SETTINGS', settings_module)
settings = imp.load_source('settings', settings_module)
# Iterate over each setting defined in the `settings_common` module and add them to the dynamically
# imported `settings` module if they don't already exist.
for setting in dir(settings_common):
if not hasattr(settings, setting):
setattr(settings, setting, getattr(settings_common, setting))
|
import imp
import os
from labonneboite.conf.common import settings_common
# Settings
# --------
# Default settings of the application are defined in `labonneboite/conf/common/settings_common.py`.
# A specific environment (staging, production...) can define its custom settings by:
# - creating a specific `settings` file, e.g. `lbb_staging_settings.py`
# - defining an environment variable containing the path to this specific `settings` file
#
# Specific and default settings will be merged, and values found in specific settings will take precedence.
# When no specific settings are found, `labonneboite/conf/local_settings.py` is used.
# Dynamically import LBB_SETTINGS environment variable as the `settings`
# module, or import `local_settings.py` as the `settings` module if it does not
# exist.
settings = settings_common
if settings_common.get_current_env() != settings_common.ENV_TEST:
# Don't override settings in tests
settings_module = os.path.join(os.path.dirname(__file__), 'local_settings.py')
settings_module = os.environ.get('LBB_SETTINGS', settings_module)
try:
settings = imp.load_source('settings', settings_module)
except FileNotFoundError:
pass
else:
# Iterate over each setting defined in the `settings_common` module and add them to the dynamically
# imported `settings` module if they don't already exist.
for setting in dir(settings_common):
if not hasattr(settings, setting):
setattr(settings, setting, getattr(settings_common, setting))
|
Fix FileNotFoundError on missing local_settings.py
|
Fix FileNotFoundError on missing local_settings.py
This has been broken for a long time... When running LBB without a
local_settings.py and without an LBB_ENV environment variable, importing
local_settings.py was resulting in a FileNotFoundError.
|
Python
|
agpl-3.0
|
StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite
|
1f4ff058d14a32e7e7b9a28daee54a2e8ea1eb02
|
media.py
|
media.py
|
# media.py
class Movie(object):
def __init__(self,
title,
storyline,
poster_image_url,
trailer_youtube_url,
lead_actors,
release_date,
mpaa_rating,
language,
runtime,
production_companies,
trivia
):
self.title = title
self.storyline = storyline
self.poster_image_url = poster_image_url
self.trailer_youtube_url = trailer_youtube_url
self.lead_actors = lead_actors
self.release_date = release_date
self.mpaa_rating = mpaa_rating
self.language = language
self.runtime = runtime
self.production_companies = production_companies
self.trivia = trivia
|
# media.py
class Movie(object):
""" Movie class for creating a movie """
def __init__(self,
title,
storyline,
poster_image_url,
trailer_youtube_url,
lead_actors,
release_date,
mpaa_rating,
language,
runtime,
production_companies,
trivia
):
"""
Args:
title (str): Title of the movie.
storyline (str): Brief storyline of the movie.
poster_image_url (str): URL of the movie's poster image.
trailer_youtube_url (str): Youtube URL of the movie's trailer.
lead_actors (List[str]): A list of the main cast.
release_date (str): Release date of the movie.
mpaa_rating (str): MPAA rating of the movie.
language (str): Language of the movie.
runtime (str): Runtime of the movie.
production_companies (List[str]): A list of the production companies.
trivia (List[str]): A list of some movie trivia.
"""
self.title = title
self.storyline = storyline
self.poster_image_url = poster_image_url
self.trailer_youtube_url = trailer_youtube_url
self.lead_actors = lead_actors
self.release_date = release_date
self.mpaa_rating = mpaa_rating
self.language = language
self.runtime = runtime
self.production_companies = production_companies
self.trivia = trivia
|
Add docstring for class Movie
|
Add docstring for class Movie
|
Python
|
mit
|
vishallama/udacity-fullstack-movie-trailer,vishallama/udacity-fullstack-movie-trailer
|
4dcb0e56627d3b801b5377d77fca721c43090ce2
|
bom_data_parser/axf_parser.py
|
bom_data_parser/axf_parser.py
|
import csv
def read_axf(axf_string):
blocks = {}
state = 'new_block'
for line in axf_string.split('\n'):
if line == '[$]' or line == '':
pass
elif line.startswith('['):
block_key = line.replace('[',"").replace(']',"")
print block_key
else:
if block_key not in blocks:
blocks[block_key] = []
blocks[block_key].append(line)
for k in blocks.keys():
if k == 'data':
is_data = False
data_block = {}
for row in csv.reader(blocks[k]):
if is_data:
data_block[row[1]] = {}
for col_name, col_value in zip(header_row, row):
data_block[row[1]][col_name] = col_value
else:
header_row = row
is_data = True
else:
# probably notice or header ... do something awesome with them
pass
return data_block
def read_axf_file(axf_file):
with open(axf_file, 'r') as f:
return read_axf(f.read())
if __name__ == "__main__":
print read_axf_file('../tests/data/IDV60700.axf')
|
import csv
def read_axf(axf_string):
blocks = {}
state = 'new_block'
for line in axf_string.split('\n'):
if line == '[$]' or line == '':
pass
elif line.startswith('['):
block_key = line.replace('[',"").replace(']',"")
else:
if block_key not in blocks:
blocks[block_key] = []
blocks[block_key].append(line)
for k in blocks.keys():
if k == 'data':
is_data = False
data_block = {}
for row in csv.reader(blocks[k]):
if is_data:
data_block[row[1]] = {}
for col_name, col_value in zip(header_row, row):
data_block[row[1]][col_name] = col_value
else:
header_row = row
is_data = True
else:
# probably notice or header ... do something awesome with them
pass
return data_block
def read_axf_file(axf_file):
with open(axf_file, 'r') as f:
return read_axf(f.read())
if __name__ == "__main__":
print(read_axf_file('../tests/data/IDV60700.axf'))
|
Fix some print statments for Python3 compatibility.
|
Fix some print statments for Python3 compatibility.
|
Python
|
bsd-3-clause
|
amacd31/bom_data_parser,amacd31/bom_data_parser
|
828d03d7a49d65e8584d4bc373ae4d429b291104
|
tests/test_tensorflow_addons.py
|
tests/test_tensorflow_addons.py
|
import unittest
import tensorflow as tf
import tensorflow_addons as tfa
class TestTensorflowAddons(unittest.TestCase):
def test_tfa_image(self):
img_raw = tf.io.read_file('/input/tests/data/dot.png')
img = tf.io.decode_image(img_raw)
img = tf.image.convert_image_dtype(img, tf.float32)
mean = tfa.image.mean_filter2d(img, filter_shape=1)
self.assertEqual(1, len(mean))
|
import unittest
import numpy as np
import tensorflow as tf
import tensorflow_addons as tfa
class TestTensorflowAddons(unittest.TestCase):
def test_tfa_image(self):
img_raw = tf.io.read_file('/input/tests/data/dot.png')
img = tf.io.decode_image(img_raw)
img = tf.image.convert_image_dtype(img, tf.float32)
mean = tfa.image.mean_filter2d(img, filter_shape=1)
self.assertEqual(1, len(mean))
# This test exercises TFA Custom Op. See: b/145555176
def test_gelu(self):
x = tf.constant([[0.5, 1.2, -0.3]])
layer = tfa.layers.GELU()
result = layer(x)
self.assertEqual((1, 3), result.shape)
|
Add a test exercising TFA custom op.
|
Add a test exercising TFA custom op.
To prevent future regression.
BUG=145555176
|
Python
|
apache-2.0
|
Kaggle/docker-python,Kaggle/docker-python
|
c9ca274a1a5e9596de553d2ae16950a845359321
|
examples/plotting/file/geojson_points.py
|
examples/plotting/file/geojson_points.py
|
from bokeh.io import output_file, show
from bokeh.models import GeoJSONDataSource
from bokeh.plotting import figure
from bokeh.sampledata.sample_geojson import geojson
output_file("geojson_points.html", title="GeoJSON Points")
p = figure()
p.circle(line_color=None, fill_alpha=0.8, source=GeoJSONDataSource(geojson=geojson))
show(p)
|
from bokeh.io import output_file, show
from bokeh.models import GeoJSONDataSource, HoverTool
from bokeh.plotting import figure
from bokeh.sampledata.sample_geojson import geojson
output_file("geojson_points.html", title="GeoJSON Points")
p = figure()
p.circle(line_color=None, fill_alpha=0.8, size=20, source=GeoJSONDataSource(geojson=geojson))
p.add_tools(HoverTool(tooltips=[("Organisation Name", "@OrganisationName")]))
show(p)
|
Add HoverTool to show off properties availability
|
Add HoverTool to show off properties availability
|
Python
|
bsd-3-clause
|
draperjames/bokeh,bokeh/bokeh,timsnyder/bokeh,dennisobrien/bokeh,maxalbert/bokeh,phobson/bokeh,jakirkham/bokeh,stonebig/bokeh,jakirkham/bokeh,bokeh/bokeh,DuCorey/bokeh,justacec/bokeh,dennisobrien/bokeh,clairetang6/bokeh,azjps/bokeh,timsnyder/bokeh,aiguofer/bokeh,timsnyder/bokeh,ericmjl/bokeh,DuCorey/bokeh,rs2/bokeh,bokeh/bokeh,jakirkham/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,azjps/bokeh,mindriot101/bokeh,philippjfr/bokeh,aiguofer/bokeh,timsnyder/bokeh,draperjames/bokeh,clairetang6/bokeh,percyfal/bokeh,stonebig/bokeh,philippjfr/bokeh,DuCorey/bokeh,schoolie/bokeh,ptitjano/bokeh,mindriot101/bokeh,schoolie/bokeh,jakirkham/bokeh,aiguofer/bokeh,stonebig/bokeh,percyfal/bokeh,phobson/bokeh,phobson/bokeh,dennisobrien/bokeh,azjps/bokeh,quasiben/bokeh,clairetang6/bokeh,draperjames/bokeh,ericmjl/bokeh,aavanian/bokeh,msarahan/bokeh,aiguofer/bokeh,ericmjl/bokeh,philippjfr/bokeh,htygithub/bokeh,dennisobrien/bokeh,ericmjl/bokeh,rs2/bokeh,percyfal/bokeh,quasiben/bokeh,ptitjano/bokeh,KasperPRasmussen/bokeh,timsnyder/bokeh,dennisobrien/bokeh,maxalbert/bokeh,aavanian/bokeh,msarahan/bokeh,percyfal/bokeh,htygithub/bokeh,rs2/bokeh,DuCorey/bokeh,msarahan/bokeh,ptitjano/bokeh,philippjfr/bokeh,clairetang6/bokeh,draperjames/bokeh,maxalbert/bokeh,azjps/bokeh,aiguofer/bokeh,Karel-van-de-Plassche/bokeh,ptitjano/bokeh,mindriot101/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,rs2/bokeh,bokeh/bokeh,schoolie/bokeh,phobson/bokeh,KasperPRasmussen/bokeh,draperjames/bokeh,aavanian/bokeh,phobson/bokeh,Karel-van-de-Plassche/bokeh,htygithub/bokeh,philippjfr/bokeh,justacec/bokeh,rs2/bokeh,Karel-van-de-Plassche/bokeh,quasiben/bokeh,aavanian/bokeh,DuCorey/bokeh,stonebig/bokeh,ericmjl/bokeh,schoolie/bokeh,msarahan/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,KasperPRasmussen/bokeh,KasperPRasmussen/bokeh,htygithub/bokeh,mindriot101/bokeh,azjps/bokeh,bokeh/bokeh,maxalbert/bokeh,percyfal/bokeh,schoolie/bokeh,aavanian/bokeh,ptitjano/bokeh
|
a0fc801130fa9068c5acc0a48d389f469cdb4bb2
|
tasks.py
|
tasks.py
|
"""
Automation tasks, aided by the Invoke package.
"""
import os
import webbrowser
from invoke import task, run
DOCS_DIR = 'docs'
DOCS_OUTPUT_DIR = os.path.join(DOCS_DIR, '_build')
@task
def docs(show=True):
"""Build the docs and show them in default web browser."""
run('sphinx-build docs docs/_build')
if show:
webbrowser.open_new_tab(os.path.join(DOCS_OUTPUT_DIR, 'index.html'))
|
"""
Automation tasks, aided by the Invoke package.
"""
import os
import webbrowser
from invoke import task, run
DOCS_DIR = 'docs'
DOCS_OUTPUT_DIR = os.path.join(DOCS_DIR, '_build')
@task
def docs(output='html', rebuild=False, show=True):
"""Build the docs and show them in default web browser."""
build_cmd = 'sphinx-build -b {output} {all} docs docs/_build'.format(
output=output,
all='-a -E' if rebuild else '')
run(build_cmd)
if show:
webbrowser.open_new_tab(os.path.join(DOCS_OUTPUT_DIR, 'index.html'))
|
Add more options to the `docs` task
|
Add more options to the `docs` task
|
Python
|
bsd-3-clause
|
Xion/callee
|
704439e7ae99d215948c94a5dfa61ee1f3f57971
|
fireplace/cards/tgt/neutral_legendary.py
|
fireplace/cards/tgt/neutral_legendary.py
|
from ..utils import *
##
# Minions
# Confessor Paletress
class AT_018:
inspire = Summon(CONTROLLER, RandomMinion(rarity=Rarity.LEGENDARY))
# Skycap'n Kragg
class AT_070:
cost = lambda self, i: i - len(self.controller.field.filter(race=Race.PIRATE))
|
from ..utils import *
##
# Minions
# Confessor Paletress
class AT_018:
inspire = Summon(CONTROLLER, RandomMinion(rarity=Rarity.LEGENDARY))
# Skycap'n Kragg
class AT_070:
cost = lambda self, i: i - len(self.controller.field.filter(race=Race.PIRATE))
# Gormok the Impaler
class AT_122:
play = Hit(TARGET, 4)
# Chillmaw
class AT_123:
deathrattle = HOLDING_DRAGON & Hit(ALL_MINIONS, 3)
# Nexus-Champion Saraad
class AT_127:
inspire = Give(CONTROLLER, RandomSpell())
# The Skeleton Knight
class AT_128:
deathrattle = JOUST & Bounce(SELF)
# Fjola Lightbane
class AT_129:
events = Play(CONTROLLER, SPELL, SELF).on(SetTag(SELF, {GameTag.DIVINE_SHIELD: True}))
# Eydis Darkbane
class AT_131:
events = Play(CONTROLLER, SPELL, SELF).on(Hit(RANDOM_ENEMY_CHARACTER, 3))
|
Implement more TGT Neutral Legendaries
|
Implement more TGT Neutral Legendaries
|
Python
|
agpl-3.0
|
Ragowit/fireplace,liujimj/fireplace,oftc-ftw/fireplace,amw2104/fireplace,amw2104/fireplace,beheh/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,NightKev/fireplace,liujimj/fireplace,oftc-ftw/fireplace,jleclanche/fireplace,Ragowit/fireplace,Meerkov/fireplace,Meerkov/fireplace
|
d86a4b75b1d8b4d18aadbb2bc98387b5ce78f939
|
tests/web/splinter/test_view_album.py
|
tests/web/splinter/test_view_album.py
|
from __future__ import unicode_literals
from tests import with_settings
from tests.web.splinter import TestCase
from catsnap import Client
from catsnap.table.image import Image
from catsnap.table.album import Album
from nose.tools import eq_
class TestUploadImage(TestCase):
@with_settings(bucket='humptydump')
def test_view_an_album(self):
session = Client().session()
album = Album(name="photo sesh")
session.add(album)
session.flush()
silly = Image(album_id=album.album_id, filename="silly")
serious = Image(album_id=album.album_id, filename="serious")
session.add(silly)
session.add(serious)
session.flush()
self.visit_url('/album/{0}'.format(album.album_id))
images = self.browser.find_by_tag('img')
eq_(map(lambda i: i['src'], images), [
'https://s3.amazonaws.com/humptydump/silly',
'https://s3.amazonaws.com/humptydump/serious',
])
eq_(map(lambda i: i['alt'], images), ['silly', 'serious'])
assert self.browser.is_text_present('silly')
assert self.browser.is_text_present('serious')
|
from __future__ import unicode_literals
from tests import with_settings
from tests.web.splinter import TestCase
from catsnap import Client
from catsnap.table.image import Image
from catsnap.table.album import Album
from nose.tools import eq_
class TestViewAlbum(TestCase):
@with_settings(bucket='humptydump')
def test_view_an_album(self):
session = Client().session()
album = Album(name="photo sesh")
session.add(album)
session.flush()
silly = Image(album_id=album.album_id, filename="silly")
serious = Image(album_id=album.album_id, filename="serious")
session.add(silly)
session.add(serious)
session.flush()
self.visit_url('/album/{0}'.format(album.album_id))
images = self.browser.find_by_tag('img')
eq_(map(lambda i: i['src'], images), [
'https://s3.amazonaws.com/humptydump/silly',
'https://s3.amazonaws.com/humptydump/serious',
])
eq_(map(lambda i: i['alt'], images), ['silly', 'serious'])
assert self.browser.is_text_present('silly')
assert self.browser.is_text_present('serious')
|
Fix a test class name
|
Fix a test class name
|
Python
|
mit
|
ErinCall/catsnap,ErinCall/catsnap,ErinCall/catsnap
|
eac78bcb95e2c34a5c2de75db785dd6532306819
|
ibei/main.py
|
ibei/main.py
|
# -*- coding: utf-8 -*-
import numpy as np
from astropy import constants
from astropy import units
from sympy.mpmath import polylog
def uibei(order, energy_lo, temp, chem_potential):
"""
Upper incomplete Bose-Einstein integral.
"""
kT = temp * constants.k_B
reduced_energy_lo = energy_lo / kT
reduced_chem_potential = chem_potential / kT
prefactor = (2 * np.pi * np.math.factorial(order) * kT**(order + 1)) / \
(constants.h**3 * constants.c**2)
summand = 0
for indx in range(1, order + 2):
expt = (reduced_chem_potential - reduced_energy_lo).decompose()
term = reduced_energy_lo**(order - indx + 1) * polylog(indx, np.exp(expt)) / np.math.factorial(order - indx + 1)
summand += term
return summand
def bb_rad_power(temp):
"""
Blackbody radiant power (Stefan-Boltzmann).
"""
return constants.sigma_sb * temp**4
|
# -*- coding: utf-8 -*-
import numpy as np
from astropy import constants
from astropy import units
from sympy.mpmath import polylog
def uibei(order, energy_lo, temp, chem_potential):
"""
Upper incomplete Bose-Einstein integral.
"""
kT = temp * constants.k_B
reduced_energy_lo = energy_lo / kT
reduced_chem_potential = chem_potential / kT
prefactor = (2 * np.pi * np.math.factorial(order) * kT**(order + 1)) / \
(constants.h**3 * constants.c**2)
summand = 0
for indx in range(1, order + 2):
expt = (reduced_chem_potential - reduced_energy_lo).decompose()
term = reduced_energy_lo**(order - indx + 1) * polylog(indx, np.exp(expt)) / np.math.factorial(order - indx + 1)
summand += term
return summand
def bb_rad_power(temp):
"""
Blackbody radiant power (Stefan-Boltzmann).
"""
return constants.sigma_sb * temp**4
def devos_power(bandgap, temp_sun, temp_planet, voltage):
"""
Power calculated according to DeVos Eq. 6.4.
"""
sun = uibei(2, bandgap, temp_sun, 0)
solar_cell = uibei(2, bandgap, temp_sun, constants.q * voltage)
return voltage * constants.e * (sun - solar_cell)
|
Add draft of DeVos solar cell power function
|
Add draft of DeVos solar cell power function
|
Python
|
mit
|
jrsmith3/tec,jrsmith3/ibei,jrsmith3/tec
|
1650c9d9620ba9b9262598d3a47208c6c8180768
|
app/views.py
|
app/views.py
|
from flask import render_template, jsonify
from app import app
from models import Show, Episode
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
@app.route("/api/new-episodes/")
def new_episodes():
data = {
"items": []
}
for episode in Episode.query.filter_by(showcase=True).all():
d = episode.to_api_dict()
d['show'] = episode.getShow()
d['show_slug'] = episode.getShowSlug()
d['image'] = episode.getImage()
data["items"].append(d)
return jsonify(data)
@app.route("/api/shows/")
def shows():
shows_dict = {
"shows": []
}
for show in Show.query.order_by('name').all():
shows_dict["shows"].append(show.to_api_dict())
return jsonify(shows_dict)
@app.route("/api/shows/<slug>")
def episodes(slug):
show = Show.query.filter_by(slug=slug).first()
show_dict = show.to_api_dict()
show_dict['episodes'] = []
for episode in show.get_episodes():
show_dict['episodes'].append(episode.to_api_dict())
return jsonify(show_dict)
|
from flask import render_template, jsonify
from app import app
from models import Show, Episode
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
@app.route("/api/new-episodes/")
def new_episodes():
# Return all episodes with showcase set to True.
data = {
"items": []
}
for episode in Episode.query.filter_by(showcase=True).all():
d = episode.to_api_dict()
d['show'] = episode.getShow()
d['show_slug'] = episode.getShowSlug()
d['image'] = episode.getImage()
data["items"].append(d)
return jsonify(data)
@app.route("/api/shows/")
def shows():
shows_dict = {
"shows": []
}
for show in Show.query.order_by('name').all():
if show.published:
shows_dict["shows"].append(show.to_api_dict())
return jsonify(shows_dict)
@app.route("/api/shows/<slug>")
def episodes(slug):
show = Show.query.filter_by(slug=slug).first()
show_dict = show.to_api_dict()
show_dict['episodes'] = []
for episode in show.get_episodes():
if show.published:
show_dict['episodes'].append(episode.to_api_dict())
return jsonify(show_dict)
|
Make shows and episodes only appear if published is true.
|
Make shows and episodes only appear if published is true.
|
Python
|
mit
|
frequencyasia/website,frequencyasia/website
|
f32685ef4ad847bd237845da6b5b8c44dac0ea9b
|
tests/skipif_markers.py
|
tests/skipif_markers.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
skipif_markers
--------------
Contains pytest skipif markers to be used in the suite.
"""
import pytest
import os
try:
travis = os.environ[u'TRAVIS']
except KeyError:
travis = False
try:
no_network = os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
no_network = False
# For some reason pytest incorrectly uses the first reason text regardless of
# which condition matches. Using a unified message for now
# travis_reason = 'Works locally with tox but fails on Travis.'
# no_network_reason = 'Needs a network connection to GitHub.'
reason = 'Fails on Travis or else there is no network connection to GitHub'
skipif_travis = pytest.mark.skipif(travis, reason=reason)
skipif_no_network = pytest.mark.skipif(no_network, reason=reason)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
skipif_markers
--------------
Contains pytest skipif markers to be used in the suite.
"""
import pytest
import os
try:
os.environ[u'TRAVIS']
except KeyError:
travis = False
else:
travis = True
try:
os.environ[u'DISABLE_NETWORK_TESTS']
except KeyError:
no_network = False
else:
no_network = True
# For some reason pytest incorrectly uses the first reason text regardless of
# which condition matches. Using a unified message for now
# travis_reason = 'Works locally with tox but fails on Travis.'
# no_network_reason = 'Needs a network connection to GitHub.'
reason = 'Fails on Travis or else there is no network connection to GitHub'
skipif_travis = pytest.mark.skipif(travis, reason=reason)
skipif_no_network = pytest.mark.skipif(no_network, reason=reason)
|
Fix travis, make sure skipif condition resolves to a bool
|
Fix travis, make sure skipif condition resolves to a bool
|
Python
|
bsd-3-clause
|
audreyr/cookiecutter,dajose/cookiecutter,kkujawinski/cookiecutter,hackebrot/cookiecutter,venumech/cookiecutter,vintasoftware/cookiecutter,pjbull/cookiecutter,lucius-feng/cookiecutter,ramiroluz/cookiecutter,venumech/cookiecutter,0k/cookiecutter,christabor/cookiecutter,hackebrot/cookiecutter,Vauxoo/cookiecutter,Springerle/cookiecutter,moi65/cookiecutter,pjbull/cookiecutter,agconti/cookiecutter,audreyr/cookiecutter,cguardia/cookiecutter,letolab/cookiecutter,terryjbates/cookiecutter,luzfcb/cookiecutter,vincentbernat/cookiecutter,takeflight/cookiecutter,letolab/cookiecutter,Vauxoo/cookiecutter,nhomar/cookiecutter,janusnic/cookiecutter,benthomasson/cookiecutter,stevepiercy/cookiecutter,vintasoftware/cookiecutter,foodszhang/cookiecutter,Springerle/cookiecutter,benthomasson/cookiecutter,dajose/cookiecutter,atlassian/cookiecutter,0k/cookiecutter,vincentbernat/cookiecutter,takeflight/cookiecutter,janusnic/cookiecutter,terryjbates/cookiecutter,nhomar/cookiecutter,agconti/cookiecutter,jhermann/cookiecutter,willingc/cookiecutter,luzfcb/cookiecutter,tylerdave/cookiecutter,lucius-feng/cookiecutter,ionelmc/cookiecutter,willingc/cookiecutter,moi65/cookiecutter,michaeljoseph/cookiecutter,atlassian/cookiecutter,jhermann/cookiecutter,lgp171188/cookiecutter,sp1rs/cookiecutter,stevepiercy/cookiecutter,drgarcia1986/cookiecutter,foodszhang/cookiecutter,tylerdave/cookiecutter,michaeljoseph/cookiecutter,ramiroluz/cookiecutter,ionelmc/cookiecutter,christabor/cookiecutter,cguardia/cookiecutter,cichm/cookiecutter,cichm/cookiecutter,kkujawinski/cookiecutter,lgp171188/cookiecutter,sp1rs/cookiecutter,drgarcia1986/cookiecutter
|
114a6eb827c0e3dd4557aee8f76fde1bbd111bb9
|
archalice.py
|
archalice.py
|
#!/usr/bin/env python
import os
import re
import time
import sys
from threading import Thread
class testit(Thread):
def __init__ (self,ip):
Thread.__init__(self)
self.ip = ip
self.status = -1
self.responsetime = -1
def run(self):
pingaling = os.popen("ping -q -c2 "+self.ip,"r")
while 1:
line = pingaling.readline()
if not line: break
igot = re.findall(testit.lifeline,line)
if igot:
self.status = int(igot[0])
line = pingaling.readline()
restime = re.search(testit.response, line)
if restime:
self.responsetime = restime.group(1)
testit.lifeline = re.compile(r"(\d) received")
testit.response = re.compile(r'((\d+(\.\d*)?|\.\d+)([eE][-+]?\d+)?) ms')
report = ("No response","Partial Response","Alive")
print time.ctime()
pinglist = []
for host in range(1,10):
ip = "192.168.11."+str(host)
current = testit(ip)
pinglist.append(current)
current.start()
for pingle in pinglist:
pingle.join()
print "Status from ",pingle.ip,"is",report[pingle.status],"time:",pingle.responsetime
print time.ctime()
|
#!/usr/bin/env python
import os
import re
import time
import sys
from threading import Thread
class testit(Thread):
def __init__ (self,ip):
Thread.__init__(self)
self.ip = ip
self.status = -1
self.responsetime = -1
def run(self):
pingaling = os.popen("ping -q -c2 "+self.ip,"r")
while 1:
line = pingaling.readline()
if not line: break
igot = re.findall(testit.lifeline,line)
if igot:
self.status = int(igot[0])
line = pingaling.readline()
restime = re.search(testit.response, line)
if restime:
self.responsetime = restime.group(1)
testit.lifeline = re.compile(r"(\d) received")
testit.response = re.compile(r'((\d+(\.\d*)?|\.\d+)([eE][-+]?\d+)?) ms')
report = ("No response","Partial Response","Alive")
print time.ctime()
pinglist = []
for host in range(1,10):
ip = "192.168.11."+str(host)
current = testit(ip)
pinglist.append(current)
current.start()
for pingle in pinglist:
pingle.join()
print "Status from ",pingle.ip,"is",report[pingle.status],"time:",pingle.responsetime
print time.ctime()
|
Update indentation to 4 spaces
|
Update indentation to 4 spaces
|
Python
|
mit
|
imrehg/archalice
|
b63bda37aa2e9b5251cf6c54d59785d2856659ca
|
tests/python/unittest/test_random.py
|
tests/python/unittest/test_random.py
|
import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
shape = (100, 100)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
|
import os
import mxnet as mx
import numpy as np
def same(a, b):
return np.sum(a != b) == 0
def check_with_device(device):
with mx.Context(device):
a, b = -10, 10
mu, sigma = 10, 2
for i in range(5):
shape = (100 + i, 100 + i)
mx.random.seed(128)
ret1 = mx.random.normal(mu, sigma, shape)
un1 = mx.random.uniform(a, b, shape)
mx.random.seed(128)
ret2 = mx.random.normal(mu, sigma, shape)
un2 = mx.random.uniform(a, b, shape)
assert same(ret1.asnumpy(), ret2.asnumpy())
assert same(un1.asnumpy(), un2.asnumpy())
assert abs(np.mean(ret1.asnumpy()) - mu) < 0.1
assert abs(np.std(ret1.asnumpy()) - sigma) < 0.1
assert abs(np.mean(un1.asnumpy()) - (a+b)/2) < 0.1
def test_random():
check_with_device(mx.cpu())
if __name__ == '__main__':
test_random()
|
Update random number generator test
|
Update random number generator test
|
Python
|
apache-2.0
|
sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet,sxjscience/mxnet
|
221413b5715286bb7b61e18f8e678f2ca097a5e1
|
rover.py
|
rover.py
|
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
|
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
def set_position(self, x, y, direction):
self.x = x
self.y = y
self.direction = direction
|
Add set_position method to Rover
|
Add set_position method to Rover
|
Python
|
mit
|
authentik8/rover
|
64038fad35e7a1b9756921a79b6b13d59925e682
|
tests/test_endpoints.py
|
tests/test_endpoints.py
|
import unittest
from soccermetrics.rest import SoccermetricsRestClient
class ClientEndpointTest(unittest.TestCase):
"""
Test endpoints of API resources in client.
"""
def setUp(self):
self.client = SoccermetricsRestClient(account="APP_ID",api_key="APP_KEY")
def test_service_root(self):
self.assertEqual(self.client.root.endpoint, "/v1/")
|
import unittest
from soccermetrics.rest import SoccermetricsRestClient
class ClientEndpointTest(unittest.TestCase):
"""
Test endpoints of API resources in client.
"""
def setUp(self):
self.client = SoccermetricsRestClient(account="APP_ID",api_key="APP_KEY")
def test_service_root(self):
self.assertEqual(self.client.root.endpoint, "/v1/")
def test_base_endpoints(self):
self.assertEqual(self.client.validation.phases.endpoint, "/v1/phases")
self.assertEqual(self.client.validation.groupRounds.endpoint, '/v1/grouprounds')
self.assertEqual(self.client.validation.knockoutRounds.endpoint, '/v1/knockoutrounds')
self.assertEqual(self.client.validation.confederations.endpoint, '/v1/confederations')
self.assertEqual(self.client.validation.countries.endpoint, '/v1/countries')
self.assertEqual(self.client.validation.seasons.endpoint, '/v1/seasons')
self.assertEqual(self.client.validation.teams.endpoint, '/v1/teams')
self.assertEqual(self.client.validation.venues.endpoint, '/v1/venues')
self.assertEqual(self.client.validation.timezones.endpoint, '/v1/timezones')
self.assertEqual(self.client.validation.persons.endpoint, '/v1/persons')
self.assertEqual(self.client.validation.positions.endpoint, '/v1/positions')
self.assertEqual(self.client.validation.fouls.endpoint, '/v1/fouls')
self.assertEqual(self.client.validation.cards.endpoint, '/v1/cards')
self.assertEqual(self.client.validation.bodyparts.endpoint, '/v1/bodyparts')
self.assertEqual(self.client.validation.shotevents.endpoint, '/v1/shotevents')
self.assertEqual(self.client.validation.penaltyOutcomes.endpoint, '/v1/penalty_outcomes')
self.assertEqual(self.client.validation.weather.endpoint, '/v1/weather')
self.assertEqual(self.client.validation.surfaces.endpoint, '/v1/surfaces')
def test_personnel_endpoints(self):
self.assertEqual(self.client.players.endpoint, '/v1/personnel/players')
self.assertEqual(self.client.managers.endpoint, '/v1/personnel/managers')
self.assertEqual(self.client.referees.endpoint, '/v1/personnel/referees')
|
Expand test on URL endpoints in API client
|
Expand test on URL endpoints in API client
|
Python
|
mit
|
soccermetrics/soccermetrics-client-py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.