commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
15f482fbb7b1b98b48545f6e5ab3986859c38e55
|
watchman/main.py
|
watchman/main.py
|
from __future__ import print_function
import sys
import os
from sh import cd, hg
def _get_subdirectories(current_dir):
return [directory for directory in os.listdir(current_dir)
if os.path.isdir(os.path.join(current_dir, directory))
and directory[0] != '.']
def check():
current_working_directory = os.getcwd()
child_dirs = _get_subdirectories(current_working_directory)
for child in child_dirs:
try:
change_dir = '%s/%s' % (current_working_directory, child)
cd(change_dir); current_branch = hg('branch')
output = '%-25s is on branch: %s' % (child, current_branch)
print(output, end=''); cd('..') # print and step back one dir
except Exception:
continue
def main():
arguments = sys.argv
if 'check' == arguments[1]:
check()
else:
print("type watchman help for, you know, help.")
if __name__ == '__main__':
main()
|
from __future__ import print_function
import sys
import os
from sh import cd, hg
def _get_subdirectories(current_dir):
return [directory for directory in os.listdir(current_dir)
if os.path.isdir(os.path.join(current_dir, directory))
and directory[0] != '.']
def check():
current_working_directory = os.getcwd()
child_dirs = _get_subdirectories(current_working_directory)
for child in child_dirs:
try:
current_branch = hg('branch', '-R', './%s' % child)
output = '%-25s is on branch: %s' % (child, current_branch)
print(output, end='')
except Exception as e:
continue
def main():
arguments = sys.argv
if 'check' == arguments[1]:
check()
else:
print("type watchman help for, you know, help.")
if __name__ == '__main__':
main()
|
Remove change dir commands and now it sends directly.
|
Remove change dir commands and now it sends directly.
|
Python
|
mit
|
alephmelo/watchman
|
88647bd762da9619c066c9bd79e48cb234247707
|
geotagging/views.py
|
geotagging/views.py
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.contenttypes.models import ContentType
from geotagging.models import Point
def add_edit_point(request, content_type_id, object_id,
template=None, form_class=None):
model_class = ContentType.objects.get(id=content_type_id).model_class()
object = model_class.objects.get(id=object_id)
object_content_type = ContentType.objects.get_for_model(object)
geotag = Point.objects.get(content_type__pk=object_content_type.id,
object_id=object.id)
if request.method == "POST":
form = form_class(request.POST, instance=geotag)
if form.is_valid():
new_object = form.save(commit=False)
new_object.object = object
new_object.save()
return HttpResponseRedirect("/admin/%s/%s/%s/"
%(object_content_type.app_label,
object_content_type.model,
object.id))
form = form_class(instance=geotag)
#import ipdb; ipdb.set_trace()
context = RequestContext(request, {
'form': form,
'object' : object,
'object_content_type' : object_content_type,
'geotag' : geotag,
})
return render_to_response(template, context_instance=context )
|
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from geotagging.models import Point
def add_edit_point(request, content_type_id, object_id,
template=None, form_class=None):
model_class = ContentType.objects.get(id=content_type_id).model_class()
object = model_class.objects.get(id=object_id)
object_content_type = ContentType.objects.get_for_model(object)
try:
geotag = Point.objects.get(content_type__pk=object_content_type.id,
object_id=object.id)
except ObjectDoesNotExist:
geotag = None
if request.method == "POST":
form = form_class(request.POST, instance=geotag)
if form.is_valid():
new_object = form.save(commit=False)
new_object.object = object
new_object.save()
return HttpResponseRedirect("/admin/%s/%s/%s/"
%(object_content_type.app_label,
object_content_type.model,
object.id))
form = form_class(instance=geotag)
#import ipdb; ipdb.set_trace()
context = RequestContext(request, {
'form': form,
'object' : object,
'object_content_type' : object_content_type,
'geotag' : geotag,
})
return render_to_response(template, context_instance=context )
|
Fix a bug when you try to add a geo tag to an object that does not have already one
|
Fix a bug when you try to add a geo tag to an object that does not have already one
|
Python
|
bsd-3-clause
|
uclastudentmedia/django-geotagging,uclastudentmedia/django-geotagging,uclastudentmedia/django-geotagging
|
ef11a6388dabd07afb3d11f7b097226e68fdf243
|
project/estimation/models.py
|
project/estimation/models.py
|
from .. import db
class Question(db.Model):
id = db.Column(db.Integer, primary_key=True)
text = db.Column(db.String(240), unique=True, index=True)
answer = db.Column(db.Numeric)
|
from .. import db
class Question(db.Model):
id = db.Column(db.Integer, primary_key=True)
text = db.Column(db.String(240), unique=True, index=True)
answer = db.Column(db.Numeric)
class Estimate(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
question_id = db.Column(db.Integer, db.ForeignKey('question.id'))
lowerbound = db.Column(db.Numeric)
upperbound = db.Column(db.Numeric)
created_on = db.Column(db.DateTime, default=db.func.now())
|
Add model to keep track of users' estimates.
|
Add model to keep track of users' estimates.
|
Python
|
mit
|
rahimnathwani/measure-anything
|
224269d0794d1037213b429c0fcb7c5953129230
|
aldryn_config.py
|
aldryn_config.py
|
# -*- coding: utf-8 -*-
from aldryn_client import forms
class Form(forms.BaseForm):
def to_settings(self, cleaned_data, settings_dict):
settings_dict['MIDDLEWARE_CLASSES'].append(
'country_segment.middleware.ResolveCountryCodeMiddleware')
return settings_dict
|
# -*- coding: utf-8 -*-
from aldryn_client import forms
class Form(forms.BaseForm):
def to_settings(self, cleaned_data, settings_dict):
country_mw = 'country_segment.middleware.ResolveCountryCodeMiddleware'
if country_mw not in settings_dict['MIDDLEWARE_CLASSES']:
for position, mw in enumerate(settings_dict['MIDDLEWARE_CLASSES']):
#
# Its not a strict requirement that the
# ResolveCountryCodeMiddleware go after SessionMiddleware,
# but, it seems like a pretty nice place.
#
if mw == 'django.contrib.sessions.middleware.SessionMiddleware':
settings_dict['MIDDLEWARE_CLASSES'].insert(position + 1, country_mw)
break
else:
#
# B'okay, not sure how this CMS installation works, but...
# let's just put it at the top.
#
settings_dict['MIDDLEWARE_CLASSES'].insert(0, country_mw)
return settings_dict
|
Put the middleware near the top (again).
|
Put the middleware near the top (again).
|
Python
|
bsd-3-clause
|
aldryn/aldryn-country-segment
|
be7c5fc964ce3386df2bf246f12838e4ba2a2cb6
|
saleor/core/utils/filters.py
|
saleor/core/utils/filters.py
|
from __future__ import unicode_literals
def get_sort_by_choices(filter):
return [(choice[0], choice[1].lower()) for choice in
filter.filters['sort_by'].field.choices[1::2]]
def get_now_sorted_by(filter, fields):
sort_by = filter.form.cleaned_data.get('sort_by')
if sort_by:
sort_by = fields[sort_by[0].strip('-')]
else:
sort_by = fields['name']
return sort_by
|
from __future__ import unicode_literals
def get_sort_by_choices(filter):
return [(choice[0], choice[1].lower()) for choice in
filter.filters['sort_by'].field.choices[1::2]]
def get_now_sorted_by(filter, fields, default_sort='name'):
sort_by = filter.form.cleaned_data.get('sort_by')
if sort_by:
sort_by = fields[sort_by[0].strip('-')]
else:
sort_by = fields[default_sort]
return sort_by
|
Add default_sort param to get_now_sorting_by
|
Add default_sort param to get_now_sorting_by
|
Python
|
bsd-3-clause
|
UITools/saleor,UITools/saleor,UITools/saleor,maferelo/saleor,mociepka/saleor,maferelo/saleor,mociepka/saleor,mociepka/saleor,UITools/saleor,maferelo/saleor,UITools/saleor
|
2ad47f6ce00246cbf54639438d9279b8a7fa9b29
|
python/tests/t_envoy_logs.py
|
python/tests/t_envoy_logs.py
|
import pytest, re
from kat.utils import ShellCommand
from abstract_tests import AmbassadorTest, ServiceType, HTTP
access_log_entry_regex = re.compile('^ACCESS \\[.*?\\] \\\"GET \\/ambassador')
class EnvoyLogPathTest(AmbassadorTest):
target: ServiceType
log_path: str
def init(self):
self.target = HTTP()
self.log_path = '/tmp/ambassador/ambassador.log'
def config(self):
yield self, self.format("""
---
apiVersion: ambassador/v1
kind: Module
name: ambassador
ambassador_id: {self.ambassador_id}
config:
envoy_log_path: {self.log_path}
""")
def check(self):
cmd = ShellCommand("kubectl", "exec", self.path.k8s, "cat", self.log_path)
if not cmd.check("check envoy access log"):
pytest.exit("envoy access log does not exist")
for line in cmd.stdout.splitlines():
assert access_log_entry_regex.match(line)
|
import pytest, re
from kat.utils import ShellCommand
from abstract_tests import AmbassadorTest, ServiceType, HTTP
access_log_entry_regex = re.compile('^MY_REQUEST 200 .*')
class EnvoyLogTest(AmbassadorTest):
target: ServiceType
log_path: str
def init(self):
self.target = HTTP()
self.log_path = '/tmp/ambassador/ambassador.log'
self.log_format = 'MY_REQUEST %RESPONSE_CODE% \"%REQ(:AUTHORITY)%\" \"%REQ(USER-AGENT)%\" \"%REQ(X-REQUEST-ID)%\" \"%UPSTREAM_HOST%\"'
def config(self):
yield self, self.format("""
---
apiVersion: ambassador/v1
kind: Module
name: ambassador
ambassador_id: {self.ambassador_id}
config:
envoy_log_path: {self.log_path}
envoy_log_format: {self.log_format}
""")
def check(self):
cmd = ShellCommand("kubectl", "exec", self.path.k8s, "cat", self.log_path)
if not cmd.check("check envoy access log"):
pytest.exit("envoy access log does not exist")
for line in cmd.stdout.splitlines():
assert access_log_entry_regex.match(line), f"{line} does not match {access_log_entry_regex}"
|
Test for Envoy logs format
|
Test for Envoy logs format
Signed-off-by: Alvaro Saurin <5b2d0c210c4a9fd6aeaf2eaedf8273be993c90c2@datawire.io>
|
Python
|
apache-2.0
|
datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador
|
7adfe4822bf75d1df2dc2a566b3b26c9fd494431
|
rest_framework_jwt/compat.py
|
rest_framework_jwt/compat.py
|
from distutils.version import StrictVersion
import rest_framework
from rest_framework import serializers
from django.forms import widgets
if StrictVersion(rest_framework.VERSION) < StrictVersion('3.0.0'):
class Serializer(serializers.Serializer):
pass
class PasswordField(serializers.CharField):
widget = widgets.PasswordInput
else:
class Serializer(serializers.Serializer):
@property
def object(self):
return self.validated_data
class PasswordField(serializers.CharField):
def __init__(self, *args, **kwargs):
if 'style' not in kwargs:
kwargs['style'] = {'input_type': 'password'}
else:
kwargs['style']['input_type'] = 'password'
super(PasswordField, self).__init__(*args, **kwargs)
def get_user_model():
try:
from django.contrib.auth import get_user_model
except ImportError: # Django < 1.5
from django.contrib.auth.models import User
else:
User = get_user_model()
return User
def get_username_field():
try:
username_field = get_user_model().USERNAME_FIELD
except:
username_field = 'username'
return username_field
def get_username(user):
try:
username = user.get_username()
except AttributeError:
username = user.username
return username
def get_request_data(request):
if getattr(request, 'data', None):
data = request.data
else:
# DRF < 3.2
data = request.DATA
return data
|
from distutils.version import StrictVersion
import rest_framework
from rest_framework import serializers
from django.forms import widgets
DRF_VERSION_INFO = StrictVersion(rest_framework.VERSION).version
DRF2 = DRF_VERSION_INFO[0] == 2
DRF3 = DRF_VERSION_INFO[0] == 3
if DRF2:
class Serializer(serializers.Serializer):
pass
class PasswordField(serializers.CharField):
widget = widgets.PasswordInput
else:
class Serializer(serializers.Serializer):
@property
def object(self):
return self.validated_data
class PasswordField(serializers.CharField):
def __init__(self, *args, **kwargs):
if 'style' not in kwargs:
kwargs['style'] = {'input_type': 'password'}
else:
kwargs['style']['input_type'] = 'password'
super(PasswordField, self).__init__(*args, **kwargs)
def get_user_model():
try:
from django.contrib.auth import get_user_model
except ImportError: # Django < 1.5
from django.contrib.auth.models import User
else:
User = get_user_model()
return User
def get_username_field():
try:
username_field = get_user_model().USERNAME_FIELD
except:
username_field = 'username'
return username_field
def get_username(user):
try:
username = user.get_username()
except AttributeError:
username = user.username
return username
def get_request_data(request):
if DRF2:
data = request.DATA
else:
data = request.data
return data
|
Use request.data in DRF >= 3
|
Use request.data in DRF >= 3
|
Python
|
mit
|
orf/django-rest-framework-jwt,shanemgrey/django-rest-framework-jwt,GetBlimp/django-rest-framework-jwt,blaklites/django-rest-framework-jwt,plentific/django-rest-framework-jwt,ArabellaTech/django-rest-framework-jwt
|
b8139440a2509d5b197889664f9ec34be9296210
|
form_designer/contrib/cms_plugins/form_designer_form/cms_plugins.py
|
form_designer/contrib/cms_plugins/form_designer_form/cms_plugins.py
|
from form_designer.contrib.cms_plugins.form_designer_form.models import CMSFormDefinition
from form_designer.views import process_form
from form_designer import settings
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
class FormDesignerPlugin(CMSPluginBase):
model = CMSFormDefinition
module = _('Form Designer')
name = _('Form')
admin_preview = False
render_template = False
def render(self, context, instance, placeholder):
if instance.form_definition.form_template_name:
self.render_template = instance.form_definition.form_template_name
else:
self.render_template = settings.DEFAULT_FORM_TEMPLATE
# Redirection does not work with CMS plugin, hence disable:
return process_form(context['request'], instance.form_definition, context, disable_redirection=True)
plugin_pool.register_plugin(FormDesignerPlugin)
|
from form_designer.contrib.cms_plugins.form_designer_form.models import CMSFormDefinition
from form_designer.views import process_form
from form_designer import settings
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext as _
class FormDesignerPlugin(CMSPluginBase):
model = CMSFormDefinition
module = _('Form Designer')
name = _('Form')
admin_preview = False
render_template = False
cache = False
def render(self, context, instance, placeholder):
if instance.form_definition.form_template_name:
self.render_template = instance.form_definition.form_template_name
else:
self.render_template = settings.DEFAULT_FORM_TEMPLATE
# Redirection does not work with CMS plugin, hence disable:
return process_form(context['request'], instance.form_definition, context, disable_redirection=True)
plugin_pool.register_plugin(FormDesignerPlugin)
|
Disable caching so CSRF tokens are not cached.
|
Disable caching so CSRF tokens are not cached.
|
Python
|
bsd-3-clause
|
USGM/django-form-designer,USGM/django-form-designer
|
21f209b618850d15734c476bd3c1b359b9a7426e
|
infosystem/queue.py
|
infosystem/queue.py
|
import flask
from pika import BlockingConnection, PlainCredentials, ConnectionParameters
class RabbitMQ:
def __init__(self):
self.url = flask.current_app.config['ORMENU_QUEUE_URL']
self.port = flask.current_app.config['ORMENU_QUEUE_PORT']
self.virtual_host = \
flask.current_app.config['ORMENU_QUEUE_VIRTUAL_HOST']
self.username = flask.current_app.config['ORMENU_QUEUE_USERNAME']
self.password = flask.current_app.config['ORMENU_QUEUE_PASSWORD']
credentials = PlainCredentials(self.username, self.password)
self.params = ConnectionParameters(
self.url, self.port, self.virtual_host, credentials)
def connect(self):
try:
return BlockingConnection(self.params)
except Exception as e:
raise
class ProducerQueue:
def __init__(self, exchange, exchange_type):
rabbitMQ = RabbitMQ()
self.connection = rabbitMQ.connect()
self.exchange = exchange
self.channel = self.connection.channel()
self.channel.exchange_declare(
exchange=exchange, exchange_type=exchange_type, durable=True)
def publish(self, routing_key):
body = ""
self.channel.basic_publish(
exchange=self.exchange, routing_key=routing_key, body=body)
self.close()
def close(self):
self.channel.close()
self.connection.close()
|
import flask
from pika import BlockingConnection, PlainCredentials, ConnectionParameters
class RabbitMQ:
def __init__(self):
self.url = flask.current_app.config['INFOSYSTEM_QUEUE_URL']
self.port = flask.current_app.config['INFOSYSTEM_QUEUE_PORT']
self.virtual_host = \
flask.current_app.config['INFOSYSTEM_QUEUE_VIRTUAL_HOST']
self.username = flask.current_app.config['INFOSYSTEM_QUEUE_USERNAME']
self.password = flask.current_app.config['INFOSYSTEM_QUEUE_PASSWORD']
credentials = PlainCredentials(self.username, self.password)
self.params = ConnectionParameters(
self.url, self.port, self.virtual_host, credentials)
def connect(self):
try:
return BlockingConnection(self.params)
except Exception as e:
raise
class ProducerQueue:
def __init__(self, exchange, exchange_type):
rabbitMQ = RabbitMQ()
self.connection = rabbitMQ.connect()
self.exchange = exchange
self.channel = self.connection.channel()
self.channel.exchange_declare(
exchange=exchange, exchange_type=exchange_type, durable=True)
def publish(self, routing_key):
body = ""
self.channel.basic_publish(
exchange=self.exchange, routing_key=routing_key, body=body)
self.close()
def close(self):
self.channel.close()
self.connection.close()
|
Use INFOSYSTEM enviroment for Queue
|
Use INFOSYSTEM enviroment for Queue
|
Python
|
apache-2.0
|
samueldmq/infosystem
|
305ba7ee3fff41a7d866968c5332394301c0e83f
|
digi/wagtail_hooks.py
|
digi/wagtail_hooks.py
|
from wagtail.contrib.modeladmin.options import \
ModelAdmin, ModelAdminGroup, modeladmin_register
from .models import Indicator, FooterLinkSection
class IndicatorAdmin(ModelAdmin):
model = Indicator
menu_icon = 'user'
class FooterLinkSectionAdmin(ModelAdmin):
model = FooterLinkSection
menu_icon = 'redirect'
class DigiHelAdminGroup(ModelAdminGroup):
label = "DigiHel"
items = (IndicatorAdmin, FooterLinkSectionAdmin)
modeladmin_register(DigiHelAdminGroup)
|
from wagtail.contrib.modeladmin.options import \
ModelAdmin, ModelAdminGroup, modeladmin_register
from .models import Indicator, FooterLinkSection
from django.utils.html import format_html
from wagtail.wagtailcore import hooks
class IndicatorAdmin(ModelAdmin):
model = Indicator
menu_icon = 'user'
class FooterLinkSectionAdmin(ModelAdmin):
model = FooterLinkSection
menu_icon = 'redirect'
class DigiHelAdminGroup(ModelAdminGroup):
label = "DigiHel"
items = (IndicatorAdmin, FooterLinkSectionAdmin)
modeladmin_register(DigiHelAdminGroup)
# Enable editing of raw HTML
@hooks.register('insert_editor_js')
def enable_source_editing():
return format_html(
"""
<script>
registerHalloPlugin('hallohtml');
</script>
"""
)
|
Enable HTML source editing in the content editor
|
Enable HTML source editing in the content editor
|
Python
|
mit
|
terotic/digihel,City-of-Helsinki/digihel,terotic/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel,terotic/digihel,City-of-Helsinki/digihel
|
60497ba61c80863cd0414e39a9cd12b42b519897
|
chainer/training/extensions/value_observation.py
|
chainer/training/extensions/value_observation.py
|
from chainer.training import extension
import time
def observe_value(key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: trainer object.
Returns:
The extension function.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[key] = target_func(trainer)
return _observe_value
def observe_time(key='time'):
"""Returns a trainer extension to record the elapsed time.
Args:
key (str): Key of observation to record.
Returns:
The extension function.
"""
start_time = time.time()
return observe_value(key, lambda _: time.time() - start_time)
def observe_lr(optimizer, key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer: Optimizer object whose learning rate is recorded.
key (str): Key of observation to record.
Returns:
The extension function.
"""
return observe_value(key, lambda _: optimizer.lr)
|
import time
from chainer.training import extension
def observe_value(key, target_func):
"""Returns a trainer extension to continuously record a value.
Args:
key (str): Key of observation to record.
target_func (function): Function that returns the value to record.
It must take one argument: trainer object.
Returns:
The extension function.
"""
@extension.make_extension(
trigger=(1, 'epoch'), priority=extension.PRIORITY_WRITER)
def _observe_value(trainer):
trainer.observation[key] = target_func(trainer)
return _observe_value
def observe_time(key='time'):
"""Returns a trainer extension to record the elapsed time.
Args:
key (str): Key of observation to record.
Returns:
The extension function.
"""
start_time = time.time()
return observe_value(key, lambda _: time.time() - start_time)
def observe_lr(optimizer, key='lr'):
"""Returns a trainer extension to record the learning rate.
Args:
optimizer: Optimizer object whose learning rate is recorded.
key (str): Key of observation to record.
Returns:
The extension function.
"""
return observe_value(key, lambda _: optimizer.lr)
|
Split system import and project import
|
Split system import and project import
|
Python
|
mit
|
cupy/cupy,chainer/chainer,keisuke-umezawa/chainer,wkentaro/chainer,okuta/chainer,ktnyt/chainer,wkentaro/chainer,jnishi/chainer,tkerola/chainer,keisuke-umezawa/chainer,jnishi/chainer,niboshi/chainer,delta2323/chainer,keisuke-umezawa/chainer,ktnyt/chainer,chainer/chainer,ysekky/chainer,chainer/chainer,keisuke-umezawa/chainer,niboshi/chainer,aonotas/chainer,rezoo/chainer,hvy/chainer,pfnet/chainer,okuta/chainer,jnishi/chainer,hvy/chainer,kiyukuta/chainer,cupy/cupy,niboshi/chainer,anaruse/chainer,hvy/chainer,chainer/chainer,niboshi/chainer,ronekko/chainer,jnishi/chainer,ktnyt/chainer,hvy/chainer,okuta/chainer,cupy/cupy,cupy/cupy,kashif/chainer,okuta/chainer,wkentaro/chainer,wkentaro/chainer,ktnyt/chainer
|
14ea472acfce8b5317a8c8c970db901501ea34c0
|
_tests/macro_testing/runner.py
|
_tests/macro_testing/runner.py
|
# -*- coding: utf-8 -*-
import os, os.path
import sys
import unittest
from macrotest import JSONSpecMacroTestCaseFactory
def JSONTestCaseLoader(tests_path, recursive=False):
"""
Load JSON specifications for Jinja2 macro test cases from the given
path and returns the resulting test classes.
This function will create a MacroTestCase subclass (using
JSONSpecMacrosTestCaseFactory) for each JSON file in the given path.
If `recursive` is True, it will also look in subdirectories. This is
not yet supported.
"""
json_files = [f for f in os.listdir(tests_path) if f.endswith('.json')]
for json_file in json_files:
# Create a camelcased name for the test. This is a minor thing, but I
# think it's nice.
name, extension = os.path.splitext(json_file)
class_name = ''.join(x for x in name.title() if x not in ' _-') + 'TestCase'
# Get the full path to the file and create a test class
json_file_path = os.path.join(tests_path, json_file)
test_class = JSONSpecMacroTestCaseFactory(class_name, json_file_path)
# Add the test class to globals() so that unittest.main() picks it up
globals()[class_name] = test_class
if __name__ == '__main__':
JSONTestCaseLoader('./tests/')
unittest.main()
|
# -*- coding: utf-8 -*-
import os, os.path
import sys
import unittest
from macrotest import JSONSpecMacroTestCaseFactory
def JSONTestCaseLoader(tests_path, recursive=False):
"""
Load JSON specifications for Jinja2 macro test cases from the given
path and returns the resulting test classes.
This function will create a MacroTestCase subclass (using
JSONSpecMacrosTestCaseFactory) for each JSON file in the given path.
If `recursive` is True, it will also look in subdirectories. This is
not yet supported.
"""
path = os.path.abspath(os.path.join(os.path.dirname( __file__ ), tests_path))
json_files = [f for f in os.listdir(path) if f.endswith('.json')]
for json_file in json_files:
# Create a camelcased name for the test. This is a minor thing, but I
# think it's nice.
name, extension = os.path.splitext(json_file)
class_name = ''.join(x for x in name.title() if x not in ' _-') + 'TestCase'
# Get the full path to the file and create a test class
json_file_path = os.path.join(path, json_file)
test_class = JSONSpecMacroTestCaseFactory(class_name, json_file_path)
# Add the test class to globals() so that unittest.main() picks it up
globals()[class_name] = test_class
if __name__ == '__main__':
JSONTestCaseLoader('./tests/')
unittest.main()
|
Make the paths not relative, so tests can be run from anywhere.
|
Make the paths not relative, so tests can be run from anywhere.
|
Python
|
cc0-1.0
|
kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh
|
90699f4fa6c1ce2b02e81a8fef9bfafd2175fa7f
|
kmapper/__init__.py
|
kmapper/__init__.py
|
from .kmapper import KeplerMapper
from .kmapper import cluster
from .kmapper import Cover
from .kmapper import GraphNerve
|
from .kmapper import KeplerMapper
from .kmapper import cluster
from .cover import Cover
from .nerve import GraphNerve
import pkg_resources
__version__ = pkg_resources.get_distribution('kmapper').version
|
Add __version__ variable to package
|
Add __version__ variable to package
|
Python
|
mit
|
MLWave/kepler-mapper,MLWave/kepler-mapper,MLWave/kepler-mapper
|
884852eeb2dec07dccefc26595f097ec9ae8532b
|
forum/forms.py
|
forum/forms.py
|
from django.forms import ModelForm,Textarea
from .models import Post
class PostForm(ModelForm):
class Meta:
model = Post
fields = ('subject','body')
widgets = {
'body': Textarea(
attrs={
'data-provide':'markdown',
'data-hidden-buttons':'cmdHeading',
}),
}
|
from django.forms import ModelForm,Textarea,TextInput
from .models import Post
class PostForm(ModelForm):
class Meta:
model = Post
fields = ('subject','body')
widgets = {
'subject': TextInput(attrs={'autofocus':'autofocus'}),
'body': Textarea(
attrs={
'data-provide':'markdown',
'data-hidden-buttons':'cmdHeading',
}),
}
|
Add autofocus to subject field
|
Add autofocus to subject field
|
Python
|
mit
|
Kromey/fbxnano,Kromey/akwriters,Kromey/fbxnano,Kromey/akwriters,Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters
|
1b7634e3a98919df5f2f4d54c57bb72dfbf308df
|
py3-test/tests.py
|
py3-test/tests.py
|
# -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import get_event_loop
from asyncio import sleep as async_sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
ee = EventEmitter()
loop = get_event_loop()
class SenseWasCalled():
def __init__(self):
self.was_called = False
def am_calling(self):
self.was_called = True
def assert_was_called(self):
nt.assert_true(self.was_called)
sensor = SenseWasCalled()
@ee.on('event')
async def event_handler():
sensor.am_calling()
ee.emit('event')
loop.run_until_complete(async_sleep(1))
sensor.assert_was_called()
|
# -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import Future, gather, get_event_loop, sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
loop = get_event_loop()
ee = EventEmitter(loop=loop)
future = Future()
@ee.on('event')
async def event_handler():
future.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
future.cancel()
timeout = create_timeout(loop=loop)
@future.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
loop.run_until_complete(gather(future, timeout))
|
Rewrite asyncio test to use futures
|
Rewrite asyncio test to use futures
|
Python
|
mit
|
jfhbrook/pyee
|
a8bb719061a68b5d322868768203476c4ee1e9b9
|
gnocchi/cli.py
|
gnocchi/cli.py
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from gnocchi.indexer import sqlalchemy as sql_db
from gnocchi.rest import app
from gnocchi import service
def storage_dbsync():
service.prepare_service()
indexer = sql_db.SQLAlchemyIndexer(cfg.CONF)
indexer.upgrade()
def api():
service.prepare_service()
app.build_server()
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from gnocchi.indexer import sqlalchemy as sql_db
from gnocchi.rest import app
from gnocchi import service
def storage_dbsync():
service.prepare_service()
indexer = sql_db.SQLAlchemyIndexer(cfg.CONF)
indexer.connect()
indexer.upgrade()
def api():
service.prepare_service()
app.build_server()
|
Connect to database before upgrading it
|
Connect to database before upgrading it
This change ensure we are connected to the database before
we upgrade it.
Change-Id: Ia0be33892a99897ff294d004f4d935f3753e6200
|
Python
|
apache-2.0
|
idegtiarov/gnocchi-rep,leandroreox/gnocchi,sileht/gnocchi,idegtiarov/gnocchi-rep,gnocchixyz/gnocchi,sileht/gnocchi,idegtiarov/gnocchi-rep,gnocchixyz/gnocchi,leandroreox/gnocchi
|
82740c7956a2bae0baceedd658b9ad9352254ad0
|
nlppln/wfgenerator.py
|
nlppln/wfgenerator.py
|
from scriptcwl import WorkflowGenerator as WFGenerator
from .utils import CWL_PATH
class WorkflowGenerator(WFGenerator):
def __init__(self, working_dir=None, copy_steps=True):
WFGenerator.__init__(self, steps_dir=CWL_PATH, working_dir=working_dir,
copy_steps=copy_steps)
def save(self, fname, inline=True, relative=False, validate=True,
encoding='utf-8'):
"""Save workflow to file
For nlppln, the default is to save steps inline.
"""
super(WorkflowGenerator, self).save(fname,
inline=inline,
relative=relative,
validate=validate,
encoding=encoding)
|
from scriptcwl import WorkflowGenerator as WFGenerator
from .utils import CWL_PATH
class WorkflowGenerator(WFGenerator):
    """nlppln workflow generator.

    Wraps scriptcwl's ``WorkflowGenerator``, pointing it at the CWL steps
    bundled with nlppln (``CWL_PATH``).
    """

    def __init__(self, working_dir=None):
        WFGenerator.__init__(self, steps_dir=CWL_PATH, working_dir=working_dir)

    def save(self, fname, validate=True, wd=True, inline=False, relative=False,
             pack=False, encoding='utf-8'):
        """Save workflow to file

        For nlppln, the default is to use a working directory (and save steps
        using the ``wd`` option).
        """
        super(WorkflowGenerator, self).save(fname,
                                            validate=validate,
                                            wd=wd,
                                            inline=inline,
                                            relative=relative,
                                            pack=pack,
                                            encoding=encoding)
|
Update to use newest (unreleased) scriptcwl options
|
Update to use newest (unreleased) scriptcwl options
|
Python
|
apache-2.0
|
WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln
|
b728470c61fbd742052e5befb4c27adbacef1a7e
|
pinax_theme_bootstrap/templatetags/pinax_theme_bootstrap_tags.py
|
pinax_theme_bootstrap/templatetags/pinax_theme_bootstrap_tags.py
|
from django import template
from django.contrib.messages.utils import get_level_tags
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
    """
    Returns tags for a message: Bootstrap's "alert-<level>" class plus
    any extra tags carried on the message.
    """
    # Map Django's numeric message level to its tag name (e.g. "error").
    level_name = LEVEL_TAGS[message.level]
    # Bootstrap uses "danger" where Django uses "error".
    if level_name == u"error":
        level_name = u"danger"
    level_tag = u"alert-{name}".format(name=level_name)
    tags = [level_tag]
    if message.extra_tags:
        tags.append(message.extra_tags)
    return u" ".join(tags)
|
from django import template
from django.contrib.messages.utils import get_level_tags
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
    """
    Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
    along with any tags included in message.extra_tags

    Messages in Django >= 1.7 have a message.level_tag attr
    """
    level_tag = LEVEL_TAGS[message.level]
    # Bootstrap uses "danger" where Django uses "error".
    if level_tag == u"error":
        level_tag = u"danger"
    alert_level_tag = u"alert-{tag}".format(tag=level_tag)
    tags = [alert_level_tag]
    if message.extra_tags:
        tags.append(message.extra_tags)
    return u" ".join(tags)
|
Use level_tag to be consistent with Django >= 1.7
|
Use level_tag to be consistent with Django >= 1.7
|
Python
|
mit
|
grahamu/pinax-theme-bootstrap,jacobwegner/pinax-theme-bootstrap,foraliving/foraliving,jacobwegner/pinax-theme-bootstrap,druss16/danslist,druss16/danslist,foraliving/foraliving,foraliving/foraliving,grahamu/pinax-theme-bootstrap,jacobwegner/pinax-theme-bootstrap,grahamu/pinax-theme-bootstrap,druss16/danslist
|
fec7885d2632b887002f0071f4898faf52dd927c
|
chainerx/__init__.py
|
chainerx/__init__.py
|
import sys

# chainerx supports Python 3 only; on Python 2 mark it unavailable up front.
if sys.version_info[0] < 3:
    _available = False
else:
    try:
        from chainerx import _core
        _available = True
    except Exception:
        # Any failure importing the native extension disables chainerx.
        _available = False

if _available:
    from numpy import dtype, bool_, int8, int16, int32, int64, uint8, float32, float64  # NOQA
    from chainerx._core import *  # NOQA
    from builtins import bool, int, float  # NOQA

    from chainerx.creation.from_data import asanyarray  # NOQA
    from chainerx.creation.from_data import fromfile  # NOQA
    from chainerx.creation.from_data import fromfunction  # NOQA
    from chainerx.creation.from_data import fromiter  # NOQA
    from chainerx.creation.from_data import fromstring  # NOQA
    from chainerx.creation.from_data import loadtxt  # NOQA

    # Create and install the process-wide default context.
    _global_context = _core.Context()
    _core.set_global_default_context(_global_context)

    # Add workaround implementation for NumPy-compatible functions
    from chainerx import _numpy_compat_workarounds

    _numpy_compat_workarounds.populate()
else:
    class ndarray(object):
        pass  # for type testing


def is_available():
    # True when the chainerx native core imported successfully.
    return _available
|
import sys

# chainerx supports Python 3 only; on Python 2 mark it unavailable up front.
if sys.version_info[0] < 3:
    _available = False
else:
    try:
        from chainerx import _core
        _available = True
    except Exception:
        # Any failure importing the native extension disables chainerx.
        _available = False

if _available:
    from numpy import dtype, bool_, int8, int16, int32, int64, uint8, float32, float64  # NOQA
    from chainerx._core import *  # NOQA
    from builtins import bool, int, float  # NOQA

    from chainerx.creation.from_data import asanyarray  # NOQA
    from chainerx.creation.from_data import fromfile  # NOQA
    from chainerx.creation.from_data import fromfunction  # NOQA
    from chainerx.creation.from_data import fromiter  # NOQA
    from chainerx.creation.from_data import fromstring  # NOQA
    from chainerx.creation.from_data import loadtxt  # NOQA

    # Create and install the process-wide default context.
    _global_context = _core.Context()
    _core.set_global_default_context(_global_context)

    # Add workaround implementation for NumPy-compatible functions
    from chainerx import _numpy_compat_workarounds

    _numpy_compat_workarounds.populate()
else:
    class ndarray(object):
        """Dummy class for type testing."""

        def __init__(self, *args, **kwargs):
            # Fail loudly instead of silently producing an unusable object.
            raise RuntimeError('chainerx is not available.')


def is_available():
    # True when the chainerx native core imported successfully.
    return _available
|
Raise an error on dummy class init
|
Raise an error on dummy class init
|
Python
|
mit
|
okuta/chainer,jnishi/chainer,chainer/chainer,ktnyt/chainer,ktnyt/chainer,okuta/chainer,hvy/chainer,niboshi/chainer,ktnyt/chainer,chainer/chainer,ktnyt/chainer,hvy/chainer,wkentaro/chainer,jnishi/chainer,jnishi/chainer,okuta/chainer,wkentaro/chainer,keisuke-umezawa/chainer,niboshi/chainer,chainer/chainer,keisuke-umezawa/chainer,tkerola/chainer,hvy/chainer,jnishi/chainer,pfnet/chainer,wkentaro/chainer,keisuke-umezawa/chainer,niboshi/chainer,keisuke-umezawa/chainer,hvy/chainer,wkentaro/chainer,chainer/chainer,niboshi/chainer,okuta/chainer
|
f16c8f696a282da6c04de6b7530f1d0316eda88b
|
providers/edu/harvarddataverse/normalizer.py
|
providers/edu/harvarddataverse/normalizer.py
|
import arrow
import dateparser
from share.normalize import *
class Person(Parser):
    # Split the raw contributor name string into its components.
    given_name = ParseName(ctx).first
    family_name = ParseName(ctx).last
    additional_name = ParseName(ctx).middle
    suffix = ParseName(ctx).suffix
class Contributor(Parser):
    person = Delegate(Person, ctx)
    cited_name = ctx
    # Preserve the author ordering from the source record.
    order_cited = ctx('index')
class Link(Parser):
    url = ctx
    type = RunPython('get_link_type', ctx)

    def get_link_type(self, link):
        """Classify a URL as 'doi', 'provider' (Harvard Dataverse) or 'misc'."""
        if 'dx.doi.org' in link:
            return 'doi'
        elif 'dataverse.harvard.edu' in link:
            return 'provider'
        return 'misc'
class ThroughLinks(Parser):
    # Delegates each entry to the Link parser.
    link = Delegate(Link, ctx)
class CreativeWork(Parser):
    # Top-level parser for a Dataverse record.
    title = ctx.name
    description = ctx.description
    contributors = Map(Delegate(Contributor), ctx.authors)
    date_published = ParseDate(ctx.published_at)
    # Collect both the record URL and the image URL as links.
    links = Concat(
        Delegate(ThroughLinks, ctx.url),
        Delegate(ThroughLinks, ctx.image_url),
    )
|
import arrow
import dateparser
from share.normalize import *
class Person(Parser):
    # Split the raw contributor name string into its components.
    given_name = ParseName(ctx).first
    family_name = ParseName(ctx).last
    additional_name = ParseName(ctx).middle
    suffix = ParseName(ctx).suffix
class Contributor(Parser):
    person = Delegate(Person, ctx)
    cited_name = ctx
    # Preserve the author ordering from the source record.
    order_cited = ctx('index')
class Link(Parser):
    url = ctx
    type = RunPython('get_link_type', ctx)

    def get_link_type(self, link):
        """Classify a URL as 'doi', 'provider' (Harvard Dataverse) or 'misc'."""
        if 'dx.doi.org' in link:
            return 'doi'
        elif 'dataverse.harvard.edu' in link:
            return 'provider'
        return 'misc'
class ThroughLinks(Parser):
    # Delegates each entry to the Link parser.
    link = Delegate(Link, ctx)
class CreativeWork(Parser):
    # Top-level parser for a Dataverse record.
    title = ctx.name
    # Try() tolerates records that are missing these optional fields.
    description = Try(ctx.description)
    contributors = Map(Delegate(Contributor), Try(ctx.authors))
    date_published = ParseDate(ctx.published_at)
    # Collect both the record URL and the image URL as links.
    links = Concat(
        Delegate(ThroughLinks, ctx.url),
        Delegate(ThroughLinks, ctx.image_url),
    )
|
Handle missing fields in dataverse
|
Handle missing fields in dataverse
|
Python
|
apache-2.0
|
CenterForOpenScience/SHARE,laurenbarker/SHARE,aaxelb/SHARE,aaxelb/SHARE,laurenbarker/SHARE,zamattiac/SHARE,zamattiac/SHARE,CenterForOpenScience/SHARE,laurenbarker/SHARE,CenterForOpenScience/SHARE,zamattiac/SHARE,aaxelb/SHARE
|
3327c204f34a725a2d070beb24a7a5a66d414930
|
migrations/versions/538eeb160af6_.py
|
migrations/versions/538eeb160af6_.py
|
"""empty message
Revision ID: 538eeb160af6
Revises: 1727fb4309d8
Create Date: 2015-09-17 04:22:21.262285
"""
# revision identifiers, used by Alembic.
revision = '538eeb160af6'
down_revision = '1727fb4309d8'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('role', sa.String(length=30), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'role')
### end Alembic commands ###
|
"""empty message
Revision ID: 538eeb160af6
Revises: 1727fb4309d8
Create Date: 2015-09-17 04:22:21.262285
"""
# revision identifiers, used by Alembic.
revision = '538eeb160af6'
down_revision = '6b9d673d8e30'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('role', sa.String(length=30), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'role')
### end Alembic commands ###
|
Update alembic order for merging
|
Update alembic order for merging
|
Python
|
apache-2.0
|
bunjiboys/security_monkey,stackArmor/security_monkey,markofu/security_monkey,bunjiboys/security_monkey,bunjiboys/security_monkey,markofu/security_monkey,markofu/security_monkey,Netflix/security_monkey,stackArmor/security_monkey,Netflix/security_monkey,Netflix/security_monkey,stackArmor/security_monkey,Netflix/security_monkey,bunjiboys/security_monkey,Netflix/security_monkey,bunjiboys/security_monkey,stackArmor/security_monkey,stackArmor/security_monkey,markofu/security_monkey,markofu/security_monkey
|
d2d822a9fb60bbc8ded7f9e3c70d91cf25f794b2
|
src/volunteers/models.py
|
src/volunteers/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.validators import MinValueValidator
class Volunteer(models.Model):
    """Volunteer signup record; may represent a whole group of participants."""

    first_name = models.CharField(_('First name'), max_length=100)
    last_name = models.CharField(_('Last name'), max_length=100)
    age = models.PositiveIntegerField(_('Age'))
    phone = models.CharField(_('Phone'), max_length=100)
    email = models.EmailField(_('E-mail'), unique=True)
    is_group = models.BooleanField(_('Is group representative'), default=False)
    group_name = models.CharField(_('Group/organization name'), max_length=100,
                                  blank=True)
    participant_count = models.PositiveIntegerField(_('Participant count'),
        default=1, validators=[MinValueValidator(1)])

    class Meta:
        verbose_name = _('Volunteer')
        verbose_name_plural = _('Volunteers')

    @property
    def name(self):
        """Display name; groups get a participant-count suffix (Estonian)."""
        template = u'{first_name} {last_name}'
        if self.is_group:
            template += u' (grupp, {participant_count} osalejat)'
        # Format from the instance dict so field names fill the placeholders.
        return template.format(**self.__dict__)

    def __unicode__(self):
        return self.name
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.validators import MinValueValidator
class Volunteer(models.Model):
    """Volunteer signup record; may represent a whole group of participants."""

    first_name = models.CharField(_('First name'), max_length=100)
    last_name = models.CharField(_('Last name'), max_length=100)
    age = models.PositiveIntegerField(_('Age'))
    phone = models.CharField(_('Phone'), max_length=100)
    email = models.EmailField(_('E-mail'), unique=True)
    is_group = models.BooleanField(_('Is group representative'), default=False)
    group_name = models.CharField(_('Group/organization name'), max_length=100,
                                  blank=True)
    participant_count = models.PositiveIntegerField(_('Participant count'),
        default=1, validators=[MinValueValidator(1)])

    class Meta:
        verbose_name = _('Volunteer')
        verbose_name_plural = _('Volunteers')

    @property
    def name(self):
        """Display name; groups get name and participant-count suffix (Estonian)."""
        template = u'{first_name} {last_name}'
        if self.is_group:
            template += u' ({group_name} grupp, {participant_count} osalejat)'
        # Format from the instance dict so field names fill the placeholders.
        return template.format(**self.__dict__)

    def __unicode__(self):
        return self.name
|
Add group name to volunteer string representation
|
Add group name to volunteer string representation
|
Python
|
mit
|
mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign
|
e1111ad6e8802b3c90df55e05eb695d6db9005e4
|
import_script/create_users.py
|
import_script/create_users.py
|
#!/usr/bin/python
import django.contrib.auth.models as auth_models
import django.contrib.contenttypes as contenttypes
def main():
    """Create the 'admin' user and grant it the toolkit 'write' permission."""
    # Read only user:
    # auth_models.User.objects.create_user('cube', 'toolkit_admin_readonly@localhost', '***REMOVED***')

    # Read/write user:
    user_rw = auth_models.User.objects.create_user('admin', 'toolkit_admin@localhost', '***REMOVED***')

    # Create dummy ContentType:
    ct = contenttypes.models.ContentType.objects.get_or_create(
        model='',
        app_label='toolkit'
    )[0]

    # Create 'write' permission:
    write_permission = auth_models.Permission.objects.get_or_create(
        name='Write access to all toolkit content',
        content_type=ct,
        codename='write'
    )[0]

    # Give "admin" user the write permission:
    user_rw.user_permissions.add(write_permission)


if __name__ == "__main__":
    main()
|
#!/usr/bin/python
import django.contrib.auth.models as auth_models
import django.contrib.contenttypes as contenttypes
def get_password():
    """Prompt on stdin until two entries match; return the chosen password.

    Python 2 script: uses `print` statements and raw_input().
    """
    print "*" * 80
    password = raw_input("Please enter string to use as admin password: ")
    check_password = None
    # Loop until the confirmation matches the first entry.
    while check_password != password:
        print
        check_password = raw_input("Please re-enter for confirmation: ")
    return password
def main():
    """Create the 'admin' user (password prompted interactively) and grant it
    the toolkit 'write' permission."""
    # Read only user:
    # auth_models.User.objects.create_user('cube', 'toolkit_admin_readonly@localhost', '********')

    # Read/write user:
    cube_password = get_password()
    user_rw = auth_models.User.objects.create_user('admin', 'toolkit_admin@localhost', cube_password)

    # Create dummy ContentType:
    ct = contenttypes.models.ContentType.objects.get_or_create(
        model='',
        app_label='toolkit'
    )[0]

    # Create 'write' permission:
    write_permission = auth_models.Permission.objects.get_or_create(
        name='Write access to all toolkit content',
        content_type=ct,
        codename='write'
    )[0]

    # Give "admin" user the write permission:
    user_rw.user_permissions.add(write_permission)


if __name__ == "__main__":
    main()
|
Remove cube credentials from import script
|
Remove cube credentials from import script
|
Python
|
agpl-3.0
|
BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit
|
c89abd6a285225313c91ba03c0fd8ab2cfed399d
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""Download and unpack the packer binary into ./bin (idempotent)."""
import os
import urllib
import zipfile

script_path = os.path.dirname(os.path.realpath(__file__))
packer_archive_path = script_path + "/packer.zip"
bin_path = script_path + "/bin"

# Skip everything when packer is already installed.
if not os.path.isfile(bin_path + "/packer"):
    if not os.path.exists(bin_path):
        os.makedirs(bin_path)
    try:
        urllib.urlretrieve("https://dl.bintray.com/mitchellh/packer/packer_0.8.6_linux_amd64.zip", packer_archive_path)
        with zipfile.ZipFile(packer_archive_path, "r") as packer_archive:
            packer_archive.extractall(path=bin_path)
    finally:
        # Remove the archive whether or not extraction succeeded.
        os.remove(packer_archive_path)
    for root, subdirectories, files in os.walk(bin_path):
        for f in files:
            # 0755 is the Python 2 octal literal for rwxr-xr-x.
            os.chmod("%s/%s" % (root, f), 0755)
|
#!/usr/bin/env python
"""Download and unpack the packer binary into ./bin (idempotent)."""
import os
import urllib
import zipfile

script_path = os.path.dirname(os.path.realpath(__file__))
packer_archive_path = script_path + "/packer.zip"
bin_path = script_path + "/bin"

# Skip everything when packer is already installed.
if not os.path.isfile(bin_path + "/packer"):
    if not os.path.exists(bin_path):
        os.makedirs(bin_path)
    try:
        urllib.urlretrieve("https://dl.bintray.com/mitchellh/packer/packer_0.8.6_linux_amd64.zip", packer_archive_path)
        with zipfile.ZipFile(packer_archive_path, "r") as packer_archive:
            packer_archive.extractall(path=bin_path)
    finally:
        # Remove the archive whether or not extraction succeeded.
        os.remove(packer_archive_path)
    for root, subdirectories, files in os.walk(bin_path):
        for f in files:
            # BUG FIX: the literal 755 is decimal (== 0o1363), which sets
            # nonsensical permission bits. Use the octal literal 0o755
            # (rwxr-xr-x), valid on both Python 2.6+ and Python 3.
            os.chmod(root + "/" + f, 0o755)
|
Fix false positive octal syntax warning
|
Fix false positive octal syntax warning
|
Python
|
unlicense
|
dharmab/centos-vagrant
|
3d888afa88326c97246947141c357509c2f72bbc
|
setup.py
|
setup.py
|
from distutils.core import setup

# Packaging metadata for the firebase-token-generator module.
setup(
    name='firebase-token-generator',
    version='1.2',
    author='Greg Soltis',
    author_email='greg@firebase.com',
    py_modules=['firebase_token_generator'],
    license='LICENSE',
    url='https://github.com/firebase/firebase-token-generator-python',
    description='A utility to generate signed Firebase Authentication Tokens',
    long_description=open('README.md').read()
)
|
from distutils.core import setup

# Packaging metadata for the firebase-token-generator module.
setup(
    name='firebase-token-generator',
    version='1.3',
    author='Greg Soltis',
    author_email='greg@firebase.com',
    # NOTE(review): zip_safe is a setuptools option; plain distutils.setup
    # ignores unknown keywords — confirm setuptools is the intended backend.
    zip_safe=False,
    py_modules=['firebase_token_generator'],
    license='LICENSE',
    url='https://github.com/firebase/firebase-token-generator-python',
    description='A utility to generate signed Firebase Authentication Tokens',
    long_description=open('README.md').read()
)
|
Set zip_safe=False. Bump version to 1.3.
|
Set zip_safe=False. Bump version to 1.3.
|
Python
|
mit
|
googlearchive/firebase-token-generator-python
|
ee2d27eca45768a07a562405cf4431cb8d2b09bf
|
setup.py
|
setup.py
|
from distutils.core import setup

# Packaging metadata for pyresttest.
setup(name='pyresttest',
      version='0.1',
      description='Python Rest Testing',
      maintainer='Naveen Malik',
      maintainer_email='jewzaam@gmail.com',
      url='https://github.com/svanoort/pyresttest',
      py_modules=['resttest', 'pycurl_benchmark', 'test_resttest'],
      license='Apache License, Version 2.0'
      )
|
from distutils.core import setup

# Packaging metadata for pyresttest.
setup(name='pyresttest',
      version='0.1',
      description='Python Rest Testing',
      maintainer='Sam Van Oort',
      maintainer_email='acetonespam@gmail.com',
      url='https://github.com/svanoort/pyresttest',
      py_modules=['resttest', 'test_resttest'],
      license='Apache License, Version 2.0',
      # Runtime dependencies (distutils-style 'requires' metadata).
      requires=['argparse', 'yaml', 'pycurl']
      )
|
Set maintainer and add dependencies to distutils config
|
Set maintainer and add dependencies to distutils config
|
Python
|
apache-2.0
|
sunyanhui/pyresttest,satish-suradkar/pyresttest,suvarnaraju/pyresttest,wirewit/pyresttest,netjunki/pyresttest,MorrisJobke/pyresttest,wirewit/pyresttest,suvarnaraju/pyresttest,svanoort/pyresttest,alazaro/pyresttest,sunyanhui/pyresttest,TimYi/pyresttest,MorrisJobke/pyresttest,holdenweb/pyresttest,TimYi/pyresttest,alazaro/pyresttest,janusnic/pyresttest,janusnic/pyresttest,holdenweb/pyresttest,netjunki/pyresttest,svanoort/pyresttest,satish-suradkar/pyresttest
|
8fea58292e41352b0b58947f4182dd32ff4f225d
|
opps/fields/models.py
|
opps/fields/models.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.boxes.models import OPPS_APPS
FIELD_TYPE = (
('checkbox', _('CheckBox')),
('radio', _('Radio')),
('text', _('Text')),
('textarea', _('TextArea')),
)
class Field(models.Model):
    """Configurable field (checkbox/radio/text/textarea) tied to an OPPS app."""

    name = models.CharField(_('Name'), max_length=100)
    slug = models.SlugField(_('Slug'), max_length=255)
    application = models.CharField(_('Application'),
                                  max_length=255,
                                  choices=OPPS_APPS,
                                  db_index=True)
    type = models.CharField(_("Type"), max_length=15,
                            choices=FIELD_TYPE,
                            db_index=True)

    def __unicode__(self):
        return u"{} - {}".format(self.application, self.name)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.boxes.models import OPPS_APPS
FIELD_TYPE = (
('checkbox', _('CheckBox')),
('radio', _('Radio')),
('text', _('Text')),
('textarea', _('TextArea')),
)
class Field(models.Model):
    """Configurable field (checkbox/radio/text/textarea) tied to an OPPS app."""

    name = models.CharField(_('Name'), max_length=100)
    slug = models.SlugField(_('Slug'), max_length=255)
    application = models.CharField(_('Application'),
                                  max_length=255,
                                  choices=OPPS_APPS,
                                  db_index=True)
    type = models.CharField(_("Type"), max_length=15,
                            choices=FIELD_TYPE,
                            db_index=True)

    def __unicode__(self):
        return u"{} - {}".format(self.application, self.name)
class Option(models.Model):
    """A selectable option value belonging to a Field."""

    field = models.ForeignKey('fields.Field')
    name = models.CharField(_('Name'), max_length=100)
    slug = models.SlugField(_('Slug'), max_length=140)
    value = models.CharField(_('Value'), max_length=255)

    def __unicode__(self):
        return u"{} - {}".format(self.field.slug, self.name)
class FieldOption(models.Model):
    """Ordered link between a Field and one of its Options."""

    field = models.ForeignKey('fields.Field')
    option = models.ForeignKey('fields.Option')
    order = models.PositiveIntegerField(_(u'Order'), default=0)

    def __unicode__(self):
        return u"{} - {}".format(self.field.slug, self.option.slug)

    class Meta:
        # Highest order value first.
        ordering = ['-order']
|
Add new model option to add field options if exist (radio/checkbox)
|
Add new model option to add field options if exist (radio/checkbox)
|
Python
|
mit
|
williamroot/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,williamroot/opps,opps/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps
|
6f83fb7dd071786dc01a015addbdb541e7eaf7db
|
meinberlin/apps/documents/migrations/0002_rename_document_to_chapter.py
|
meinberlin/apps/documents/migrations/0002_rename_document_to_chapter.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
    # Rename the Document model (and the FK field on Paragraph) to Chapter.

    dependencies = [
        ('meinberlin_documents', '0001_initial'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='Document',
            new_name='Chapter',
        ),
        migrations.RenameField(
            model_name='paragraph',
            old_name='document',
            new_name='chapter',
        ),
    ]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
    # Rename the Document model (and the FK field on Paragraph) to Chapter.

    # Run outside a transaction; per the accompanying change description
    # this works around a rename issue on SQLite.
    atomic=False

    dependencies = [
        ('meinberlin_documents', '0001_initial'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='Document',
            new_name='Chapter',
        ),
        migrations.RenameField(
            model_name='paragraph',
            old_name='document',
            new_name='chapter',
        ),
    ]
|
Work around a migration issue in sqlite
|
apps/documents: Work around a migration issue in sqlite
|
Python
|
agpl-3.0
|
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
|
3d2f19ff097cf144efd9135c52e4d584193f9ddb
|
tohu/v7/custom_generator/tohu_items_class.py
|
tohu/v7/custom_generator/tohu_items_class.py
|
import attr
__all__ = ["make_tohu_items_class"]
def make_tohu_items_class(clsname, field_names):
    """
    Create a frozen attrs-based "tohu items" class.

    Parameters
    ----------
    clsname: string
        Name of the class to be created.
    field_names: list of strings
        Names of the field attributes of the class to be created.

    Returns
    -------
    type
        Frozen attrs class with one attribute per field name; its __eq__
        also accepts tuples and dicts for convenience during testing.
    """
    item_cls = attr.make_class(clsname, {name: attr.ib() for name in field_names}, repr=True, cmp=True, frozen=True)

    func_eq_orig = item_cls.__eq__

    def func_eq_new(self, other):
        """
        Custom __eq__() method which also allows comparisons with
        tuples and dictionaries. This is mostly for convenience
        during testing.
        """
        if isinstance(other, self.__class__):
            return func_eq_orig(self, other)
        else:
            if isinstance(other, tuple):
                return attr.astuple(self) == other
            elif isinstance(other, dict):
                return attr.asdict(self) == other
            else:
                # BUG FIX: the second string previously lacked the f-prefix,
                # so the class names were emitted as literal "{...}" text.
                raise TypeError(
                    f"Tohu items have types that cannot be compared: "
                    f"{self.__class__.__name__}, {other.__class__.__name__}"
                )

    item_cls.__eq__ = func_eq_new
    item_cls.field_names = field_names
    item_cls.as_dict = lambda self: attr.asdict(self)
    item_cls.as_tuple = lambda self: attr.astuple(self)
    return item_cls
|
import attr
__all__ = ["make_tohu_items_class"]
def make_tohu_items_class(clsname, field_names):
    """
    Create a frozen attrs-based "tohu items" class.

    Parameters
    ----------
    clsname: string
        Name of the class to be created.
    field_names: list of strings
        Names of the field attributes of the class to be created.

    Returns
    -------
    type
        Frozen attrs class with one attribute per field name; its __eq__
        also accepts tuples and dicts for convenience during testing.
    """
    item_cls = attr.make_class(clsname, {name: attr.ib() for name in field_names}, repr=True, cmp=True, frozen=True)

    func_eq_orig = item_cls.__eq__

    def func_eq_new(self, other):
        """
        Custom __eq__() method which also allows comparisons with
        tuples and dictionaries. This is mostly for convenience
        during testing.
        """
        if isinstance(other, self.__class__):
            return func_eq_orig(self, other)
        else:
            if isinstance(other, tuple):
                return attr.astuple(self) == other
            elif isinstance(other, dict):
                return attr.asdict(self) == other
            else:
                # BUG FIX: the second string previously lacked the f-prefix,
                # so the class names were emitted as literal "{...}" text.
                raise TypeError(
                    f"Tohu items have types that cannot be compared: "
                    f"{self.__class__.__name__}, {other.__class__.__name__}"
                )

    item_cls.__eq__ = func_eq_new
    item_cls.field_names = field_names
    item_cls.as_dict = lambda self: attr.asdict(self)
    item_cls.as_tuple = lambda self: attr.astuple(self)
    # Interface parity with MissingTohuItemsCls, which reports is_unset=True.
    item_cls.is_unset = False
    return item_cls
|
Add attribute 'is_unset' so that the interface is consistent with MissingTohuItemsCls
|
Add attribute 'is_unset' so that the interface is consistent with MissingTohuItemsCls
|
Python
|
mit
|
maxalbert/tohu
|
445b80562e038bc3749930d44e00eda55edaa180
|
ci_scripts/buildLinuxWheels.py
|
ci_scripts/buildLinuxWheels.py
|
from subprocess import call, check_output
import sys
import os

"""CI helper: build manylinux wheels (and upload them) when the latest
commit message contains 'build wheels' and we are running under Python 3."""

isPython3 = sys.version_info.major == 3

# Read the latest commit message.
# https://stackoverflow.com/a/3357357
command = 'git log --format=%B -n 1'.split()
out = check_output(command)

# Skip unless the commit opts in and this is the Python 3 job.
if b'build wheels' not in out.lower() or not isPython3:
    exit(0)

# BUG FIX: `argv` was referenced without the `sys.` prefix, raising
# NameError whenever the wheel build actually ran.
path = os.path.abspath(sys.argv[1])

call('pip install cibuildwheel==0.7.0'.split())
call('cibuildwheel --output-dir {}'.format(path).split())

from dropboxUpload import uploadAll

uploadAll(path)
|
from subprocess import call, check_output
import sys
import os

# CI helper: build wheels only on Python 3 jobs.
isPython3 = sys.version_info.major == 3

# Read the latest commit message.
# https://stackoverflow.com/a/3357357
command = 'git log --format=%B -n 1'.split()
out = check_output(command)

# Skip unless the commit message opts in with "build wheels".
if b'build wheels' not in out.lower() or not isPython3:
    exit(0)

path = os.path.abspath(sys.argv[1])

call('pip install cibuildwheel==0.7.0'.split())
# NOTE(review): cibuildwheel receives the raw sys.argv[1] while the upload
# below uses its absolute form — confirm both refer to the same directory.
call('cibuildwheel --output-dir {}'.format(sys.argv[1]).split())

from dropboxUpload import uploadAll

uploadAll(path)
|
Fix build wheels and upload 4.
|
Fix build wheels and upload 4.
|
Python
|
bsd-3-clause
|
jr-garcia/AssimpCy,jr-garcia/AssimpCy
|
a10407bf4d9dd404d734985717aa7bcebfa0981d
|
api/digital_ocean.py
|
api/digital_ocean.py
|
"""
@fileoverview Digital Ocean API
@author David Parlevliet
@version 20130315
@preserve Copyright 2013 David Parlevliet.
Digital Ocean API
=================
Class to get the server details via the Digital Ocean API.
"""
import urllib2
import json
class Api():
    """Digital Ocean API client: discovers active droplets and their IPs."""

    group_name = "Digital Ocean"
    client_key = None
    api_key = None
    # Class-level default kept for backward compatibility; it is shadowed
    # by a per-instance dict in __init__.
    servers = {}

    def __init__(self, **kwargs):
        # BUG FIX: `servers` used to exist only as a mutable class
        # attribute, so every instance shared (and mutated) the same dict.
        self.servers = {}
        for key in kwargs:
            setattr(self, key, kwargs[key])

    def grab_servers(self):
        """Populate self.servers with the IPs of all active droplets.

        Raises Exception when the droplet list cannot be fetched/parsed.
        """
        DROPLETS_URL = 'https%s/droplets/?client_id=%s&api_key=%s' % \
                       ('://api.digitalocean.com',
                        self.client_key,
                        self.api_key)
        droplets = urllib2.urlopen(DROPLETS_URL)
        try:
            data = json.loads(droplets.read())
        except:
            raise Exception("Fatal error: No droplets found")
        for droplet in data['droplets']:
            if droplet['status'] == 'active':
                name = droplet['name']
                if name not in self.servers:
                    self.servers[name] = []
                self.servers[name].append(droplet['ip_address'])

    def get_servers(self, name):
        """Return the list of IPs recorded for droplet *name*, or None."""
        return self.servers[name] if name in self.servers else None
|
"""
@fileoverview Digital Ocean API
@author David Parlevliet
@version 20130315
@preserve Copyright 2013 David Parlevliet.
Digital Ocean API
=================
Class to get the server details via the Digital Ocean API.
"""
import urllib2
import json
class Api():
    """Digital Ocean API client: discovers active droplets and their IPs."""

    group_name = "Digital Ocean"
    client_key = None
    api_key = None
    # Class-level default kept for backward compatibility; it is shadowed
    # by a per-instance dict in __init__.
    servers = {}

    def __init__(self, **kwargs):
        # BUG FIX: `servers` used to exist only as a mutable class
        # attribute, so every instance shared (and mutated) the same dict.
        self.servers = {}
        for key in kwargs:
            setattr(self, key, kwargs[key])

    def grab_servers(self):
        """Populate self.servers with the IPs of all active droplets.

        Raises Exception when the API is unreachable or the droplet list
        cannot be parsed.
        """
        DROPLETS_URL = 'https%s/droplets/?client_id=%s&api_key=%s' % \
                       ('://api.digitalocean.com',
                        self.client_key,
                        self.api_key)
        try:
            droplets = urllib2.urlopen(DROPLETS_URL)
        except urllib2.URLError:
            raise Exception("Fatal error: Unable to connect to API")
        try:
            data = json.loads(droplets.read())
        except:
            raise Exception("Fatal error: No droplets found")
        for droplet in data['droplets']:
            if droplet['status'] == 'active':
                name = droplet['name']
                if name not in self.servers:
                    self.servers[name] = []
                self.servers[name].append(droplet['ip_address'])

    def get_servers(self, name):
        """Return the list of IPs recorded for droplet *name*, or None."""
        return self.servers[name] if name in self.servers else None
|
Return a helpful exception if API is uncontactable
|
Return a helpful exception if API is uncontactable
|
Python
|
mit
|
dparlevliet/elastic-firewall,dparlevliet/elastic-firewall,dparlevliet/elastic-firewall
|
7ff6c9d85eef03c225b511f39bbb07796b47659f
|
datapipe/history.py
|
datapipe/history.py
|
class History:
    """Owns the SQLite connection backing the pipeline history database."""

    def __init__(self, path='.history.db'):
        """Open (creating if necessary) the history database.

        The *path* parameter generalizes the previously hard-coded
        '.history.db' while keeping no-argument calls backward compatible.
        """
        # BUG FIX: sqlite3 was used without ever being imported.
        import sqlite3
        self.conn = sqlite3.connect(path)
|
import sqlite3
class History:
    """Thin owner of the SQLite connection backing the history store."""

    def __init__(self, path):
        # Open the database at *path*, creating the file when absent.
        self.conn = sqlite3.connect(path)
|
Make database filepath configurable on History
|
Make database filepath configurable on History
|
Python
|
mit
|
ibab/datapipe
|
fd4539942dafe622d3f7a7d183db3d69f95a00c4
|
shop/urls/cart.py
|
shop/urls/cart.py
|
from django.conf.urls.defaults import url, patterns
from shop.views.cart import CartDetails, CartItemDetail

urlpatterns = patterns('',
    # Cart-level operations.
    url(r'^delete/$', CartDetails.as_view(action='delete'),  # DELETE
        name='cart_delete'),
    url('^item/$', CartDetails.as_view(action='post'),  # POST
        name='cart_item_add'),
    url(r'^$', CartDetails.as_view(), name='cart'),  # GET
    url(r'^update/$', CartDetails.as_view(action='put'),
        name='cart_update'),
    # CartItems
    # NOTE(review): the character class contains "//", so <id> also matches
    # slashes — confirm this is intended (it lets any item URL match here).
    url('^item/(?P<id>[0-9A-Za-z-_.//]+)$', CartItemDetail.as_view(),
        name='cart_item'),
    url('^item/(?P<id>[0-9A-Za-z-_.//]+)/delete$',
        CartItemDetail.as_view(action='delete'),
        name='cart_item_delete'),
)
|
from django.conf.urls.defaults import url, patterns
from shop.views.cart import CartDetails, CartItemDetail

urlpatterns = patterns('',
    # Cart-level operations.
    url(r'^delete/$', CartDetails.as_view(action='delete'),  # DELETE
        name='cart_delete'),
    url('^item/$', CartDetails.as_view(action='post'),  # POST
        name='cart_item_add'),
    url(r'^$', CartDetails.as_view(), name='cart'),  # GET
    url(r'^update/$', CartDetails.as_view(action='put'),
        name='cart_update'),
    # CartItems: <id> is strictly numeric.
    url('^item/(?P<id>[0-9]+)$', CartItemDetail.as_view(),
        name='cart_item'),
    url('^item/(?P<id>[0-9]+)/delete$',
        CartItemDetail.as_view(action='delete'),
        name='cart_item_delete'),
)
|
Make sure that ID will not match the first CartItems rule EVERY time ("//" was in regex).
|
Make sure that ID will not match the first CartItems rule EVERY time ("//" was in regex).
|
Python
|
bsd-3-clause
|
schacki/django-shop,khchine5/django-shop,khchine5/django-shop,dwx9/test,febsn/django-shop,DavideyLee/django-shop,awesto/django-shop,jrief/django-shop,dwx9/test,thenewguy/django-shop,thenewguy/django-shop,bmihelac/django-shop,pjdelport/django-shop,creimers/django-shop,creimers/django-shop,jrief/django-shop,bmihelac/django-shop,awesto/django-shop,awesto/django-shop,febsn/django-shop,febsn/django-shop,nimbis/django-shop,khchine5/django-shop,pjdelport/django-shop,rfleschenberg/django-shop,rfleschenberg/django-shop,dwx9/test,rfleschenberg/django-shop,fusionbox/django-shop,chriscauley/django-shop,jrief/django-shop,divio/django-shop,creimers/django-shop,DavideyLee/django-shop,pjdelport/django-shop,schacki/django-shop,schacki/django-shop,nimbis/django-shop,atheiste/django-shop,nimbis/django-shop,katomaso/django-shop,fusionbox/django-shop,chriscauley/django-shop,atheiste/django-shop,chriscauley/django-shop,jrutila/django-shop,jrutila/django-shop,khchine5/django-shop,schacki/django-shop,nimbis/django-shop,jrutila/django-shop,divio/django-shop,divio/django-shop,katomaso/django-shop,katomaso/django-shop,rfleschenberg/django-shop,atheiste/django-shop,jrief/django-shop
|
10948cd88d51383e13af0a116703984752092c6a
|
jenkinsapi_tests/systests/test_jenkins_matrix.py
|
jenkinsapi_tests/systests/test_jenkins_matrix.py
|
'''
System tests for `jenkinsapi.jenkins` module.
'''
import re
import time
import unittest
from jenkinsapi_tests.systests.base import BaseSystemTest
from jenkinsapi_tests.systests.job_configs import MATRIX_JOB
from jenkinsapi_tests.test_utils.random_strings import random_string
class TestMatrixJob(BaseSystemTest):

    def test_invoke_matrix_job(self):
        """Invoke a matrix job and sanity-check its per-configuration runs."""
        job_name = 'create_%s' % random_string()
        job = self.jenkins.create_job(job_name, MATRIX_JOB)
        job.invoke(block=True)
        b = job.get_last_build()
        # Poll until the build (and its matrix runs) have finished.
        while b.is_running():
            time.sleep(1)
        s = set()
        for r in b.get_matrix_runs():
            self.assertEquals(r.get_number(), b.get_number())
            self.assertEquals(r.get_upstream_build(), b)
            # Run names look like u'... \xbb <group> #<build>'; capture <group>.
            m = re.search(u'\xbb (.*) #\\d+$', r.name)
            self.assertIsNotNone(m)
            s.add(m.group(1))
        # This is a bad test, it simply verifies that this function does
        # not crash on a build from a matrix job.
        self.assertFalse(b.get_master_job_name())
        self.assertEqual(s, set(['one', 'two', 'three']))


if __name__ == '__main__':
    unittest.main()
|
'''
System tests for `jenkinsapi.jenkins` module.
'''
import re
import time
import unittest
from jenkinsapi_tests.systests.base import BaseSystemTest
from jenkinsapi_tests.systests.job_configs import MATRIX_JOB
from jenkinsapi_tests.test_utils.random_strings import random_string
class TestMatrixJob(BaseSystemTest):

    def test_invoke_matrix_job(self):
        """Invoke a matrix job and sanity-check its per-configuration runs."""
        job_name = 'create_%s' % random_string()
        job = self.jenkins.create_job(job_name, MATRIX_JOB)
        job.invoke(block=True)
        build = job.get_last_build()
        # Poll until the build (and its matrix runs) have finished.
        while build.is_running():
            time.sleep(1)
        set_of_groups = set()
        for run in build.get_matrix_runs():
            self.assertEquals(run.get_number(), build.get_number())
            self.assertEquals(run.get_upstream_build(), build)
            # Run names look like u'... \xbb <group> #<build>'; capture <group>.
            match_result = re.search(u'\xbb (.*) #\\d+$', run.name)
            self.assertIsNotNone(match_result)
            set_of_groups.add(match_result.group(1))
        build.get_master_job_name()
        # This is a bad test, it simply verifies that this function does
        # not crash on a build from a matrix job.
        self.assertFalse(build.get_master_job_name())
        self.assertEqual(set_of_groups, set(['one', 'two', 'three']))


if __name__ == '__main__':
    unittest.main()
|
Tidy up this test - still quite bad & useless.
|
Tidy up this test - still quite bad & useless.
|
Python
|
mit
|
imsardine/jenkinsapi,salimfadhley/jenkinsapi,JohnLZeller/jenkinsapi,JohnLZeller/jenkinsapi,aerickson/jenkinsapi,domenkozar/jenkinsapi,zaro0508/jenkinsapi,imsardine/jenkinsapi,zaro0508/jenkinsapi,jduan/jenkinsapi,mistermocha/jenkinsapi,domenkozar/jenkinsapi,salimfadhley/jenkinsapi,zaro0508/jenkinsapi,mistermocha/jenkinsapi,aerickson/jenkinsapi,jduan/jenkinsapi,JohnLZeller/jenkinsapi,mistermocha/jenkinsapi,imsardine/jenkinsapi
|
238ba8cec34ec02dc521f25ef1ada6e230194c32
|
kitsune/kbadge/migrations/0002_auto_20181023_1319.py
|
kitsune/kbadge/migrations/0002_auto_20181023_1319.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kbadge', '0001_initial'),
]
operations = [
migrations.RunSQL(
"UPDATE badger_badge SET image = CONCAT('uploads/', image)"
)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('kbadge', '0001_initial'),
]
operations = [
migrations.RunSQL(
"UPDATE badger_badge SET image = CONCAT('uploads/', image) WHERE image NOT LIKE 'uploads/%'"
)
]
|
Add WHERE clause to SQL data migration.
|
Add WHERE clause to SQL data migration.
|
Python
|
bsd-3-clause
|
mozilla/kitsune,anushbmx/kitsune,anushbmx/kitsune,anushbmx/kitsune,mozilla/kitsune,mozilla/kitsune,mozilla/kitsune,anushbmx/kitsune
|
4651d3b5666fe3ddf3bd92b31ee6ffe4a72ce94e
|
core/api/__init__.py
|
core/api/__init__.py
|
import os
from flask import Flask, jsonify
from flask_pymongo import PyMongo, BSONObjectIdConverter
from werkzeug.exceptions import HTTPException, default_exceptions
from core.api import settings
def create_app(environment=None):
app = Flask('veritrans')
app.url_map.converters['ObjectId'] = BSONObjectIdConverter
# Config app for environment
if not environment:
environment = os.environ.get('BACKEND_ENVIRONMENT', 'Prod')
app.config.from_object('core.api.settings.%s' % environment)
# convert exceptions to JSON
def make_json_error(ex):
response = jsonify(
message=str(ex)
)
response.status_code = (ex.code
if isinstance(ex, HTTPException)
else 500)
return response
for code in default_exceptions.items():
app.error_handler_spec[None][code] = make_json_error
from core.api.views.endpoints import api
app.register_module(api)
return app
class API(object):
app = None
mongo_client = None
@staticmethod
def init():
env = os.environ.get('SITE_NAME', 'Dev')
API.app = create_app(env)
API.mongo_client = PyMongo(API.app)
|
import os
from flask import Flask, jsonify
from flask_pymongo import PyMongo, BSONObjectIdConverter
from werkzeug.exceptions import HTTPException, default_exceptions
from core.api import settings
def create_app(environment=None):
app = Flask('veritrans')
app.url_map.converters['ObjectId'] = BSONObjectIdConverter
# Config app for environment
if not environment:
environment = os.environ.get('BACKEND_ENVIRONMENT', 'Prod')
app.config.from_object('core.api.settings.%s' % environment)
# convert exceptions to JSON
def make_json_error(ex):
response = jsonify(
message=str(ex)
)
response.status_code = (ex.code
if isinstance(ex, HTTPException)
else 500)
return response
for code in default_exceptions.items():
app.error_handler_spec[None][code] = make_json_error
from core.api.views.endpoints import api
app.register_module(api)
return app
class API(object):
app = None
mongo_client = None
@staticmethod
def init():
API.app = create_app()
API.mongo_client = PyMongo(API.app)
|
Use Production config unless specified
|
Use Production config unless specified
|
Python
|
mit
|
onyb/veritrans-payment-portals
|
ced218643784838d68961a926cc0dd18c3a3f01f
|
skald/geometry.py
|
skald/geometry.py
|
# -*- coding: utf-8 -*-
from collections import namedtuple
Size = namedtuple("Size", ["width", "height"])
Rectangle = namedtuple("Rectangle", ["x0", "y0", "x1", "y1"])
class Point(namedtuple("Point", ["x", "y"])):
"""Point in a two-dimensional space.
Named tuple implementation that allows for addition and subtraction.
"""
__slots__ = ()
def __add__(self, other):
x = self.x + other.x
y = self.y + other.y
return Point(x, y)
def __sub__(self, other):
x = self.x - other.x
y = self.y - other.y
return Point(x, y)
class Box(namedtuple("Box", ["point", "size"])):
__slots__ = ()
@property
def rectangle(self):
return Rectangle(
x0=self.point.x,
y0=self.point.y,
x1=self.point.x+self.size.width,
y1=self.point.y+self.size.height
)
|
# -*- coding: utf-8 -*-
from collections import namedtuple
Size = namedtuple("Size", ["width", "height"])
class Rectangle(namedtuple("Rectangle", ["x0", "y0", "x1", "y1"])):
def __contains__(self, other):
"""Check if this rectangle and `other` overlaps eachother.
Essentially this is a bit of a hack to be able to write
`rect1 in rect2`.
"""
if self.x0 < other.x0 and self.x1 > other.x1 and \
self.y0 < other.y0 and self.y1 > other.y1:
return True
return False
class Point(namedtuple("Point", ["x", "y"])):
"""Point in a two-dimensional space.
Named tuple implementation that allows for addition and subtraction.
"""
__slots__ = ()
def __add__(self, other):
x = self.x + other.x
y = self.y + other.y
return Point(x, y)
def __sub__(self, other):
x = self.x - other.x
y = self.y - other.y
return Point(x, y)
class Box(namedtuple("Box", ["point", "size"])):
__slots__ = ()
@property
def rectangle(self):
return Rectangle(
x0=self.point.x,
y0=self.point.y,
x1=self.point.x+self.size.width,
y1=self.point.y+self.size.height
)
|
Add intersection test for rectangles
|
Add intersection test for rectangles
|
Python
|
mit
|
bjornarg/skald,bjornarg/skald
|
8f03f51c89aeea44943f9cb0b39330e676ae0089
|
utils.py
|
utils.py
|
import vx
from contextlib import contextmanager
from functools import partial
import sys
from io import StringIO
def _expose(f=None, name=None):
if f is None:
return partial(_expose, name=name)
if name is None:
name = f.__name__.lstrip('_')
if getattr(vx, name, None) is not None:
raise AttributeError("Cannot expose duplicate name: '{}'".format(name))
setattr(vx, name, f)
return f
vx.expose = _expose
@vx.expose
def _repeat(c, times=4):
for _ in range(times):
c()
@vx.expose
@contextmanager
def _cursor_wander(command=None, window=None):
if window is None:
window = vx.window.focused_window
y, x = vx.get_linecol_window(window)
if command is not None:
command()
yp, xp = vx.get_linecol_window(window)
yield (yp, xp)
vx.set_linecol_window(window, y, x)
@contextmanager
def stdoutIO(stdout=None):
old = sys.stdout
if stdout is None:
stdout = StringIO()
sys.stdout = stdout
yield stdout
sys.stdout = old
|
import vx
from contextlib import contextmanager
from functools import partial
import sys
from io import StringIO
def _expose(f=None, name=None):
if f is None:
return partial(_expose, name=name)
if name is None:
name = f.__name__.lstrip('_')
if getattr(vx, name, None) is not None:
raise AttributeError("Cannot expose duplicate name: '{}'".format(name))
setattr(vx, name, f)
return f
vx.expose = _expose
@vx.expose
def _repeat(c, times=4):
res = []
for _ in range(times):
res.append(c())
return res
@vx.expose
@contextmanager
def _cursor_wander(command=None, window=None):
if window is None:
window = vx.window.focused_window
y, x = vx.get_linecol_window(window)
if command is not None:
command()
yp, xp = vx.get_linecol_window(window)
yield (yp, xp)
vx.set_linecol_window(window, y, x)
@contextmanager
def stdoutIO(stdout=None):
old = sys.stdout
if stdout is None:
stdout = StringIO()
sys.stdout = stdout
yield stdout
sys.stdout = old
|
Change repeat command to return a list of the results of the repeated commands
|
Change repeat command to return a list of the results of the repeated commands
|
Python
|
mit
|
philipdexter/vx,philipdexter/vx
|
822e6123cc598b4f6a0eafedfb2f0d0cbfba5f37
|
currencies/migrations/0003_auto_20151216_1906.py
|
currencies/migrations/0003_auto_20151216_1906.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from extra_countries.models import ExtraCountry
def add_currencies_with_countries(apps, schema_editor):
# We can't import the model directly as it may be a newer
# version than this migration expects. We use the historical version.
Currency = apps.get_model("currencies", "Currency")
for extra_country in ExtraCountry.objects.all():
print("seeding currency for county: %s" % extra_country.country.name)
# trying to find a currency with the same code first
try:
currency = Currency.objects.get(code=extra_country.country.currency)
except Currency.DoesNotExist: # no such currency yet
currency = Currency(code=extra_country.country.currency,
name=extra_country.country.currency_name)
currency.save()
currency.countries.add(extra_country.pk)
def reverse_data(apps, schema_editor):
Currency = apps.get_model("currencies", "Currency")
Currency.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('currencies', '0002_currency_countries'),
]
operations = [
migrations.RunPython(add_currencies_with_countries, reverse_data)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from extra_countries.models import ExtraCountry
def add_currencies_with_countries(apps, schema_editor):
# We can't import the model directly as it may be a newer
# version than this migration expects. We use the historical version.
Currency = apps.get_model("currencies", "Currency")
for extra_country in ExtraCountry.objects.all():
print("seeding currency for county: %s" % extra_country.country.name)
# trying to find a currency with the same code first
try:
currency = Currency.objects.get(code=extra_country.country.currency)
except Currency.DoesNotExist: # no such currency yet
currency = Currency(code=extra_country.country.currency,
name=extra_country.country.currency_name)
if (str(extra_country.country.currency) == '') or (str(extra_country.country.currency_name) == ''):
pass
else:
currency.save()
currency.countries.add(extra_country.pk)
def reverse_data(apps, schema_editor):
Currency = apps.get_model("currencies", "Currency")
Currency.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
('currencies', '0002_currency_countries'),
]
operations = [
migrations.RunPython(add_currencies_with_countries, reverse_data)
]
|
Fix currencies seeding, so it won't have empty currencies
|
Fix currencies seeding, so it won't have empty currencies
|
Python
|
mit
|
openspending/cosmopolitan,kiote/cosmopolitan
|
041b271baa7ae0bbd20c30ac4f70b42fda267e93
|
mozillians/groups/__init__.py
|
mozillians/groups/__init__.py
|
from django.apps import AppConfig
CIS_GROUPS = [
'cis_whitelist',
'nda'
]
default_app_config = 'mozillians.groups.GroupConfig'
class GroupConfig(AppConfig):
name = 'mozillians.groups'
|
from django.apps import AppConfig
CIS_GROUPS = [
'cis_whitelist',
'nda',
'open-innovation-reps-council'
]
default_app_config = 'mozillians.groups.GroupConfig'
class GroupConfig(AppConfig):
name = 'mozillians.groups'
|
Add a group in the whitelist.
|
Add a group in the whitelist.
|
Python
|
bsd-3-clause
|
mozilla/mozillians,akatsoulas/mozillians,mozilla/mozillians,johngian/mozillians,mozilla/mozillians,mozilla/mozillians,akatsoulas/mozillians,akatsoulas/mozillians,johngian/mozillians,johngian/mozillians,johngian/mozillians,akatsoulas/mozillians
|
199c9bae8e2ad42ee1c8699c678dd56d6074b2de
|
main/models.py
|
main/models.py
|
from django.db import models
from django.contrib.auth.models import User
import string, random
from django import forms
# Create your models here.
def _generate_default_hashtag():
return "".join(random.choice(string.lowercase) for i in range(3))
class Wall(models.Model):
hashtag = models.CharField(max_length=20, help_text='Twitter hashtag to tweet to', default=_generate_default_hashtag())
user = models.ForeignKey(User, editable=False)
sms_keyword = models.CharField(max_length=20)
def __unicode__(self):
return self.name
TRAFFIC_SOURCE = (
('BG', 'Blog'),
('FR', 'Friend'),
('OT', 'Other',)
)
|
from django.db import models
from django.contrib.auth.models import User
import string, random
from django import forms
# Create your models here.
def _generate_default_hashtag():
return "".join(random.choice(string.lowercase) for i in range(3))
class Wall(models.Model):
hashtag = models.CharField(max_length=20, help_text='Twitter hashtag to tweet to', default=_generate_default_hashtag())
user = models.ForeignKey(User, editable=False)
sms_keyword = models.CharField(max_length=20)
def __unicode__(self):
return self.sms_keyword
TRAFFIC_SOURCE = (
('BG', 'Blog'),
('FR', 'Friend'),
('OT', 'Other',)
)
|
Return sms_keyword as wall name
|
Return sms_keyword as wall name
|
Python
|
mit
|
Aaron1011/texting_wall
|
b6ec3ba9efae7b6b291391b0333e80f2e9fc6fa0
|
src/waldur_mastermind/invoices/migrations/0053_invoiceitem_uuid.py
|
src/waldur_mastermind/invoices/migrations/0053_invoiceitem_uuid.py
|
import uuid
from django.db import migrations
import waldur_core.core.fields
def gen_uuid(apps, schema_editor):
InvoiceItem = apps.get_model('invoices', 'InvoiceItem')
for row in InvoiceItem.objects.all():
row.uuid = uuid.uuid4().hex
row.save(update_fields=['uuid'])
class Migration(migrations.Migration):
dependencies = [
('invoices', '0052_delete_servicedowntime'),
]
operations = [
migrations.AddField(
model_name='invoiceitem',
name='uuid',
field=waldur_core.core.fields.UUIDField(null=True),
),
migrations.RunPython(gen_uuid, elidable=True),
migrations.AlterField(
model_name='invoiceitem',
name='uuid',
field=waldur_core.core.fields.UUIDField(),
),
]
|
import uuid
from django.db import migrations, models
import waldur_core.core.fields
def gen_uuid(apps, schema_editor):
InvoiceItem = apps.get_model('invoices', 'InvoiceItem')
for row in InvoiceItem.objects.all():
row.uuid = uuid.uuid4().hex
row.save(update_fields=['uuid'])
class Migration(migrations.Migration):
dependencies = [
('invoices', '0052_delete_servicedowntime'),
]
operations = [
migrations.AddField(
model_name='invoiceitem', name='uuid', field=models.UUIDField(null=True),
),
migrations.RunPython(gen_uuid, elidable=True),
migrations.AlterField(
model_name='invoiceitem',
name='uuid',
field=waldur_core.core.fields.UUIDField(),
),
]
|
Fix database migration script for UUID field in invoice item model.
|
Fix database migration script for UUID field in invoice item model.
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
|
9b19d366c7e1cf41ffc6af4eaed789995ddc5cc2
|
byceps/blueprints/core_admin/views.py
|
byceps/blueprints/core_admin/views.py
|
"""
byceps.blueprints.core_admin.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ...services.brand import service as brand_service
from ...util.framework.blueprint import create_blueprint
from ..authorization.registry import permission_registry
from .authorization import AdminPermission
blueprint = create_blueprint('core_admin', __name__)
permission_registry.register_enum(AdminPermission)
@blueprint.app_context_processor
def inject_brands():
brands = brand_service.get_brands()
return {
'all_brands': brands,
}
|
"""
byceps.blueprints.core_admin.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ...services.brand import service as brand_service
from ...util.framework.blueprint import create_blueprint
from ..authorization.registry import permission_registry
from .authorization import AdminPermission
blueprint = create_blueprint('core_admin', __name__)
permission_registry.register_enum(AdminPermission)
@blueprint.app_context_processor
def inject_template_variables():
brands = brand_service.get_brands()
return {
'all_brands': brands,
}
|
Generalize name of function to inject admin template variables
|
Generalize name of function to inject admin template variables
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
|
6ac683ca1905fbf17dd63c1264609e770439fa7f
|
test/integration/targets/module_utils/library/test_env_override.py
|
test/integration/targets/module_utils/library/test_env_override.py
|
#!/usr/bin/python
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.json_utils import data
from ansible.module_utils.mork import data as mork_data
results = {"json_utils": data, "mork": mork_data}
AnsibleModule(argument_spec=dict()).exit_json(**results)
|
#!/usr/bin/python
# Most of these names are only available via PluginLoader so pylint doesn't
# know they exist
# pylint: disable=no-name-in-module
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.json_utils import data
from ansible.module_utils.mork import data as mork_data
results = {"json_utils": data, "mork": mork_data}
AnsibleModule(argument_spec=dict()).exit_json(**results)
|
Disable pylint check for names existing in modules for test data
|
Disable pylint check for names existing in modules for test data
This test data imports from modules which are only available via
PluginLoader for this test case. So pylint doesn't know anything about
them
|
Python
|
mit
|
thaim/ansible,thaim/ansible
|
7872abf00b24a504fccba576b13ecdd140e0135f
|
pybb/read_tracking.py
|
pybb/read_tracking.py
|
def update_read_tracking(topic, user):
tracking = user.readtracking
#if last_read > last_read - don't check topics
if tracking.last_read and tracking.last_read > (topic.last_post.updated or
topic.last_post.created):
return
if isinstance(tracking.topics, dict):
#clear topics if len > 5Kb and set last_read to current time
if len(tracking.topics) > 5120:
tracking.topics = None
tracking.last_read = datetime.now()
tracking.save()
#update topics if exist new post or does't exist in dict
if topic.last_post.pk > tracking.topics.get(str(topic.pk), 0):
tracking.topics.setdefault(str(topic.pk), topic.last_post.pk)
tracking.save()
else:
#initialize topic tracking dict
tracking.topics = {topic.pk: topic.last_post.pk}
tracking.save()
|
def update_read_tracking(topic, user):
tracking = user.readtracking
#if last_read > last_read - don't check topics
if tracking.last_read and tracking.last_read > (topic.last_post.updated or
topic.last_post.created):
return
if isinstance(tracking.topics, dict):
#clear topics if len > 5Kb and set last_read to current time
if len(tracking.topics) > 5120:
tracking.topics = None
tracking.last_read = datetime.now()
tracking.save()
#update topics if new post exists or cache entry is empty
if topic.last_post.pk > tracking.topics.get(str(topic.pk), 0):
tracking.topics[str(topic.pk)] = topic.last_post.pk
tracking.save()
else:
#initialize topic tracking dict
tracking.topics = {topic.pk: topic.last_post.pk}
tracking.save()
|
Fix bug in read tracking system
|
Fix bug in read tracking system
|
Python
|
bsd-2-clause
|
ttyS15/pybbm,onecue/pybbm,katsko/pybbm,katsko/pybbm,wengole/pybbm,wengole/pybbm,webu/pybbm,acamposruiz/quecoins,springmerchant/pybbm,NEERAJIITKGP/pybbm,webu/pybbm,concentricsky/pybbm,skolsuper/pybbm,hovel/pybbm,NEERAJIITKGP/pybbm,hovel/pybbm,webu/pybbm,artfinder/pybbm,onecue/pybbm,katsko/pybbm,ttyS15/pybbm,wengole/pybbm,jonsimington/pybbm,jonsimington/pybbm,ttyS15/pybbm,skolsuper/pybbm,just-work/pybbm,zekone/dj_pybb,onecue/pybbm,hovel/pybbm,just-work/pybbm,acamposruiz/quecoins,acamposruiz/quecoins,springmerchant/pybbm,springmerchant/pybbm,skolsuper/pybbm,artfinder/pybbm,zekone/dj_pybb,concentricsky/pybbm,DylannCordel/pybbm,just-work/pybbm,zekone/dj_pybb,DylannCordel/pybbm,jonsimington/pybbm,artfinder/pybbm,concentricsky/pybbm,DylannCordel/pybbm,NEERAJIITKGP/pybbm
|
346ffdb3e3836e2931f838a6dd929a325da0d5e6
|
tests/test_arithmetic.py
|
tests/test_arithmetic.py
|
from intervals import Interval
class TestArithmeticOperators(object):
def test_add_operator(self):
assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4)
def test_sub_operator(self):
assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2)
def test_isub_operator(self):
range_ = Interval(1, 3)
range_ -= Interval(1, 2)
assert range_ == Interval(-1, 2)
def test_iadd_operator(self):
range_ = Interval(1, 2)
range_ += Interval(1, 2)
assert range_ == Interval(2, 4)
|
from pytest import mark
from intervals import Interval
class TestArithmeticOperators(object):
def test_add_operator(self):
assert Interval(1, 2) + Interval(1, 2) == Interval(2, 4)
def test_sub_operator(self):
assert Interval(1, 3) - Interval(1, 2) == Interval(-1, 2)
def test_isub_operator(self):
range_ = Interval(1, 3)
range_ -= Interval(1, 2)
assert range_ == Interval(-1, 2)
def test_iadd_operator(self):
range_ = Interval(1, 2)
range_ += Interval(1, 2)
assert range_ == Interval(2, 4)
@mark.parametrize(('first', 'second', 'intersection'), (
('[1, 5]', '[2, 9]', '[2, 5]'),
('[3, 4]', '[3, 9]', '[3, 4]'),
('(3, 6]', '[2, 6)', '(3, 6)')
))
def test_intersection(self, first, second, intersection):
Interval(first) & Interval(second) == Interval(intersection)
|
Add some tests for intersection
|
Add some tests for intersection
|
Python
|
bsd-3-clause
|
kvesteri/intervals
|
4cfd8771b91c7c2b9f28ca4b9776e9770683093b
|
frigg/builds/admin.py
|
frigg/builds/admin.py
|
# -*- coding: utf8 -*-
from django.contrib import admin
from .models import Build, BuildResult, Project
class BuildResultInline(admin.StackedInline):
model = BuildResult
readonly_fields = ('result_log', 'succeeded', 'return_code')
extra = 0
max_num = 0
class BuildInline(admin.TabularInline):
model = Build
readonly_fields = ('build_number', 'branch', 'color', 'pull_request_id', 'sha')
extra = 0
max_num = 0
@admin.register(Project)
class ProjectAdmin(admin.ModelAdmin):
list_display = ('__str__', 'git_repository', 'average_time', 'last_build_number')
inlines = [BuildInline]
list_filter = ['owner']
@admin.register(Build)
class BuildAdmin(admin.ModelAdmin):
list_display = ('build_number', 'project', 'branch', 'pull_request_id', 'sha', 'color')
inlines = [BuildResultInline]
list_filter = ['project']
@admin.register(BuildResult)
class BuildResultAdmin(admin.ModelAdmin):
list_display = ('__str__', 'succeeded', 'return_code', 'coverage')
|
# -*- coding: utf8 -*-
from django.contrib import admin
from django.template.defaultfilters import pluralize
from .models import Build, BuildResult, Project
class BuildResultInline(admin.StackedInline):
model = BuildResult
readonly_fields = ('result_log', 'succeeded', 'return_code')
extra = 0
max_num = 0
class BuildInline(admin.TabularInline):
model = Build
readonly_fields = ('build_number', 'branch', 'color', 'pull_request_id', 'sha')
extra = 0
max_num = 0
@admin.register(Project)
class ProjectAdmin(admin.ModelAdmin):
list_display = ('__str__', 'git_repository', 'average_time', 'last_build_number')
inlines = [BuildInline]
list_filter = ['owner']
@admin.register(Build)
class BuildAdmin(admin.ModelAdmin):
list_display = ('build_number', 'project', 'branch', 'pull_request_id', 'sha', 'color')
inlines = [BuildResultInline]
list_filter = ['project']
actions = ['restart_build']
def restart_build(self, request, queryset):
for build in queryset:
build.start()
self.message_user(
request,
'{} build{} was restarted'.format(len(queryset), pluralize(len(queryset)))
)
restart_build.short_description = 'Restart selected builds'
@admin.register(BuildResult)
class BuildResultAdmin(admin.ModelAdmin):
list_display = ('__str__', 'succeeded', 'return_code', 'coverage')
|
Add restart_build action to BuildAdmin
|
Add restart_build action to BuildAdmin
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
b1b1392d2f268a5c74fd21c826a3ea6387567cab
|
froide/bounce/apps.py
|
froide/bounce/apps.py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class BounceConfig(AppConfig):
name = 'froide.bounce'
verbose_name = _('Bounce')
def ready(self):
from froide.account import account_canceled
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import Bounce
if user is None:
return
Bounce.objects.filter(user=user).delete()
|
import json
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class BounceConfig(AppConfig):
name = 'froide.bounce'
verbose_name = _('Bounce')
def ready(self):
from froide.account import account_canceled
from froide.account.export import registry
account_canceled.connect(cancel_user)
registry.register(export_user_data)
def cancel_user(sender, user=None, **kwargs):
from .models import Bounce
if user is None:
return
Bounce.objects.filter(user=user).delete()
def export_user_data(user):
from .models import Bounce
bounces = Bounce.objects.filter(user=user)
if not bounces:
return
yield ('bounces.json', json.dumps([
{
'last_update': (
b.last_update.isoformat() if b.last_update else None
),
'bounces': b.bounces,
'email': b.email,
}
for b in bounces]).encode('utf-8')
)
|
Add user data export for bounce handling
|
Add user data export for bounce handling
|
Python
|
mit
|
fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide
|
f9d7f69d7e8ae1dceaba09ac4412438076261744
|
tests/test_completion.py
|
tests/test_completion.py
|
import os
import subprocess
import sys
from pathlib import Path
import typer
from typer.testing import CliRunner
from first_steps import tutorial001 as mod
runner = CliRunner()
app = typer.Typer()
app.command()(mod.main)
def test_show_completion():
result = subprocess.run(
[
"bash",
"-c",
f"{sys.executable} -m coverage run {mod.__file__} --show-completion",
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding="utf-8",
env={**os.environ, "SHELL": "/bin/bash"},
)
assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in result.stdout
def test_install_completion():
bash_completion_path: Path = Path.home() / ".bash_completion"
text = ""
if bash_completion_path.is_file():
text = bash_completion_path.read_text()
result = subprocess.run(
[
"bash",
"-c",
f"{sys.executable} -m coverage run {mod.__file__} --install-completion",
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding="utf-8",
env={**os.environ, "SHELL": "/bin/bash"},
)
new_text = bash_completion_path.read_text()
assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in new_text
bash_completion_path.write_text(text)
|
import os
import subprocess
import sys
from pathlib import Path
import typer
from typer.testing import CliRunner
from first_steps import tutorial001 as mod
runner = CliRunner()
app = typer.Typer()
app.command()(mod.main)
def test_show_completion():
result = subprocess.run(
[
"bash",
"-c",
f"{sys.executable} -m coverage run {mod.__file__} --show-completion",
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding="utf-8",
env={**os.environ, "SHELL": "/bin/bash"},
)
assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in result.stdout
def test_install_completion():
bash_completion_path: Path = Path.home() / ".bash_completion"
text = ""
if bash_completion_path.is_file():
text = bash_completion_path.read_text()
result = subprocess.run(
[
"bash",
"-c",
f"{sys.executable} -m coverage run {mod.__file__} --install-completion",
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding="utf-8",
env={**os.environ, "SHELL": "/bin/bash"},
)
new_text = bash_completion_path.read_text()
bash_completion_path.write_text(text)
assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in new_text
assert "completion installed in" in result.stdout
assert "Completion will take effect once you restart the terminal." in result.stdout
|
Update completion tests, checking for printed message
|
:white_check_mark: Update completion tests, checking for printed message
|
Python
|
mit
|
tiangolo/typer,tiangolo/typer
|
83ea38ee5616b1637cc2d983d4518d83793c7b72
|
lint/events.py
|
lint/events.py
|
from collections import defaultdict
import traceback
LINT_START = 'LINT_START'
LINT_RESULT = 'LINT_RESULT'
LINT_END = 'LINT_END'
listeners = defaultdict(set)
def subscribe(topic, fn):
listeners[topic].add(fn)
def unsubscribe(topic, fn):
try:
listeners[topic].remove(fn)
except KeyError:
pass
def broadcast(topic, message=None):
payload = message.copy() or {}
for fn in listeners.get(topic, []):
try:
fn(**payload)
except Exception:
traceback.print_exc()
map_fn_to_topic = {}
def on(topic):
def inner(fn):
subscribe(topic, fn)
map_fn_to_topic[fn] = topic
return fn
return inner
def off(fn):
topic = map_fn_to_topic.get(fn, None)
if topic:
unsubscribe(topic, fn)
|
from collections import defaultdict
import traceback
LINT_START = 'LINT_START' # (buffer_id)
LINT_RESULT = 'LINT_RESULT' # (buffer_id, linter_name, errors)
LINT_END = 'LINT_END' # (buffer_id)
listeners = defaultdict(set)
def subscribe(topic, fn):
listeners[topic].add(fn)
def unsubscribe(topic, fn):
try:
listeners[topic].remove(fn)
except KeyError:
pass
def broadcast(topic, message=None):
payload = message.copy() or {}
for fn in listeners.get(topic, []):
try:
fn(**payload)
except Exception:
traceback.print_exc()
map_fn_to_topic = {}
def on(topic):
def inner(fn):
subscribe(topic, fn)
map_fn_to_topic[fn] = topic
return fn
return inner
def off(fn):
topic = map_fn_to_topic.get(fn, None)
if topic:
unsubscribe(topic, fn)
|
Add very brief comments about the event types
|
Add very brief comments about the event types
|
Python
|
mit
|
SublimeLinter/SublimeLinter3,SublimeLinter/SublimeLinter3
|
4286d2d6a685571c70a8f48c3cd6802d13c4acef
|
braid/postgres.py
|
braid/postgres.py
|
from fabric.api import sudo, quiet
from braid import package
from pipes import quote
def install():
package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query):
with quiet():
return sudo('psql --no-align --no-readline --no-password --quiet '
'--tuples-only -c {}'.format(quote(query)),
user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
res = _runQuery("select count(*) from pg_database "
"where datname = '{}';".format(name))
return res == '1'
def _userExists(name):
res = _runQuery("select count(*) from pg_user "
"where usename = '{}';".format(name))
return res == '1'
def createUser(name):
if not _userExists(name):
sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
if not _dbExists(name):
sudo('createdb -O {} {}'.format(owner, name), user='postgres',
pty=False)
def grantRead(user, database):
"""
Grant read permissions to C{user} to all tables in C{database}.
"""
def grantReadWrite(user, database):
"""
Grant read and write permissions to C{user} to all tables in C{database}.
"""
|
from fabric.api import sudo, hide
from braid import package
from pipes import quote
def install():
    """Install the PostgreSQL 9.1 server and development headers."""
    package.install(['postgresql-9.1', 'postgresql-server-dev-9.1'])
def _runQuery(query):
    """Run *query* through psql as the postgres user and return its output.

    Running/output messages are hidden from the fabric log, but a failing
    query still fails the task (unlike ``quiet()``, ``hide`` does not set
    warn_only).
    """
    with hide('running', 'output'):
        return sudo('psql --no-align --no-readline --no-password --quiet '
                    '--tuples-only -c {}'.format(quote(query)),
                    user='postgres', pty=False, combine_stderr=False)
def _dbExists(name):
    """Return True when a database named *name* already exists."""
    res = _runQuery("select count(*) from pg_database "
                    "where datname = '{}';".format(name))
    # --tuples-only prints the bare count, so '1' means exactly one match.
    return res == '1'
def _userExists(name):
    """Return True when a PostgreSQL role named *name* already exists."""
    res = _runQuery("select count(*) from pg_user "
                    "where usename = '{}';".format(name))
    return res == '1'
def createUser(name):
    """Idempotently create role *name* (no createdb/createrole/superuser)."""
    if not _userExists(name):
        sudo('createuser -D -R -S {}'.format(name), user='postgres', pty=False)
def createDb(name, owner):
    """Idempotently create database *name* owned by *owner*."""
    if not _dbExists(name):
        sudo('createdb -O {} {}'.format(owner, name), user='postgres',
             pty=False)
def grantRead(user, database):
    """
    Grant read permissions to C{user} to all tables in C{database}.
    """
    # NOTE(review): not implemented yet -- currently a no-op.
def grantReadWrite(user, database):
    """
    Grant read and write permissions to C{user} to all tables in C{database}.
    """
    # NOTE(review): not implemented yet -- currently a no-op.
|
Make _runQuery fail if the query fails, but still hide the execution messages
|
Make _runQuery fail if the query fails, but still hide the execution messages
|
Python
|
mit
|
alex/braid,alex/braid
|
92d253fdce108162ab2ce05dd38da971ca42293d
|
keystone/contrib/kds/common/service.py
|
keystone/contrib/kds/common/service.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from keystone.openstack.common import log
CONF = cfg.CONF
API_SERVICE_OPTS = [
cfg.StrOpt('bind_ip',
default='0.0.0.0',
help='IP for the server to bind to'),
cfg.IntOpt('port',
default=9109,
help='The port for the server'),
]
CONF.register_opts(API_SERVICE_OPTS)
def parse_args(args, default_config_files=None):
CONF(args=args[1:],
project='kds',
default_config_files=default_config_files)
def prepare_service(argv=[]):
cfg.set_defaults(log.log_opts,
default_log_levels=['sqlalchemy=WARN',
'eventlet.wsgi.server=WARN'
])
parse_args(argv)
log.setup('kds')
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from keystone.openstack.common import log
CONF = cfg.CONF
# Config-file options grouped by section; the None key registers its
# options in the [DEFAULT] section.
FILE_OPTIONS = {
    None: [
        cfg.StrOpt('bind_ip',
                   default='0.0.0.0',
                   help='IP for the server to bind to'),
        cfg.IntOpt('port',
                   default=9109,
                   help='The port for the server')]}
def configure(conf=None):
    """Register every KDS option on *conf* (the global CONF by default)."""
    if conf is None:
        conf = CONF
    for group in FILE_OPTIONS:
        conf.register_opts(FILE_OPTIONS[group], group=group)
def parse_args(args, default_config_files=None):
    """Parse CLI args (args[0] is the program name and is dropped) and
    config files for project 'kds'."""
    CONF(args=args[1:],
         project='kds',
         default_config_files=default_config_files)
def prepare_service(argv=[]):
    """Initialise configuration and logging for a KDS service process."""
    # Quieten the noisiest third-party loggers by default.
    cfg.set_defaults(log.log_opts,
                     default_log_levels=['sqlalchemy=WARN',
                                         'eventlet.wsgi.server=WARN'
                                         ])
    parse_args(argv)
    log.setup('kds')
    configure()
|
Restructure KDS options to be more like Keystone's options
|
Restructure KDS options to be more like Keystone's options
Restructure the KDS options to be more closely aligned with the way
Keystone options work and allowing movement towards not registering
the options on import. This will also prevent KDS options from
appearing in the Keystone auto-generated sample config.
Change-Id: I073aa58ff3132e2714478f54c88c3a8200ff47da
|
Python
|
apache-2.0
|
rushiagr/keystone,jumpstarter-io/keystone,reeshupatel/demo,dstanek/keystone,idjaw/keystone,jonnary/keystone,vivekdhayaal/keystone,MaheshIBM/keystone,klmitch/keystone,rajalokan/keystone,rajalokan/keystone,nuxeh/keystone,ging/keystone,rushiagr/keystone,takeshineshiro/keystone,ilay09/keystone,nuxeh/keystone,roopali8/keystone,JioCloud/keystone,himanshu-setia/keystone,blueboxgroup/keystone,dims/keystone,vivekdhayaal/keystone,jamielennox/keystone,maestro-hybrid-cloud/keystone,idjaw/keystone,ilay09/keystone,maestro-hybrid-cloud/keystone,ajayaa/keystone,mahak/keystone,mahak/keystone,blueboxgroup/keystone,jamielennox/keystone,cernops/keystone,openstack/keystone,ging/keystone,rushiagr/keystone,rodrigods/keystone,JioCloud/keystone,openstack/keystone,jonnary/keystone,klmitch/keystone,reeshupatel/demo,takeshineshiro/keystone,dstanek/keystone,MaheshIBM/keystone,UTSA-ICS/keystone-kerberos,ajayaa/keystone,dstanek/keystone,rodrigods/keystone,nuxeh/keystone,vivekdhayaal/keystone,rajalokan/keystone,promptworks/keystone,cernops/keystone,jumpstarter-io/keystone,openstack/keystone,mahak/keystone,himanshu-setia/keystone,roopali8/keystone,ilay09/keystone,dims/keystone,reeshupatel/demo,promptworks/keystone,jumpstarter-io/keystone,UTSA-ICS/keystone-kerberos,promptworks/keystone
|
eaa13f9005a8aaf8c748a98de697b03eee9e675b
|
salt/client/netapi.py
|
salt/client/netapi.py
|
# encoding: utf-8
'''
The main entry point for salt-api
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import salt-api libs
import salt.loader
import salt.utils.process
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
self.process_manager = salt.utils.process.ProcessManager()
self.netapi = salt.loader.netapi(self.opts)
def run(self):
'''
Load and start all available api modules
'''
for fun in self.netapi:
if fun.endswith('.start'):
logger.info('Starting {0} netapi module'.format(fun))
self.process_manager.add_process(self.netapi[fun])
self.process_manager.run()
|
# encoding: utf-8
'''
The main entry point for salt-api
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import salt-api libs
import salt.loader
import salt.utils.process
logger = logging.getLogger(__name__)
class NetapiClient(object):
    '''
    Start each netapi module that is configured to run
    '''
    def __init__(self, opts):
        # opts: the parsed salt configuration dictionary.
        self.opts = opts
        self.process_manager = salt.utils.process.ProcessManager()
        # Loader dict of netapi module functions keyed by 'modname.funcname'.
        self.netapi = salt.loader.netapi(self.opts)
    def run(self):
        '''
        Load and start all available api modules
        '''
        # Warn instead of exiting silently when no netapi module is configured.
        if not len(self.netapi):
            logger.error("Did not find any netapi configurations, nothing to start")
        for fun in self.netapi:
            # Each runnable netapi module exposes a '<name>.start' entry point.
            if fun.endswith('.start'):
                logger.info('Starting {0} netapi module'.format(fun))
                self.process_manager.add_process(self.netapi[fun])
        # Blocks here, supervising the spawned processes.
        self.process_manager.run()
|
Add log error if we run salt-api w/ no config
|
Add log error if we run salt-api w/ no config
Currently, the salt-api script will exit with no error or hint of why it
failed if there is no netapi module configured. Added a short line if
we find no api modules to start, warning the user that the config may be
missing.
Fixes #28240
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
0f1ed52e7525ea5f41d63642bca1eaeb9d5af8ba
|
emission/core/wrapper/labelprediction.py
|
emission/core/wrapper/labelprediction.py
|
# Based on modeprediction.py
import emission.core.wrapper.wrapperbase as ecwb
class Labelprediction(ecwb.WrapperBase):
props = {"trip_id": ecwb.WrapperBase.Access.WORM, # the trip that this is part of
"prediction": ecwb.WrapperBase.Access.WORM, # What we predict
"start_ts": ecwb.WrapperBase.Access.WORM, # start time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline
"end_ts": ecwb.WrapperBase.Access.WORM, # end time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline
}
enums = {}
geojson = {}
local_dates = {}
def _populateDependencies(self):
pass
|
# Based on modeprediction.py
import emission.core.wrapper.wrapperbase as ecwb
# The "prediction" data structure is a list of label possibilities, each one consisting of a set of labels and a probability:
# [
# {"labels": {"labeltype1": "labelvalue1", "labeltype2": "labelvalue2"}, "p": 0.61},
# {"labels": {"labeltype1": "labelvalue3", "labeltype2": "labelvalue4"}, "p": 0.27},
# ...
# ]
class Labelprediction(ecwb.WrapperBase):
    """Inferred label possibilities for a trip; see the module-level comment
    above for the shape of the "prediction" list."""
    props = {"trip_id": ecwb.WrapperBase.Access.WORM, # the trip that this is part of
             "prediction": ecwb.WrapperBase.Access.WORM, # What we predict -- see above
             "start_ts": ecwb.WrapperBase.Access.WORM, # start time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline
             "end_ts": ecwb.WrapperBase.Access.WORM, # end time for the prediction, so that it can be captured in time-based queries, e.g. to reset the pipeline
    }
    enums = {}
    geojson = {}
    local_dates = {}
    def _populateDependencies(self):
        # No linked wrapper objects to populate for label predictions.
        pass
|
Add comments explaining prediction data structure
|
Add comments explaining prediction data structure
|
Python
|
bsd-3-clause
|
shankari/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server
|
4e483d6443e809f9e7e1a59c3fe959fd5f42f938
|
simple-cipher/simple_cipher.py
|
simple-cipher/simple_cipher.py
|
import math
import secrets
from string import ascii_lowercase
class Cipher(object):
    """Vigenere-style substitution cipher over lowercase ASCII letters."""
    def __init__(self, key=None):
        """Build a cipher from *key*; generate a random 256-letter key when
        none is given. Raises ValueError for non-lowercase keys."""
        if not key:
            key = Cipher._random_key()
        if not key.isalpha() or not key.islower():
            raise ValueError("Key must consist only of lowercase letters")
        self.key = key
        # Per-letter shift amounts: 'a' -> 0 ... 'z' -> 25.
        self._key = [ord(k)-97 for k in key]
    def encode(self, s):
        """Encrypt *s*, dropping any character outside a-z after lowercasing."""
        # Repeat the key until it is at least as long as the raw input.
        key = self._key * math.ceil(len(s)/len(self._key))
        chars = [c for c in s.lower() if c in ascii_lowercase]
        return "".join(Cipher._shift(c, k) for c, k in zip(chars, key))
    def decode(self, s):
        """Decrypt *s*; the inverse of encode (negated shifts)."""
        key = self._key * math.ceil(len(s)/len(self._key))
        chars = [c for c in s.lower() if c in ascii_lowercase]
        return "".join(Cipher._shift(c, -k) for c, k in zip(chars, key))
    @staticmethod
    def _shift(char, key):
        # Rotate one lowercase letter by *key* positions, wrapping at 'z'.
        return chr(97 + ((ord(char) - 97 + key) % 26))
    @staticmethod
    def _random_key(length=256):
        # secrets provides cryptographically sound randomness for keys.
        return "".join(secrets.choice(ascii_lowercase) for _ in range(length))
class Caesar(Cipher):
    """Classic Caesar cipher: a Cipher with the fixed key 'd' (shift of 3)."""
    def __init__(self):
        Cipher.__init__(self, "d")
|
import math
import secrets
from string import ascii_lowercase
class Cipher(object):
    """Vigenere-style substitution cipher over the lowercase ASCII alphabet."""

    def __init__(self, key=None):
        """Build a cipher from *key*, generating a random 256-letter key
        when none is supplied. Raises ValueError for non-lowercase keys."""
        key = key or self._random_key()
        if not (key.isalpha() and key.islower()):
            raise ValueError("Key must consist only of lowercase letters")
        self.key = key
        # Per-letter shift amounts: 'a' -> 0 ... 'z' -> 25.
        self._key = [ord(ch) - 97 for ch in key]

    def encode(self, s):
        """Encrypt *s*, ignoring any character outside a-z after lowercasing."""
        return self._apply(s, 1)

    def decode(self, s):
        """Decrypt *s*; the exact inverse of encode."""
        return self._apply(s, -1)

    def _apply(self, s, direction):
        # Repeat the key until it covers the whole (unfiltered) input, then
        # shift each surviving letter forwards (+1) or backwards (-1).
        shifts = self._key * math.ceil(len(s) / len(self._key))
        letters = (ch for ch in s.lower() if ch in ascii_lowercase)
        return "".join(self._shift(ch, direction * k)
                       for ch, k in zip(letters, shifts))

    @staticmethod
    def _shift(char, key):
        # Rotate one lowercase letter by *key* positions, wrapping at 'z'.
        return ascii_lowercase[(ord(char) - 97 + key) % 26]

    @staticmethod
    def _random_key(length=256):
        # secrets provides cryptographically sound randomness for keys.
        return "".join([secrets.choice(ascii_lowercase) for _ in range(length)])


class Caesar(Cipher):
    """Classic Caesar cipher: a Cipher with the fixed key 'd' (shift of 3)."""

    def __init__(self):
        super().__init__("d")
|
Use super() and self within the Cipher and Caesar classes
|
Use super() and self within the Cipher and Caesar classes
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
5ca96beb26dd2ab5285a57f5cade6f01160df368
|
joequery/blog/posts/code/notes-on-dynamic-programming-part-1/meta.py
|
joequery/blog/posts/code/notes-on-dynamic-programming-part-1/meta.py
|
title="Notes on dynamic programming - part 1"
description="""
Part 1 of extensive notes discussing the fundamentals of dynamic programming.
Examples in these notes include the Fibonacci sequence and Warshall's
algorithm. Pseudocode and Python implementations of the algorithms are
provided.
"""
time="2012-12-10 Mon 02:28 AM"
# related=[("Some article", "its/url")]
|
title="Notes on dynamic programming - part 1"
description="""
Part 1 of extensive notes discussing the fundamentals of dynamic programming.
Examples in these notes include the Fibonacci sequence, the Binomial Formula,
and Warshall's algorithm. Python implementations of the algorithms are
provided.
"""
time="2012-12-10 Mon 02:48 AM"
# related=[("Some article", "its/url")]
|
Update description and timestamp for dynamic programming part 1
|
Update description and timestamp for dynamic programming part 1
|
Python
|
mit
|
joequery/joequery.me,joequery/joequery.me,joequery/joequery.me,joequery/joequery.me
|
c04b9813b5d6d3f8bc8eaa7be2d49d32f150aaf2
|
tests/test_authentication.py
|
tests/test_authentication.py
|
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
    """Integration tests for the authentication endpoints."""

    def setUp(self):
        """Configure the app for testing and create a fresh schema."""
        app.config.from_object(application_config['TestingEnv'])
        self.client = app.test_client()
        # Binds the app to the current context
        with app.app_context():
            # Create all tables
            db.create_all()

    def test_index_route(self):
        """The index route should respond with a welcome message."""
        response = self.client.get('/')
        self.assertEqual(response.status_code, 201)
        self.assertIn('Welcome Message', response.data.decode())

    # Fixed typo in the method name: 'dredentials' -> 'credentials'.
    def test_registration_with_missing_credentials(self):
        """Registration should respond 400 when credentials are missing."""
        user = json.dumps({
            'name': '',
            'email': '',
            'password': ''
        })
        response = self.client.post('/auth/register', data=user)
        self.assertEqual(response.status_code, 400)
        self.assertIn('Missing', response.data.decode())

    def tearDown(self):
        """Drop all tables so every test starts from a clean database."""
        with app.app_context():
            db.session.remove()
            db.drop_all()
|
Add test for user with missing credentials
|
Add test for user with missing credentials
|
Python
|
mit
|
patlub/BucketListAPI,patlub/BucketListAPI
|
47f1b47f37da4f9a3444a2ac6cc7b7a0affafbf3
|
node_bridge.py
|
node_bridge.py
|
import os
import platform
import subprocess
IS_MACOS = platform.system() == 'Darwin'
IS_WINDOWS = platform.system() == 'Windows'
def node_bridge(data, bin, args=[]):
env = None
startupinfo = None
if IS_MACOS:
# GUI apps on macOS doesn't contain .bashrc/.zshrc set paths
env = os.environ.copy()
env['PATH'] += ':/usr/local/bin'
if IS_WINDOWS:
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
try:
p = subprocess.Popen(['node', bin] + args,
stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE,
env=env, startupinfo=startupinfo)
except OSError:
raise Exception('Couldn\'t find Node.js. Make sure it\'s in your $PATH by running `node -v` in your command-line.')
stdout, stderr = p.communicate(input=data.encode('utf-8'))
stdout = stdout.decode('utf-8')
stderr = stderr.decode('utf-8')
if stderr:
raise Exception('Error: %s' % stderr)
else:
return stdout
|
import os
import platform
import subprocess
IS_MACOS = platform.system() == 'Darwin'
IS_WINDOWS = platform.system() == 'Windows'
def node_bridge(data, bin, args=None):
    """Pipe *data* (str) to the Node.js script *bin* and return its stdout.

    *args* are extra CLI arguments for the script.  Raises Exception when
    Node.js is not on the PATH or when the script writes to stderr.
    """
    # Avoid the shared mutable-default-argument pitfall.
    if args is None:
        args = []
    env = None
    startupinfo = None
    if IS_MACOS:
        # GUI apps on macOS don't inherit .bashrc/.zshrc PATH entries, so
        # append the common install locations (`n` version manager, Homebrew).
        env = os.environ.copy()
        # BUG FIX: the '~/n/bin' entry was appended without a ':' separator,
        # corrupting the last existing PATH entry instead of adding a new one.
        env['PATH'] += ':' + os.path.expanduser('~/n/bin')
        env['PATH'] += ':/usr/local/bin'
    if IS_WINDOWS:
        # Prevent a console window from flashing up for the child process.
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    try:
        p = subprocess.Popen(['node', bin] + args,
            stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE,
            env=env, startupinfo=startupinfo)
    except OSError:
        raise Exception('Couldn\'t find Node.js. Make sure it\'s in your $PATH by running `node -v` in your command-line.')
    stdout, stderr = p.communicate(input=data.encode('utf-8'))
    stdout = stdout.decode('utf-8')
    stderr = stderr.decode('utf-8')
    if stderr:
        raise Exception('Error: %s' % stderr)
    else:
        return stdout
|
Add support for `n` Node.js version manager
|
Add support for `n` Node.js version manager
|
Python
|
mit
|
sindresorhus/sublime-autoprefixer,sindresorhus/sublime-autoprefixer,sindresorhus/sublime-autoprefixer
|
c7660db45e0275a685a6cc450fd4341a69c52b92
|
threaded_multihost/fields.py
|
threaded_multihost/fields.py
|
from django.db.models import ForeignKey
from django.contrib.auth.models import User
import threadlocals
class UserField(ForeignKey):
""" UserField
By defaults, foreign key to User; null=True, blank=True
"""
def __init__(self, **kwargs):
kwargs.setdefault('null', True)
kwargs.setdefault('blank', True)
ForeignKey.__init__(self, User, **kwargs)
class CreatorField(UserField):
""" CreatorField
By default, sets editable=False, default=threadlocals.get_current_user
"""
def __init__(self, **kwargs):
kwargs.setdefault('editable', False)
kwargs.setdefault('default', threadlocals.get_current_user)
UserField.__init__(self, **kwargs)
class EditorField(CreatorField):
""" EditorField
By default, sets editable=False, default=threadlocals.get_current_user
Sets value to get_current_user() on each save of the model.
"""
def __init__(self, **kwargs):
super(CreatorField, self).__init__(**kwargs)
def pre_save(self, model_instance, add):
value = threadlocals.get_current_user()
setattr(model_instance, self.name, value)
if value:
value = value.pk
setattr(model_instance, self.attname, value)
return value
|
from django.db.models import ForeignKey
from django.contrib.auth.models import User
import threadlocals
class UserField(ForeignKey):
    """ UserField
    By defaults, foreign key to User; null=True, blank=True
    """
    def __init__(self, **kwargs):
        # 'to' is only a default so callers (e.g. South) can pass it explicitly.
        kwargs.setdefault('to', User)
        kwargs.setdefault('null', True)
        kwargs.setdefault('blank', True)
        ForeignKey.__init__(self, **kwargs)
class CreatorField(UserField):
    """ CreatorField
    By default, sets editable=False, default=threadlocals.get_current_user
    """
    def __init__(self, **kwargs):
        kwargs.setdefault('editable', False)
        kwargs.setdefault('default', threadlocals.get_current_user)
        UserField.__init__(self, **kwargs)
class EditorField(CreatorField):
    """ EditorField
    By default, sets editable=False, default=threadlocals.get_current_user
    Sets value to get_current_user() on each save of the model.
    """
    def __init__(self, **kwargs):
        # NOTE(review): super(CreatorField, self) resolves PAST CreatorField in
        # the MRO, so CreatorField.__init__ (editable/default) is skipped here
        # despite the docstring -- confirm whether super(EditorField, self)
        # was intended.
        super(CreatorField, self).__init__(**kwargs)
    def pre_save(self, model_instance, add):
        # Stamp the thread-local current user on every save, setting both the
        # object reference (self.name) and the raw FK column (self.attname).
        value = threadlocals.get_current_user()
        setattr(model_instance, self.name, value)
        if value:
            value = value.pk
        setattr(model_instance, self.attname, value)
        return value
try:
    # South is optional; register introspection rules only when installed.
    from south.modelsinspector import add_introspection_rules
except ImportError:
    add_introspection_rules = False
if add_introspection_rules:
    add_introspection_rules([], [r"^threaded_multihost\.fields\.(User|Creator|Editor)Field"])
|
Patch from chrischambers to enable south migrations.
|
Patch from chrischambers to enable south migrations.
|
Python
|
bsd-3-clause
|
diver-in-sky/django-threaded-multihost
|
305e88780fc2d3638fb3a9f33bfec8d6c295535e
|
feincms/views/base.py
|
feincms/views/base.py
|
from django.contrib.auth.decorators import permission_required
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from feincms.module.page.models import Page
def build_page_response(page, request):
response = page.setup_request(request)
if response is None:
extra_context = request._feincms_extra_context
response = render_to_response(page.template.path, {
'feincms_page': page,
}, context_instance=RequestContext(request, extra_context))
return response
def handler(request, path=None):
"""
This is the default handler for feincms page content.
"""
if path is None:
path = request.path
page = Page.objects.page_for_path_or_404(path)
response = build_page_response(page, request)
page.finalize_response(request, response)
return response
@permission_required('page.change_page')
def preview_handler(request, page_id):
"""
This handler is for previewing site content; it takes a page_id so
the page is uniquely identified and does not care whether the page
is active or expired. To balance that, it requires a logged in user.
"""
page = get_object_or_404(Page, pk=page_id)
return build_page_response(page, request)
|
from django.contrib.auth.decorators import permission_required
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from feincms.module.page.models import Page
def _build_page_response(page, request):
    """Render *page*, unless one of its request processors (run inside
    setup_request) already produced a response."""
    response = page.setup_request(request)
    if response is None:
        extra_context = request._feincms_extra_context
        response = render_to_response(page.template.path, {
            'feincms_page': page,
        }, context_instance=RequestContext(request, extra_context))
    return response
def build_page_response(page, request):
    """Build the response for *page* and run its response processors."""
    response = _build_page_response(page, request)
    page.finalize_response(request, response)
    return response
def handler(request, path=None):
    """
    This is the default handler for feincms page content.
    """
    if path is None:
        path = request.path
    page = Page.objects.page_for_path_or_404(path)
    response = build_page_response(page, request)
    return response
@permission_required('page.change_page')
def preview_handler(request, page_id):
    """
    This handler is for previewing site content; it takes a page_id so
    the page is uniquely identified and does not care whether the page
    is active or expired. To balance that, it requires a logged in user.
    """
    page = get_object_or_404(Page, pk=page_id)
    # Previews intentionally skip the response processors.
    return _build_page_response(page, request)
|
Make sure we invoke the response processors even for app content.
|
Make sure we invoke the response processors even for app content.
|
Python
|
bsd-3-clause
|
matthiask/feincms2-content,joshuajonah/feincms,nickburlett/feincms,nickburlett/feincms,hgrimelid/feincms,feincms/feincms,mjl/feincms,mjl/feincms,joshuajonah/feincms,pjdelport/feincms,matthiask/django-content-editor,feincms/feincms,matthiask/feincms2-content,matthiask/django-content-editor,hgrimelid/feincms,pjdelport/feincms,michaelkuty/feincms,joshuajonah/feincms,michaelkuty/feincms,mjl/feincms,feincms/feincms,michaelkuty/feincms,joshuajonah/feincms,michaelkuty/feincms,nickburlett/feincms,matthiask/django-content-editor,matthiask/django-content-editor,pjdelport/feincms,hgrimelid/feincms,matthiask/feincms2-content,nickburlett/feincms
|
fc472d043e81c2b5687a0f83dbbdd0dd02b73e35
|
flowtype/commands/exec_flow.py
|
flowtype/commands/exec_flow.py
|
import os
import json
import threading
import subprocess
import sublime
class ExecFlowCommand(threading.Thread):
"""Threaded class used for running flow commands in a different thread.
The subprocess must be threaded so we don't lockup the UI.
"""
def __init__(self, cmd, content):
"""Initialize with the command and the file content to send."""
self.cmd = cmd
self.content = content
self.stdout = None
self.returncode = 0
self.stderr = None
threading.Thread.__init__(self)
def run(self):
"""Execute the command in a subprocess."""
read, write = os.pipe()
os.write(write, str.encode(self.content))
os.close(write)
try:
output = subprocess.check_output(
self.cmd,
shell=sublime.platform() == 'windows',
stdin=read,
stderr=subprocess.STDOUT
)
if type(output) is bytes:
output = output.decode('utf-8')
try:
self.stdout = json.loads(output)
except ValueError:
self.stdout = output
os.close(read)
except subprocess.CalledProcessError as err:
self.stderr = str(err)
self.returncode = 1
|
import os
import json
import threading
import subprocess
import sublime
class ExecFlowCommand(threading.Thread):
    """Threaded class used for running flow commands in a different thread.
    The subprocess must be threaded so we don't lockup the UI.
    """
    def __init__(self, cmd, content):
        """Initialize with the command and the file content to send."""
        self.cmd = cmd
        self.content = content
        # Parsed JSON (or raw text) from the command on success.
        self.stdout = None
        self.returncode = 0
        # Human-readable error description on failure.
        self.stderr = None
        threading.Thread.__init__(self)
    def run(self):
        """Execute the command in a subprocess."""
        # Feed the buffer content to the subprocess through an OS pipe.
        read, write = os.pipe()
        os.write(write, str.encode(self.content))
        os.close(write)
        try:
            output = subprocess.check_output(
                self.cmd,
                # On Windows the shell is used to resolve the executable.
                shell=sublime.platform() == 'windows',
                stdin=read,
                stderr=subprocess.STDOUT
            )
            if type(output) is bytes:
                output = output.decode('utf-8')
            try:
                # Flow normally answers with JSON; fall back to raw text.
                self.stdout = json.loads(output)
            except ValueError:
                self.stdout = output
            os.close(read)
        except subprocess.CalledProcessError as err:
            # Include the child's combined output so the real cause (e.g.
            # "env: 'node': No such file or directory") is visible.
            if type(err.output) is bytes:
                output = err.output.decode('utf-8')
            else:
                output = err.output
            self.stderr = str(err) + ': ' + str(output)
            self.returncode = 1
|
Add error output to exec error messages
|
Add error output to exec error messages
e.g. for an error like "env: ‘node’: No such file or directory"
the sublime console was only reporting "exited with code 127"
which wasn't very helpful in determining the cause.
|
Python
|
mit
|
Pegase745/sublime-flowtype
|
3aa13efa28b4ededa465541a7db8df5fc5878ce3
|
tempora/tests/test_timing.py
|
tempora/tests/test_timing.py
|
import datetime
import time
import contextlib
import os
from unittest import mock
from tempora import timing
def test_IntervalGovernor():
"""
IntervalGovernor should prevent a function from being called more than
once per interval.
"""
func_under_test = mock.MagicMock()
# to look like a function, it needs a __name__ attribute
func_under_test.__name__ = 'func_under_test'
interval = datetime.timedelta(seconds=1)
governed = timing.IntervalGovernor(interval)(func_under_test)
governed('a')
governed('b')
governed(3, 'sir')
func_under_test.assert_called_once_with('a')
@contextlib.contextmanager
def change(alt_tz, monkeypatch):
monkeypatch.setitem(os.environ, 'TZ', alt_tz)
time.tzset()
try:
yield
finally:
monkeypatch.delitem(os.environ, 'TZ')
time.tzset()
def test_Stopwatch_timezone_change(monkeypatch):
"""
The stopwatch should provide a consistent duration even
if the timezone changes.
"""
watch = timing.Stopwatch()
with change('AEST-10AEDT-11,M10.5.0,M3.5.0', monkeypatch):
assert abs(watch.split().total_seconds()) < 0.1
|
import datetime
import time
import contextlib
import os
from unittest import mock
import pytest
from tempora import timing
def test_IntervalGovernor():
    """
    IntervalGovernor should prevent a function from being called more than
    once per interval.
    """
    func_under_test = mock.MagicMock()
    # to look like a function, it needs a __name__ attribute
    func_under_test.__name__ = 'func_under_test'
    interval = datetime.timedelta(seconds=1)
    governed = timing.IntervalGovernor(interval)(func_under_test)
    governed('a')
    governed('b')
    governed(3, 'sir')
    # Only the first call within the interval reaches the wrapped function.
    func_under_test.assert_called_once_with('a')
@pytest.fixture
def alt_tz(monkeypatch):
    """Provide a context manager that temporarily switches TZ to an
    Australian zone; skips on platforms without time.tzset (e.g. Windows)."""
    if not hasattr(time, 'tzset'):
        pytest.skip("tzset not available")
    @contextlib.contextmanager
    def change():
        val = 'AEST-10AEDT-11,M10.5.0,M3.5.0'
        with monkeypatch.context() as ctx:
            ctx.setitem(os.environ, 'TZ', val)
            time.tzset()
            yield
        # monkeypatch has restored os.environ; re-apply the original zone.
        time.tzset()
    return change()
def test_Stopwatch_timezone_change(alt_tz):
    """
    The stopwatch should provide a consistent duration even
    if the timezone changes.
    """
    watch = timing.Stopwatch()
    with alt_tz:
        assert abs(watch.split().total_seconds()) < 0.1
|
Rewrite alt_tz as proper fixture. Skip when tzset isn't available.
|
Rewrite alt_tz as proper fixture. Skip when tzset isn't available.
|
Python
|
mit
|
jaraco/tempora
|
c43820a2e26dd4f87c36b986a9a0af80b409f659
|
sentence_extractor.py
|
sentence_extractor.py
|
import textract
import sys
import os
import re
import random
###################################
# Extracts text from a pdf file and
# selects one sentence, which it
# then prints.
#
# Created by Fredrik Omstedt.
###################################
# Extracts texts from pdf files. If given a directory, the
# program will return texts from all pdf files in that directory.
def extractTexts():
file = sys.argv[1]
texts = []
if os.path.isdir(file):
for f in os.listdir(file):
if re.match(r'^.*\.pdf$', f):
texts.append(textract.process(file + "/" + f))
else:
texts.append(textract.process(file))
return texts
# Chooses one sentence randomly from each of the given texts.
def selectSentences(texts):
chosen_sentences = []
for text in texts:
sentence_structure = re.compile(r'([A-Z][^\.!?]*[\.!?])', re.M)
sentences = sentence_structure.findall(text)
chosen_sentences.append(
sentences[random.randint(0, len(sentences)-1)].replace("\n", " ")
)
return chosen_sentences
def main():
texts = extractTexts()
sentences = selectSentences(texts)
for sentence in sentences:
print(sentence)
print("\n")
if __name__ == '__main__':
main()
|
import textract
import sys
import os
import re
import random
###################################
# Extracts text from a pdf file and
# selects one sentence, which it
# then prints.
#
# Created by Fredrik Omstedt.
###################################
# Extracts texts from pdf files. If given a directory, the
# program will return texts from all pdf files in that directory.
def extractTexts():
    """Return extracted text for the pdf path given as the first CLI
    argument, or for every pdf in it when the argument is a directory."""
    # NOTE(review): 'file' shadows the Python 2 builtin of the same name.
    file = sys.argv[1]
    texts = []
    if os.path.isdir(file):
        for f in os.listdir(file):
            if re.match(r'^.*\.pdf$', f):
                texts.append(textract.process(file + "/" + f))
    else:
        texts.append(textract.process(file))
    return texts
def selectSentences(texts):
    """Pick one randomly chosen sentence from each text in *texts*."""
    chosen_sentences = []
    for text in texts:
        # A sentence starts with an uppercase letter (including latin-1
        # A-ring/A-umlaut/O-umlaut) and runs to the first . ! or ?
        sentence_structure = re.compile(r'([A-Z\xc4\xc5\xd6][^\.!?]*[\.!?])', re.M)
        sentences = sentence_structure.findall(text)
        chosen_sentences.append(
            sentences[random.randint(0, len(sentences)-1)].replace("\n", " ")
        )
    return chosen_sentences
def main():
    """Extract texts, choose one sentence from each, and print them."""
    texts = extractTexts()
    sentences = selectSentences(texts)
    for sentence in sentences:
        print(sentence)
        print("\n")
if __name__ == '__main__':
    main()
|
Update regex to match sentences starting with ÅÄÖ
|
Update regex to match sentences starting with ÅÄÖ
|
Python
|
mit
|
Xaril/sentence-extractor,Xaril/sentence-extractor
|
c8cc1f8e0e9b6d7dfb29ff9aef04bf2b5867cceb
|
genomediff/records.py
|
genomediff/records.py
|
class Metadata(object):
def __init__(self, name, value):
self.name = name
self.value = value
def __repr__(self):
return "Metadata({}, {})".format(repr(self.name), repr(self.value))
def __eq__(self, other):
return self.__dict__ == other.__dict__
class Record(object):
def __init__(self, type, id, document=None, parent_ids=None, **extra):
self.document = document
self.type = type
self.id = id
self.parent_ids = parent_ids
self._extra = extra
@property
def parents(self):
if not self.parent_ids is None:
return [self.document[pid] for pid in self.parent_ids]
else:
return []
def __getattr__(self, item):
return self._extra[item]
def __repr__(self):
return "Record('{}', {}, {}, {})".format(self.type,
self.id,
self.parent_ids,
', '.join('{}={}'.format(k, repr(v)) for k, v in self._extra.items()))
def __eq__(self, other):
return self.__dict__ == other.__dict__
|
class Metadata(object):
    """A single name/value metadata entry from a genomediff header."""

    def __init__(self, name, value):
        self.name = name
        self.value = value

    def __repr__(self):
        return "Metadata({!r}, {!r})".format(self.name, self.value)

    def __eq__(self, other):
        # Two entries are equal when all their fields match.
        return self.__dict__ == other.__dict__
class Record(object):
    """One genomediff record: a type, an id, optional parent record ids,
    and arbitrary extra attributes exposed through attribute access."""
    def __init__(self, type, id, document=None, parent_ids=None, **attributes):
        # The owning document, used to resolve parent ids into records.
        self.document = document
        self.type = type
        self.id = id
        self.parent_ids = parent_ids
        self.attributes = attributes
    @property
    def parents(self):
        """Resolve parent_ids against the owning document ([] when absent)."""
        if self.parent_ids is not None:
            return [self.document[pid] for pid in self.parent_ids]
        else:
            return []
    def __getattr__(self, item):
        # Fall back to the extra attributes; translate KeyError so that
        # hasattr()/getattr() behave normally for missing names.
        try:
            return self.attributes[item]
        except KeyError:
            raise AttributeError(item)
    def __repr__(self):
        # BUG FIX: this previously formatted self._extra, which no longer
        # exists after the rename to 'attributes', so repr() raised
        # AttributeError (via __getattr__).
        return "Record('{}', {}, {}, {})".format(self.type,
                                                 self.id,
                                                 self.parent_ids,
                                                 ', '.join('{}={}'.format(k, repr(v))
                                                           for k, v in self.attributes.items()))
    def __eq__(self, other):
        return self.__dict__ == other.__dict__
|
Raise AttributeError if key does not exist when trying to get it from a Record
|
Raise AttributeError if key does not exist when trying to get it from a Record
|
Python
|
mit
|
biosustain/genomediff-python
|
9aace6d89642e5025692b25e2c6253544ed580a6
|
social_auth/models.py
|
social_auth/models.py
|
"""Social auth models"""
from django.db import models
from django.contrib.auth.models import User
class UserSocialAuth(models.Model):
"""Social Auth association model"""
user = models.ForeignKey(User, related_name='social_auth')
provider = models.CharField(max_length=32)
uid = models.TextField()
class Meta:
"""Meta data"""
unique_together = ('provider', 'uid')
class Nonce(models.Model):
"""One use numbers"""
server_url = models.TextField()
timestamp = models.IntegerField()
salt = models.CharField(max_length=40)
class Association(models.Model):
"""OpenId account association"""
server_url = models.TextField(max_length=2047)
handle = models.CharField(max_length=255)
secret = models.TextField(max_length=255) # Stored base64 encoded
issued = models.IntegerField()
lifetime = models.IntegerField()
assoc_type = models.TextField(max_length=64)
|
"""Social auth models"""
from django.db import models
from django.contrib.auth.models import User
class UserSocialAuth(models.Model):
"""Social Auth association model"""
user = models.ForeignKey(User, related_name='social_auth')
provider = models.CharField(max_length=32)
uid = models.TextField()
class Meta:
"""Meta data"""
unique_together = ('provider', 'uid')
class Nonce(models.Model):
"""One use numbers"""
server_url = models.TextField()
timestamp = models.IntegerField()
salt = models.CharField(max_length=40)
class Association(models.Model):
"""OpenId account association"""
server_url = models.TextField()
handle = models.CharField(max_length=255)
secret = models.CharField(max_length=255) # Stored base64 encoded
issued = models.IntegerField()
lifetime = models.IntegerField()
assoc_type = models.CharField(max_length=64)
|
Remove max_length from TextFields and replace short text fields with CharFields
|
Remove max_length from TextFields and replace short text fields with CharFields
|
Python
|
bsd-3-clause
|
michael-borisov/django-social-auth,krvss/django-social-auth,thesealion/django-social-auth,lovehhf/django-social-auth,sk7/django-social-auth,dongguangming/django-social-auth,czpython/django-social-auth,beswarm/django-social-auth,adw0rd/django-social-auth,MjAbuz/django-social-auth,VishvajitP/django-social-auth,MjAbuz/django-social-auth,brianmckinneyrocks/django-social-auth,beswarm/django-social-auth,thesealion/django-social-auth,mayankcu/Django-social,vxvinh1511/django-social-auth,vuchau/django-social-auth,1st/django-social-auth,WW-Digital/django-social-auth,omab/django-social-auth,omab/django-social-auth,caktus/django-social-auth,vuchau/django-social-auth,limdauto/django-social-auth,antoviaque/django-social-auth-norel,vxvinh1511/django-social-auth,qas612820704/django-social-auth,gustavoam/django-social-auth,michael-borisov/django-social-auth,dongguangming/django-social-auth,limdauto/django-social-auth,duoduo369/django-social-auth,caktus/django-social-auth,gustavoam/django-social-auth,VishvajitP/django-social-auth,qas612820704/django-social-auth,getsentry/django-social-auth,lovehhf/django-social-auth,brianmckinneyrocks/django-social-auth
|
eca73e0c57042593f7e65446e26e63790c5cf2aa
|
notes/admin.py
|
notes/admin.py
|
#
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from snowy.accounts.models import UserProfile
from snowy.notes.models import Note, NoteTag
from reversion.admin import VersionAdmin
from django.contrib import admin
class NoteAdmin(VersionAdmin):
list_display = ('created', 'author', 'title')
search_fields = ['content', 'title']
prepopulated_fields = {'slug': ('title',)}
admin.site.register(Note, NoteAdmin)
admin.site.register(NoteTag)
admin.site.register(UserProfile)
|
#
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from snowy.accounts.models import UserProfile
from snowy.notes.models import Note, NoteTag
#from reversion.admin import VersionAdmin
from django.contrib import admin
#class NoteAdmin(VersionAdmin):
class NoteAdmin(admin.ModelAdmin):
list_display = ('created', 'author', 'title')
search_fields = ['content', 'title']
prepopulated_fields = {'slug': ('title',)}
admin.site.register(Note, NoteAdmin)
admin.site.register(NoteTag)
admin.site.register(UserProfile)
|
Complete removal of reversion usage
|
Complete removal of reversion usage
|
Python
|
agpl-3.0
|
leonhandreke/snowy,NoUsername/PrivateNotesExperimental,jaredjennings/snowy,GNOME/snowy,sandyarmstrong/snowy,syskill/snowy,syskill/snowy,NoUsername/PrivateNotesExperimental,sandyarmstrong/snowy,jaredjennings/snowy,jaredjennings/snowy,widox/snowy,jaredjennings/snowy,nekohayo/snowy,nekohayo/snowy,widox/snowy,GNOME/snowy,leonhandreke/snowy
|
630ba21f3b08dcd2685297b057cbee4b6abee6f7
|
us_ignite/sections/models.py
|
us_ignite/sections/models.py
|
from django.db import models
class Sponsor(models.Model):
name = models.CharField(max_length=255)
website = models.URLField(max_length=500)
image = models.ImageField(upload_to="sponsor")
order = models.IntegerField(default=0)
class Meta:
ordering = ('order', )
def __unicode__(self):
return self.name
|
from django.db import models
class Sponsor(models.Model):
name = models.CharField(max_length=255)
website = models.URLField(max_length=500)
image = models.ImageField(
upload_to="sponsor", help_text='This image is not post processed. '
'Please make sure it has the right design specs.')
order = models.IntegerField(default=0)
class Meta:
ordering = ('order', )
def __unicode__(self):
return self.name
|
Add help text describing the image field functionality.
|
Add help text describing the image field functionality.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
850d5189d3159f7cab6c509a2dd58c9f427c0bfc
|
examples/python/find_events.py
|
examples/python/find_events.py
|
from gi.repository import Zeitgeist
log = Zeitgeist.Log.get_default()
def callback (x):
print x
log.get_events([x for x in xrange(100)], None, callback, None)
|
from gi.repository import Zeitgeist, Gtk
log = Zeitgeist.Log.get_default()
def callback (x):
print x
log.get_events([x for x in xrange(100)], None, callback, None)
Gtk.main()
|
Add loop to the python example
|
Add loop to the python example
|
Python
|
lgpl-2.1
|
freedesktop-unofficial-mirror/zeitgeist__zeitgeist,freedesktop-unofficial-mirror/zeitgeist__zeitgeist,freedesktop-unofficial-mirror/zeitgeist__zeitgeist,freedesktop-unofficial-mirror/zeitgeist__zeitgeist,freedesktop-unofficial-mirror/zeitgeist__zeitgeist
|
9cfd402c8f95c016953eda752e1bd91302d6c8c0
|
translations/lantmateriet.py
|
translations/lantmateriet.py
|
def filterTags(attrs):
res = {}
if 'NAMN' in attrs:
res['name'] = attrs['NAMN']
if 'TATNR' in attrs:
res['ref:se:scb'] = attrs['TATNR']
if attrs.get('BEF') is not None:
bef = int(attrs.get('BEF'))
# This is an approximation based on http://wiki.openstreetmap.org/wiki/Key:place
# and the observed values of nodes in OpenStreetMap itself for cities and towns
# around Sweden.
# This seems to be around where OSM sets city status for Sweden
if bef >= 30000:
res['place'] = 'city'
elif bef >= 6000:
res['place'] = 'town'
elif bef >= 200:
res['place'] = 'village'
return res
|
def filterTags(attrs):
res = {}
if 'NAMN' in attrs:
res['name'] = attrs['NAMN']
if 'TATNR' in attrs:
res['ref:se:scb'] = attrs['TATNR']
if attrs.get('BEF') is not None:
bef = int(attrs.get('BEF'))
# This is an approximation based on http://wiki.openstreetmap.org/wiki/Key:place
# and the observed values of nodes in OpenStreetMap itself for cities and towns
# around Sweden.
# This seems to be around where OSM sets city status for Sweden
if bef >= 30000:
res['place'] = 'city'
elif bef >= 6000:
res['place'] = 'town'
elif bef >= 200:
res['place'] = 'village'
res['population'] = str(bef)
return res
|
Add population to the tags
|
LM: Add population to the tags
|
Python
|
bsd-3-clause
|
andpe/swegov-to-osm
|
d13c674a7286f1af9cd13babe2cb5c429b5b3bfa
|
scripts/update_guide_stats.py
|
scripts/update_guide_stats.py
|
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from mica.stats import update_guide_stats
update_guide_stats.main()
import os
table_file = mica.stats.guide_stats.TABLE_FILE
file_stat = os.stat(table_file)
if file_stat.st_size > 200e6:
print("""
Warning: {tfile} is larger than 200MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
|
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
import argparse
from mica.stats import update_guide_stats
import mica.stats.guide_stats
# Cheat and pass options directly. Needs entrypoint scripts
opt = argparse.Namespace(datafile=mica.stats.guide_stats.TABLE_FILE,
obsid=None, check_missing=False, start=None, stop=None)
update_guide_stats.update(opt)
table_file = mica.stats.guide_stats.TABLE_FILE
file_stat = os.stat(table_file)
if file_stat.st_size > 200e6:
print("""
Warning: {tfile} is larger than 200MB and may need
Warning: to be manually repacked (i.e.):
Warning:
Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5
Warning: cp compressed.h5 {tfile}
""".format(tfile=table_file))
|
Update guide stat script to pass datafile
|
Update guide stat script to pass datafile
|
Python
|
bsd-3-clause
|
sot/mica,sot/mica
|
5914b9a4d1d086f1a92309c0895aa7dd11761776
|
conf_site/accounts/tests/test_registration.py
|
conf_site/accounts/tests/test_registration.py
|
from factory import Faker, fuzzy
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
class UserRegistrationTestCase(TestCase):
def test_registration_view(self):
"""Verify that user registration view loads properly."""
response = self.client.get(reverse("account_signup"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "account/signup.html")
def test_user_registration(self):
"""Ensure that user registration works properly."""
EMAIL = Faker("email").generate()
PASSWORD = fuzzy.FuzzyText(length=16)
test_user_data = {
"password1": PASSWORD,
"password2": PASSWORD,
"email": EMAIL,
"email2": EMAIL,
}
# Verify that POSTing user data to the registration view
# succeeds / returns the right HTTP status code.
response = self.client.post(
reverse("account_signup"), test_user_data)
# Successful form submission will cause the HTTP status code
# to be "302 Found", not "200 OK".
self.assertEqual(response.status_code, 302)
# Verify that a User has been successfully created.
user_model = get_user_model()
user_model.objects.get(email=EMAIL)
|
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
from factory import fuzzy
from faker import Faker
class UserRegistrationTestCase(TestCase):
def test_registration_view(self):
"""Verify that user registration view loads properly."""
response = self.client.get(reverse("account_signup"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "account/signup.html")
def test_user_registration(self):
"""Ensure that user registration works properly."""
EMAIL = Faker().email()
PASSWORD = fuzzy.FuzzyText(length=16)
test_user_data = {
"password1": PASSWORD,
"password2": PASSWORD,
"email": EMAIL,
"email2": EMAIL,
}
# Verify that POSTing user data to the registration view
# succeeds / returns the right HTTP status code.
response = self.client.post(
reverse("account_signup"), test_user_data)
# Successful form submission will cause the HTTP status code
# to be "302 Found", not "200 OK".
self.assertEqual(response.status_code, 302)
# Verify that a User has been successfully created.
user_model = get_user_model()
user_model.objects.get(email=EMAIL)
|
Change imports in user registration test.
|
Change imports in user registration test.
|
Python
|
mit
|
pydata/conf_site,pydata/conf_site,pydata/conf_site
|
9a7100aaf0207fe93b28d7e473a4b5c1cd6061fe
|
vumi/application/__init__.py
|
vumi/application/__init__.py
|
"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
|
"""The vumi.application API."""
__all__ = ["ApplicationWorker", "SessionManager", "TagpoolManager"]
from vumi.application.base import ApplicationWorker
from vumi.application.session import SessionManager
from vumi.application.tagpool import TagpoolManager
|
Add TagpoolManager to vumi.application API.
|
Add TagpoolManager to vumi.application API.
|
Python
|
bsd-3-clause
|
vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,TouK/vumi,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,harrissoerja/vumi
|
2479b4a51b733ce8ba989d8f01b48791492d9f21
|
cogs/utils/dataIO.py
|
cogs/utils/dataIO.py
|
import redis_collections
import threading
import time
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop, daemon=True, name=kwargs['key'])
self.thread.start()
self.prev = None
def update_loop(self):
time.sleep(2)
while not self.die:
if self.prev != repr(self):
self.prev = repr(self)
self.sync()
time.sleep(0.1)
else:
self.cache.clear()
time.sleep(0.1)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
|
import redis_collections
import threading
import time
# noinspection PyUnresolvedReferences
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop, daemon=True, name=kwargs['key'])
self.thread.start()
self.rthread = threading.Thread(target=self.refresh_loop, daemon=True, name=kwargs['key'])
self.rthread.start()
self.prev = None
db = str(self.redis.connection_pool.connection_kwargs['db'])
self.pubsub_format = 'liara.{}.{}'.format(db, kwargs['key'])
def update_loop(self):
time.sleep(2)
while not self.die:
if self.prev != str(self.cache):
self.prev = str(self.cache)
self.sync()
self.redis.publish(self.pubsub_format, 'update')
time.sleep(0.01)
else:
time.sleep(0.01)
def refresh_loop(self):
time.sleep(2)
pubsub = self.redis.pubsub()
pubsub.subscribe([self.pubsub_format])
for message in pubsub.listen():
if message['type'] == 'message':
self.cache.clear()
self.cache = dict(self)
self.prev = str(self.cache)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
|
Make config sync more efficient
|
Make config sync more efficient
|
Python
|
mit
|
Thessia/Liara
|
87a3025196b0b3429cab1f439cd10728e99d982f
|
skimage/transform/__init__.py
|
skimage/transform/__init__.py
|
from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (warp, warp_coords, estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform,
PiecewiseAffineTransform)
from ._warps import swirl, homography
|
from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (warp, warp_coords, estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform,
PiecewiseAffineTransform)
from ._warps import swirl, homography, resize, rotate
|
Add missing imports in transform module.
|
BUG: Add missing imports in transform module.
|
Python
|
bsd-3-clause
|
emon10005/scikit-image,bsipocz/scikit-image,blink1073/scikit-image,youprofit/scikit-image,rjeli/scikit-image,SamHames/scikit-image,almarklein/scikit-image,rjeli/scikit-image,emon10005/scikit-image,almarklein/scikit-image,youprofit/scikit-image,chintak/scikit-image,almarklein/scikit-image,paalge/scikit-image,vighneshbirodkar/scikit-image,juliusbierk/scikit-image,jwiggins/scikit-image,michaelaye/scikit-image,warmspringwinds/scikit-image,almarklein/scikit-image,SamHames/scikit-image,WarrenWeckesser/scikits-image,blink1073/scikit-image,newville/scikit-image,keflavich/scikit-image,warmspringwinds/scikit-image,vighneshbirodkar/scikit-image,Midafi/scikit-image,juliusbierk/scikit-image,michaelaye/scikit-image,keflavich/scikit-image,rjeli/scikit-image,bennlich/scikit-image,Britefury/scikit-image,ofgulban/scikit-image,ajaybhat/scikit-image,SamHames/scikit-image,robintw/scikit-image,ofgulban/scikit-image,dpshelio/scikit-image,WarrenWeckesser/scikits-image,GaZ3ll3/scikit-image,Hiyorimi/scikit-image,robintw/scikit-image,oew1v07/scikit-image,chriscrosscutler/scikit-image,jwiggins/scikit-image,Midafi/scikit-image,chintak/scikit-image,michaelpacer/scikit-image,michaelpacer/scikit-image,ClinicalGraphics/scikit-image,chriscrosscutler/scikit-image,oew1v07/scikit-image,ajaybhat/scikit-image,vighneshbirodkar/scikit-image,pratapvardhan/scikit-image,Britefury/scikit-image,bennlich/scikit-image,chintak/scikit-image,paalge/scikit-image,pratapvardhan/scikit-image,SamHames/scikit-image,paalge/scikit-image,ClinicalGraphics/scikit-image,chintak/scikit-image,GaZ3ll3/scikit-image,bsipocz/scikit-image,dpshelio/scikit-image,ofgulban/scikit-image,newville/scikit-image,Hiyorimi/scikit-image
|
ed3c03ac4f213f3882e28f25ae0596a7021928cd
|
test/ParseableInterface/Inputs/make-unreadable.py
|
test/ParseableInterface/Inputs/make-unreadable.py
|
import platform
import subprocess
import sys
if platform.system() == 'Windows':
import ctypes
AdvAPI32 = ctypes.windll.Advapi32
from ctypes.wintypes import POINTER
UNLEN = 256
GetUserNameW = AdvAPI32.GetUserNameW
GetUserNameW.argtypes = (
ctypes.c_wchar_p, # _In_Out_ lpBuffer
POINTER(ctypes.c_uint) # _In_out_ pcBuffer
)
GetUserNameW.restype = ctypes.c_uint
buffer = ctypes.create_unicode_buffer(UNLEN + 1)
size = ctypes.c_uint(len(buffer))
GetUserNameW(buffer, ctypes.byref(size))
for path in sys.argv[1:]:
subprocess.call(['icacls', path, '/deny',
'{}:(R)'.format(buffer.value)])
else:
for path in sys.argv[1:]:
subprocess.call(['chmod', 'a-r', path])
|
import platform
import subprocess
import sys
if platform.system() == 'Windows':
import ctypes
AdvAPI32 = ctypes.windll.Advapi32
from ctypes.wintypes import POINTER
UNLEN = 256
GetUserNameW = AdvAPI32.GetUserNameW
GetUserNameW.argtypes = (
ctypes.c_wchar_p, # _In_Out_ lpBuffer
POINTER(ctypes.c_uint) # _In_out_ pcBuffer
)
GetUserNameW.restype = ctypes.c_uint
buffer = ctypes.create_unicode_buffer(UNLEN + 1)
size = ctypes.c_uint(len(buffer))
GetUserNameW(buffer, ctypes.byref(size))
# For NetworkService, Host$ is returned, so we choose have to turn it back
# into something that icacls understands.
if not buffer.value.endswith('$'):
user_name = buffer.value
else:
user_name = 'NT AUTHORITY\\NetworkService'
for path in sys.argv[1:]:
subprocess.call(['icacls', path, '/deny',
'{}:(R)'.format(user_name)])
else:
for path in sys.argv[1:]:
subprocess.call(['chmod', 'a-r', path])
|
Fix handling of Network Service username.
|
[windows] Fix handling of Network Service username.
In Windows Server 2016 at least, the Network Service user (the one being
used by the CI machine) is returned as Host$, which icacls doesn't
understand. Turn the name into something that icacls if we get a name
that ends with a dollar.
|
Python
|
apache-2.0
|
atrick/swift,hooman/swift,harlanhaskins/swift,shahmishal/swift,stephentyrone/swift,jmgc/swift,devincoughlin/swift,ahoppen/swift,tkremenek/swift,xedin/swift,shahmishal/swift,xwu/swift,xedin/swift,harlanhaskins/swift,harlanhaskins/swift,sschiau/swift,shajrawi/swift,karwa/swift,gribozavr/swift,apple/swift,CodaFi/swift,ahoppen/swift,lorentey/swift,nathawes/swift,JGiola/swift,allevato/swift,airspeedswift/swift,harlanhaskins/swift,hooman/swift,karwa/swift,rudkx/swift,CodaFi/swift,gregomni/swift,lorentey/swift,sschiau/swift,shajrawi/swift,karwa/swift,parkera/swift,tkremenek/swift,sschiau/swift,devincoughlin/swift,xedin/swift,aschwaighofer/swift,airspeedswift/swift,jmgc/swift,nathawes/swift,lorentey/swift,tkremenek/swift,allevato/swift,jmgc/swift,xwu/swift,JGiola/swift,ahoppen/swift,shahmishal/swift,tkremenek/swift,roambotics/swift,benlangmuir/swift,roambotics/swift,hooman/swift,atrick/swift,gribozavr/swift,gregomni/swift,glessard/swift,xedin/swift,apple/swift,jckarter/swift,gregomni/swift,karwa/swift,benlangmuir/swift,sschiau/swift,xedin/swift,stephentyrone/swift,aschwaighofer/swift,jckarter/swift,CodaFi/swift,lorentey/swift,CodaFi/swift,lorentey/swift,harlanhaskins/swift,tkremenek/swift,karwa/swift,gribozavr/swift,nathawes/swift,gregomni/swift,tkremenek/swift,JGiola/swift,nathawes/swift,JGiola/swift,parkera/swift,gregomni/swift,aschwaighofer/swift,airspeedswift/swift,CodaFi/swift,karwa/swift,apple/swift,shajrawi/swift,atrick/swift,stephentyrone/swift,hooman/swift,apple/swift,xwu/swift,parkera/swift,CodaFi/swift,glessard/swift,devincoughlin/swift,glessard/swift,hooman/swift,rudkx/swift,ahoppen/swift,harlanhaskins/swift,sschiau/swift,hooman/swift,allevato/swift,shajrawi/swift,ahoppen/swift,allevato/swift,devincoughlin/swift,shajrawi/swift,devincoughlin/swift,nathawes/swift,JGiola/swift,rudkx/swift,devincoughlin/swift,benlangmuir/swift,parkera/swift,roambotics/swift,rudkx/swift,jmgc/swift,xwu/swift,xedin/swift,roambotics/swift,aschwaighofer/swift,jmgc/swift,airspeedswift/swi
ft,shahmishal/swift,stephentyrone/swift,gribozavr/swift,karwa/swift,devincoughlin/swift,gribozavr/swift,nathawes/swift,roambotics/swift,benlangmuir/swift,rudkx/swift,shahmishal/swift,xwu/swift,glessard/swift,karwa/swift,aschwaighofer/swift,allevato/swift,parkera/swift,glessard/swift,atrick/swift,tkremenek/swift,gribozavr/swift,sschiau/swift,jmgc/swift,benlangmuir/swift,jckarter/swift,jckarter/swift,shahmishal/swift,allevato/swift,gregomni/swift,airspeedswift/swift,parkera/swift,shahmishal/swift,gribozavr/swift,sschiau/swift,jckarter/swift,JGiola/swift,harlanhaskins/swift,xedin/swift,allevato/swift,jckarter/swift,sschiau/swift,nathawes/swift,airspeedswift/swift,xedin/swift,lorentey/swift,glessard/swift,devincoughlin/swift,atrick/swift,lorentey/swift,aschwaighofer/swift,CodaFi/swift,gribozavr/swift,roambotics/swift,shajrawi/swift,rudkx/swift,airspeedswift/swift,lorentey/swift,stephentyrone/swift,apple/swift,aschwaighofer/swift,xwu/swift,xwu/swift,benlangmuir/swift,ahoppen/swift,atrick/swift,jckarter/swift,parkera/swift,parkera/swift,shajrawi/swift,hooman/swift,shahmishal/swift,stephentyrone/swift,shajrawi/swift,jmgc/swift,stephentyrone/swift,apple/swift
|
8f68e3f3ab63d67d3e7fc1c8cd63c6c9d03729a2
|
Channels/News_Channel/lz77.py
|
Channels/News_Channel/lz77.py
|
import glob
import os
import subprocess
"""This is used to decompress the news.bin files."""
def decompress(file):
with open(file, "rb") as source_file:
read = source_file.read()
tail = read[320:]
with open(file + ".2", "w+") as dest_file:
dest_file.write(tail)
FNULL = open(os.devnull, "w+")
decompress = subprocess.call(["mono", "--runtime=v4.0.30319", "DSDecmp.exe", "-d", file + ".2", file + ".3"], stdout=FNULL, stderr=subprocess.STDOUT)
remove = os.remove(file + ".2")
move = subprocess.call(["mv", file + ".3", file + ".2"], stdout=FNULL, stderr=subprocess.STDOUT)
open_hex = subprocess.call(["open", "-a", "Hex Fiend", file + ".2"], stdout=FNULL, stderr=subprocess.STDOUT)
for file in glob.glob("news.bin.*"):
if os.path.exists(file):
decompress(file)
for file in glob.glob("*.bin"):
if os.path.exists(file):
decompress(file)
|
import glob
import os
import subprocess
"""This is used to decompress the news.bin files."""
def decompress(file):
with open(file, "rb") as source_file:
read = source_file.read()
tail = read[320:]
with open(file + ".2", "w+") as dest_file:
dest_file.write(tail)
FNULL = open(os.devnull, "w+")
decompress = subprocess.call(["mono", "--runtime=v4.0.30319", "DSDecmp.exe", "-d", file + ".2", file + ".3"], stdout=FNULL, stderr=subprocess.STDOUT)
remove = os.remove(file + ".2")
move = subprocess.call(["mv", file + ".3", file + ".2"], stdout=FNULL, stderr=subprocess.STDOUT)
open_hex = subprocess.call(["open", "-a", "Hex Fiend", file + ".2"], stdout=FNULL, stderr=subprocess.STDOUT) // This is to open the news files in the Mac hex editor I use called Hex Fiend.
for file in glob.glob("news.bin.*"):
if os.path.exists(file):
decompress(file)
for file in glob.glob("*.bin"):
if os.path.exists(file):
decompress(file)
|
Comment about the hex part
|
Comment about the hex part
|
Python
|
agpl-3.0
|
RiiConnect24/File-Maker,RiiConnect24/File-Maker
|
794a75ed410fe39ba2376ebcab75d21cc5e9fee0
|
common/safeprint.py
|
common/safeprint.py
|
import multiprocessing, sys, datetime
print_lock = multiprocessing.Lock()
def safeprint(content):
with print_lock:
sys.stdout.write(("[" + str(multiprocessing.current_process().pid) + "] " + datetime.datetime.now().strftime('%H%M%S') + ": " + str(content) + '\r\n'))
|
import multiprocessing, sys, datetime
print_lock = multiprocessing.RLock()
def safeprint(content):
string = "[" + str(multiprocessing.current_process().pid) + "] " + datetime.datetime.now().strftime('%H%M%S') + ": " + str(content) + '\r\n'
with print_lock:
sys.stdout.write(string)
|
Reduce the amount of time locking
|
Reduce the amount of time locking
|
Python
|
mit
|
gappleto97/Senior-Project
|
b79ed827f7211efbcdef95286bf2d4113d6e8b88
|
posts/views.py
|
posts/views.py
|
from django.shortcuts import get_object_or_404
from django.views.generic.dates import ArchiveIndexView
from django.views.generic.edit import FormView
from .models import Entry, Category
from .forms import ContactForm
class CategoryView(ArchiveIndexView):
model = Entry
date_field = 'date'
paginate_by = 20
template_name = 'posts/entry_category.html'
def get(self, request, slug, **kwargs):
self.kwargs['category'] = get_object_or_404(Category, slug=slug)
return super().get(request, kwargs)
def get_queryset(self):
return Entry.objects.filter(category=self.kwargs['category'])
def get_context_data(self, **kwargs):
result = super().get_context_data(**kwargs)
result['category'] = self.kwargs['category']
return result
class ContactView(FormView):
template_name = 'contact.html'
form_class = ContactForm
success_url = '/kontakt/'
def form_valid(self, form):
# This method is called when valid form data has been POSTed.
# It should return an HttpResponse.
form.send_email()
return super().form_valid(form)
|
from django.shortcuts import get_object_or_404
from django.views.generic.dates import ArchiveIndexView
from django.views.generic.edit import FormView
from .models import Entry, Category
from .forms import ContactForm
class CategoryView(ArchiveIndexView):
model = Entry
date_field = 'date'
paginate_by = 20
template_name = 'posts/entry_category.html'
def get(self, request, slug, **kwargs):
self.kwargs['category'] = get_object_or_404(Category, slug=slug)
return super().get(request, kwargs)
def get_queryset(self):
return super().get_queryset().filter(category=self.kwargs['category'])
def get_context_data(self, **kwargs):
result = super().get_context_data(**kwargs)
result['category'] = self.kwargs['category']
return result
class ContactView(FormView):
template_name = 'contact.html'
form_class = ContactForm
success_url = '/kontakt/'
def form_valid(self, form):
# This method is called when valid form data has been POSTed.
# It should return an HttpResponse.
form.send_email()
return super().form_valid(form)
|
Fix ordering of category view
|
Fix ordering of category view
Signed-off-by: Michal Čihař <a2df1e659c9fd2578de0a26565357cb273292eeb@cihar.com>
|
Python
|
agpl-3.0
|
nijel/photoblog,nijel/photoblog
|
df99ee50e7d7a677aec4e30af10283399a8edb8c
|
dlstats/configuration.py
|
dlstats/configuration.py
|
import configobj
import validate
import os
def _get_filename():
"""Return the configuration file path."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
return os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
return '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.')
elif os.name == 'mac':
return ("%s/Library/Application Support/%s" % (os.environ["HOME"], appname))
elif os.name == 'nt':
return ("%s\Application Data\%s" % (os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
configuration_filename = _get_filename()
_configspec = """
[General]
logging_directory = string()
socket_directory = string()
[MongoDB]
host = ip_addr()
port = integer()
max_pool_size = integer()
socketTimeoutMS = integer()
connectTimeoutMS = integer()
waitQueueTimeout = integer()
waitQueueMultiple = integer()
auto_start_request = boolean()
use_greenlets = boolean()
[ElasticSearch]
host = integer()
port = integer()
[Fetchers]
[[Eurostat]]
url_table_of_contents = string()"""
configuration = configobj.ConfigObj(configuration_filename,
configspec=_configspec.split('\n'))
validator = validate.Validator()
configuration.validate(validator)
configuration = configuration.dict()
|
import configobj
import validate
import os
def _get_filename():
"""Return the configuration file path."""
appname = 'dlstats'
if os.name == 'posix':
if "HOME" in os.environ:
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
return os.environ["HOME"]+'/.'+appname
if os.path.isfile('/etc/'+appname):
return '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.')
elif os.name == 'mac':
return ("%s/Library/Application Support/%s" % (os.environ["HOME"], appname))
elif os.name == 'nt':
return ("%s\Application Data\%s" % (os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
configuration_filename = _get_filename()
_configspec = """
[General]
logging_directory = string()
socket_directory = string()
[MongoDB]
host = ip_addr()
port = integer()
max_pool_size = integer()
socketTimeoutMS = integer()
connectTimeoutMS = integer()
waitQueueTimeout = integer()
waitQueueMultiple = integer()
auto_start_request = boolean()
use_greenlets = boolean()
[ElasticSearch]
host = integer()
port = integer()
[Fetchers]
[[Eurostat]]
url_table_of_contents = string()"""
configuration = configobj.ConfigObj(configuration_filename,
configspec=_configspec.split('\n'))
validator = validate.Validator()
configuration.validate(validator)
configuration = configuration.dict()
|
Test for environment variable existence
|
Test for environment variable existence
|
Python
|
agpl-3.0
|
Widukind/dlstats,MichelJuillard/dlstats,MichelJuillard/dlstats,mmalter/dlstats,mmalter/dlstats,Widukind/dlstats,mmalter/dlstats,MichelJuillard/dlstats
|
3838e44a397fdb4b605ead875b7c6ebc5787644d
|
jal_stats/stats/serializers.py
|
jal_stats/stats/serializers.py
|
from rest_framework import serializers
from .models import Activity, Datapoint
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('user', 'full_description', 'units', 'url')
class DatapointSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Datapoint
fields = ('user', 'activity', 'reps', 'timestamp', 'url')
|
from rest_framework import serializers
from .models import Activity, Datapoint
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
    """REST serializer for Activity records.

    Exposes both the plain primary key ('id') and the hyperlinked
    detail 'url' so clients can use either addressing style.
    """
    class Meta:
        model = Activity
        fields = ('id', 'user', 'full_description', 'units', 'url')
class DatapointSerializer(serializers.HyperlinkedModelSerializer):
    """REST serializer for Datapoint records.

    Mirrors ActivitySerializer: exposes 'id' alongside the hyperlinked
    'url', plus the owning user/activity, rep count and timestamp.
    """
    class Meta:
        model = Datapoint
        fields = ('id', 'user', 'activity', 'reps', 'timestamp', 'url')
|
Add 'id' to both Serializers
|
Add 'id' to both Serializers
|
Python
|
mit
|
jal-stats/django
|
ec1e0cd1fa8bab59750032942643a7abc8700642
|
cspreports/models.py
|
cspreports/models.py
|
#LIBRARIES
from django.db import models
from django.utils.html import escape
from django.utils.safestring import mark_safe
class CSPReport(models.Model):
    """One Content-Security-Policy violation report, stored as the raw
    JSON payload the browser POSTed (no validation/parsing on save)."""

    class Meta(object):
        # Newest reports first by default.
        ordering = ('-created',)

    created = models.DateTimeField(auto_now_add=True)  # when the report arrived
    modified = models.DateTimeField(auto_now=True)     # last save time
    json = models.TextField()                          # raw report body

    def json_as_html(self):
        """Return self.json pretty-printed, HTML-escaped and wrapped in
        a <pre> element, marked safe for template rendering."""
        # To avoid circular import
        from cspreports import utils
        formatted_json = utils.format_report(self.json)
        return mark_safe(u"<pre>\n%s</pre>" % escape(formatted_json))
|
# STANDARD LIB
import json
#LIBRARIES
from django.db import models
from django.utils.html import escape
from django.utils.safestring import mark_safe
class CSPReport(models.Model):
class Meta(object):
ordering = ('-created',)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
json = models.TextField()
@property
def data(self):
""" Returns self.json loaded as a python object. """
try:
data = self._data
except AttributeError:
data = self._data = json.loads(self.json)
return data
def json_as_html(self):
""" Print out self.json in a nice way. """
# To avoid circular import
from cspreports import utils
formatted_json = utils.format_report(self.json)
return mark_safe(u"<pre>\n%s</pre>" % escape(formatted_json))
|
Revert removing data which is still used
|
Revert removing data which is still used
This was removed in b1bc34e9a83cb3af5dd11baa1236f2b65ab823f9 but is still used in admin.py.
|
Python
|
mit
|
adamalton/django-csp-reports
|
271b4cd3795cbe0e5e013ac53c3ea26ca08e7a1a
|
IPython/utils/importstring.py
|
IPython/utils/importstring.py
|
# encoding: utf-8
"""
A simple utility to import something by its string name.
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Functions and classes
#-----------------------------------------------------------------------------
def import_item(name):
    """Import and return ``bar`` given the string ``"foo.bar"``.

    Functionally equivalent to ``from foo import bar`` followed by
    returning ``bar``; a dotless name is imported as a plain module.

    Parameters
    ----------
    name : str
        Fully qualified dotted name of the object to import.

    Returns
    -------
    The imported object (module, class, function, ...).

    Raises
    ------
    ImportError
        If the containing package cannot be imported, or if it does
        not define an attribute with the requested name.
    """
    package = '.'.join(name.split('.')[0:-1])
    obj = name.split('.')[-1]
    if package:
        module = __import__(package, fromlist=[obj])
        try:
            return module.__dict__[obj]
        except KeyError:
            # __import__ succeeded but the attribute is missing (e.g.
            # 'os.no_such_thing').  Surface it as an ImportError rather
            # than leaking a bare KeyError to the caller.
            raise ImportError('No module named %s' % obj)
    else:
        return __import__(obj)
|
# encoding: utf-8
"""
A simple utility to import something by its string name.
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Functions and classes
#-----------------------------------------------------------------------------
def import_item(name):
    """Import and return ``bar`` given the string ``"foo.bar"``.

    Equivalent to executing ``from foo import bar`` and returning
    ``bar``.  A dotless name is simply imported as a module.  Raises
    ImportError when the parent imports but lacks the attribute.
    """
    pieces = name.split('.')
    attr = pieces[-1]
    parent = '.'.join(pieces[:-1])
    if not parent:
        # No package part: import the module itself.
        return __import__(attr)
    module = __import__(parent, fromlist=[attr])
    try:
        return module.__dict__[attr]
    except KeyError:
        raise ImportError('No module named %s' % attr)
|
Fix error in test suite startup with dotted import names.
|
Fix error in test suite startup with dotted import names.
Detected first on ubuntu 12.04, but the bug is generic, we just hadn't
seen it before. Will push straight to master as this will begin
causing problems as more people upgrade.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
14869bd8c58a393caec488e95d51c282ccf23d0d
|
katagawa/sql/__init__.py
|
katagawa/sql/__init__.py
|
"""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
    """
    Base class for a token in the SQL generation tree.

    A token owns a list of child tokens and knows how to render itself
    as SQL.  Concrete subclasses must provide ``name`` and
    ``generate_sql``.
    """

    def __init__(self, subtokens: typing.List['Token']):
        """
        :param subtokens: Any subtokens this token has.
        """
        self.subtokens = subtokens

    # abc.abstractproperty has been deprecated since Python 3.3; the
    # supported spelling is @property stacked over @abstractmethod.
    @property
    @abc.abstractmethod
    def name(self):
        """
        Returns the name of the token.
        """

    @abc.abstractmethod
    def generate_sql(self):
        """
        Generate SQL from this statement.

        :return: The generated SQL.
        """
|
"""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
    """
    Base class for a token in the SQL generation tree.
    """
    # Bug fix: this was previously ``()``, but __init__ assigns
    # ``self.subtokens``.  Any subclass that also declares __slots__
    # (and therefore has no instance __dict__) would then fail with
    # AttributeError on construction.  The one instance attribute must
    # have its slot declared here.
    __slots__ = ("subtokens",)

    def __init__(self, subtokens: typing.List['Token']):
        """
        :param subtokens: Any subtokens this token has.
        """
        self.subtokens = subtokens

    # @property over @abstractmethod is the non-deprecated spelling of
    # abc.abstractproperty.
    @property
    @abc.abstractmethod
    def name(self):
        """
        Returns the name of the token.

        This is a unique identifier, but is not always related to the actual SQL underneath it.
        """

    @abc.abstractmethod
    def generate_sql(self):
        """
        Generate SQL from this statement.

        :return: The generated SQL.
        """
class Aliased(Token):
    """
    Mixin class for an aliased token.
    """
    # Only the alias is stored here; subtokens are handled by Token.
    # NOTE(review): Token declares ``__slots__ = ()`` yet assigns
    # ``self.subtokens`` -- on a slotted subclass like this one that
    # assignment has nowhere to live; verify against the base class.
    __slots__ = ("alias",)

    def __init__(self, subtokens: typing.List['Token'], alias: str):
        """
        :param subtokens: Any subtokens this token has.
        :param alias: The alias this token has.
        """
        super().__init__(subtokens)
        self.alias = alias
|
Add aliased ABC for tokens with an alias.
|
Add aliased ABC for tokens with an alias.
|
Python
|
mit
|
SunDwarf/asyncqlio
|
7844df0c4f32c9cc1f5833aba4712680461f77b5
|
test/on_yubikey/cli_piv/test_misc.py
|
test/on_yubikey/cli_piv/test_misc.py
|
import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEqual('test data\n', output)
return [Misc]
|
import unittest
from ykman.piv import OBJ
from .util import DEFAULT_MANAGEMENT_KEY
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEqual('test data\n', output)
def test_export_invalid_certificate_fails(self):
ykman_cli('piv', 'write-object', hex(OBJ.AUTHENTICATION), '-',
'-m', DEFAULT_MANAGEMENT_KEY,
input='Kom ihåg att du aldrig får snyta dig i mattan!')
with self.assertRaises(SystemExit):
ykman_cli('piv', 'export-certificate',
hex(OBJ.AUTHENTICATION), '-')
def test_info_with_invalid_certificate_does_not_crash(self):
ykman_cli('piv', 'write-object', hex(OBJ.AUTHENTICATION), '-',
'-m', DEFAULT_MANAGEMENT_KEY,
input='Kom ihåg att du aldrig får snyta dig i mattan!')
ykman_cli('piv', 'info')
return [Misc]
|
Test that invalid cert crashes export-certificate but not info
|
Test that invalid cert crashes export-certificate but not info
|
Python
|
bsd-2-clause
|
Yubico/yubikey-manager,Yubico/yubikey-manager
|
1c3c092afae1946e72a87cca8792bd012bee23e4
|
ktbs_bench/utils/decorators.py
|
ktbs_bench/utils/decorators.py
|
from functools import wraps
from inspect import getcallargs
from timer import Timer
def bench(f):
    """Decorator that times a call to ``f``.

    The wrapped function returns a two-element list
    ``[call_signature, real_time]`` where ``call_signature`` describes
    the arguments the call was made with and ``real_time`` is the
    wall-clock duration of the call in seconds.  The decorated
    function's own return value is discarded.
    """
    # TODO: add parameters (n_repeat, func) to execute the call
    # n_repeat times and apply reduce(res, func) over the results.
    @wraps(f)
    def wrapped(*args, **kwargs):
        # Build the call signature BEFORE running f: if f mutates its
        # arguments in place, capturing the signature afterwards would
        # record the mutated values rather than the ones passed in.
        signature = call_signature(f, *args, **kwargs)
        timer = Timer(tick_now=False)
        timer.start()
        f(*args, **kwargs)
        timer.stop()
        res = [signature,
               timer.get_times()['real']]  # TODO: decide which time to keep
        return res
    return wrapped
def call_signature(f, *args, **kwargs):
    """Return a string representation of a function call."""
    bound = getcallargs(f, *args, **kwargs)
    pairs = ["%s=%s" % (arg_name, arg_value)
             for arg_name, arg_value in bound.items()]
    return ';'.join(pairs)
|
from functools import wraps
from inspect import getcallargs
from timer import Timer
def bench(f):
"""Decorator to time a function.
Parameters
----------
f : function
The function to time.
Returns
-------
call_signature : str
The signature of the function call, with parameter names and values.
time : float
The real time taken to execute the function, in second.
Examples
--------
>>> @bench
... def square_list(numbers):
... for ind_num in range(len(numbers)):
... numbers[ind_num] *= numbers[ind_num]
... return numbers
>>> call_sig, time = square_list(range(10))
>>> call_sig
'numbers=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]'
>>> 0 < time < 1 # benched function is not computationally intensive so time should be less than 1 s
True
"""
@wraps(f)
def wrapped(*args, **kwargs):
"""Actual benchmark takes place here."""
call_sig = call_signature(f, *args, **kwargs)
timer = Timer(tick_now=False)
timer.start()
f(*args, **kwargs)
timer.stop()
res = [call_sig, timer.get_times()['real']]
return res
return wrapped
def call_signature(f, *args, **kwargs):
    """Build a string describing a function call.

    Parameters
    ----------
    f : function
        The function whose call is being described.
    args : list
        Positional arguments of the call.
    kwargs : dict
        Keyword arguments of the call.

    Returns
    -------
    out : str
        ``;``-separated ``name=value`` pairs, one per argument.

    Examples
    --------
    >>> def square(num):
    ...     return num*num
    >>> call_signature(square, 4)
    'num=4'
    """
    bound_args = getcallargs(f, *args, **kwargs)
    rendered = ("%s=%s" % (arg_name, arg_value)
                for arg_name, arg_value in bound_args.items())
    return ';'.join(rendered)
|
Add docstrings and fix call of call_signature.
|
Add docstrings and fix call of call_signature.
For the fix:
call_signature has been moved before executing the actual call, if the call is made before then it might change arguments if they are references.
|
Python
|
mit
|
ktbs/ktbs-bench,ktbs/ktbs-bench
|
15be3bd492a0808713c6ae6981ecb99acacd5297
|
allauth/socialaccount/providers/trello/provider.py
|
allauth/socialaccount/providers/trello/provider.py
|
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
class TrelloAccount(ProviderAccount):
def get_profile_url(self):
return None
def get_avatar_url(self):
return None
class TrelloProvider(OAuthProvider):
id = 'trello'
name = 'Trello'
account_class = TrelloAccount
def get_default_scope(self):
return ['read']
def extract_uid(self, data):
return data['id']
def get_auth_params(self, request, action):
data = super(TrelloProvider, self).get_auth_params(request, action)
app = self.get_app(request)
data['type'] = 'web_server'
data['name'] = app.name
# define here for how long it will be, this can be configured on the
# social app
data['expiration'] = 'never'
return data
provider_classes = [TrelloProvider]
|
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
class TrelloAccount(ProviderAccount):
def get_profile_url(self):
return None
def get_avatar_url(self):
return None
class TrelloProvider(OAuthProvider):
id = 'trello'
name = 'Trello'
account_class = TrelloAccount
def get_default_scope(self):
return ['read']
def extract_uid(self, data):
return data['id']
def get_auth_params(self, request, action):
data = super(TrelloProvider, self).get_auth_params(request, action)
app = self.get_app(request)
data['type'] = 'web_server'
data['name'] = app.name
data['scope'] = self.get_scope(request)
# define here for how long it will be, this can be configured on the
# social app
data['expiration'] = 'never'
return data
provider_classes = [TrelloProvider]
|
Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
|
feat(TrelloProvider): Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
|
Python
|
mit
|
AltSchool/django-allauth,AltSchool/django-allauth,AltSchool/django-allauth
|
a35b6e46bd9d443f07391f37f5e0e384e37608bb
|
nbgrader/tests/test_nbgrader_feedback.py
|
nbgrader/tests/test_nbgrader_feedback.py
|
from .base import TestBase
from nbgrader.api import Gradebook
import os
class TestNbgraderFeedback(TestBase):
def _setup_db(self):
dbpath = self._init_db()
gb = Gradebook(dbpath)
gb.add_assignment("Problem Set 1")
gb.add_student("foo")
gb.add_student("bar")
return dbpath
def test_help(self):
"""Does the help display without error?"""
with self._temp_cwd():
self._run_command("nbgrader feedback --help-all")
def test_single_file(self):
"""Can feedback be generated for an unchanged assignment?"""
with self._temp_cwd(["files/submitted-unchanged.ipynb"]):
dbpath = self._setup_db()
self._run_command(
'nbgrader autograde submitted-unchanged.ipynb '
'--db="{}" '
'--assignment="Problem Set 1" '
'--AssignmentExporter.notebook_id=teacher '
'--student=foo'.format(dbpath))
self._run_command(
'nbgrader feedback submitted-unchanged.nbconvert.ipynb '
'--db="{}" '
'--assignment="Problem Set 1" '
'--AssignmentExporter.notebook_id=teacher '
'--student=foo'.format(dbpath))
assert os.path.exists('submitted-unchanged.nbconvert.nbconvert.html')
|
from .base import TestBase
from nbgrader.api import Gradebook
import os
import shutil
class TestNbgraderFeedback(TestBase):
def _setup_db(self):
dbpath = self._init_db()
gb = Gradebook(dbpath)
gb.add_assignment("ps1")
gb.add_student("foo")
return dbpath
def test_help(self):
"""Does the help display without error?"""
with self._temp_cwd():
self._run_command("nbgrader feedback --help-all")
def test_single_file(self):
"""Can feedback be generated for an unchanged assignment?"""
with self._temp_cwd(["files/submitted-unchanged.ipynb"]):
dbpath = self._setup_db()
os.makedirs('source/ps1')
shutil.copy('submitted-unchanged.ipynb', 'source/ps1/p1.ipynb')
self._run_command('nbgrader assign ps1 --db="{}" '.format(dbpath))
os.makedirs('submitted/foo/ps1')
shutil.move('submitted-unchanged.ipynb', 'submitted/foo/ps1/p1.ipynb')
self._run_command('nbgrader autograde ps1 --db="{}" '.format(dbpath))
self._run_command('nbgrader feedback ps1 --db="{}" '.format(dbpath))
assert os.path.exists('feedback/foo/ps1/p1.html')
|
Update tests for nbgrader feedback
|
Update tests for nbgrader feedback
|
Python
|
bsd-3-clause
|
jhamrick/nbgrader,alope107/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,modulexcite/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jdfreder/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,dementrock/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,jhamrick/nbgrader,alope107/nbgrader,jdfreder/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,dementrock/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader
|
0749c47bb280230ae5b1e2cda23773d3b10b2491
|
redis_check.py
|
redis_check.py
|
#!/usr/bin/env python3
# Probe a single Redis server and print one status line.
#
# Usage: redis_check.py HOST
# Connects on the default port with a short timeout and reports the
# server version and key count, or the reason the check failed.
import sys

import redis
import redis.exceptions

# Host is the first CLI argument; strip a trailing newline so hosts can
# be piped in from a file, one per line.
host = sys.argv[1]
host = host.strip('\r\n')
port = 6379    # default Redis port
timeout = 5    # socket timeout in seconds

try:
    db = redis.StrictRedis(host=host, port=port, socket_timeout=timeout)
    i = db.info()
    ver = i.get('redis_version')
    siz = db.dbsize()  # number of keys in the selected database
    print('[+] {0}:{1} - {2}({3})'.format(host, port, ver, siz))
except redis.exceptions.ResponseError as e:
    # Server answered but refused the command (e.g. auth required) --
    # still counted as reachable, hence '[+]'.
    print('[+] {0}:{1} - {2}'.format(host, port, e))
except redis.exceptions.ConnectionError:
    print('[-] {0}:{1} - Connection Error'.format(host, port))
except redis.exceptions.TimeoutError:
    print('[-] {0}:{1} - Timeout'.format(host, port))
except redis.exceptions.InvalidResponse:
    print('[-] {0}:{1} - Invalid Response'.format(host, port))
|
#!/usr/bin/env python3
import sys
import redis
import redis.exceptions
host = sys.argv[1]
host = host.strip('\r\n')
port = 6379
timeout = 5
try:
db = redis.StrictRedis(host=host, port=port, socket_timeout=timeout)
i = db.info()
ver = i.get('redis_version')
siz = db.dbsize()
print('[+] {0}:{1}:{2}'.format(host, ver, siz))
except redis.exceptions.ResponseError as e:
print('[+] {0}::{1}'.format(host, e))
except redis.exceptions.ConnectionError:
print('[-] {0}::Connection Error'.format(host))
except redis.exceptions.TimeoutError:
print('[-] {0}::Timeout'.format(host))
except redis.exceptions.InvalidResponse:
print('[-] {0}::Invalid Response'.format(host))
|
Make output easier to parse with cli tools.
|
Make output easier to parse with cli tools.
|
Python
|
bsd-3-clause
|
averagesecurityguy/research
|
8e53b65b5f28a02f8ee980b9f53a57e7cdd077bd
|
main.py
|
main.py
|
import places
from character import Character
import actions
import options
from multiple_choice import MultipleChoice
def combat(character):
    """Resolve a fight for the given character.

    Forces an Attack action against ``character.person`` and returns
    the resulting outcome.
    """
    attack = actions.Attack(character.person)
    return attack.get_outcome(character)
def main():
    """Run the St. George game.

    Each iteration: the player chooses an action, the action resolves
    into an outcome (a threatened character picking a non-combat action
    is forced into combat instead), the outcome is applied, and the
    option list is refreshed.  The loop ends when the character dies,
    is no longer alone, or loses.
    """
    character = Character()
    character.place = places.tavern
    choices = MultipleChoice()
    options.set_initial_actions(choices)
    print("\n---The St. George Game---\n")
    print("You are in a tavern. The local assassins hate you.")
    while character.alive and character.alone and not character.lose:
        chosen = choices.choose_action()
        # Equivalent to: not threatened or combat_action -> use the
        # chosen action; otherwise force combat.
        forced_combat = character.threatened and not chosen.combat_action
        outcome = combat(character) if forced_combat \
            else chosen.get_outcome(character)
        if not character.alive:
            break
        outcome.execute()
        options.add_actions(choices, character, outcome)
        choices.generate_actions(character)

if __name__ == "__main__":
    main()
|
import places
from character import Character
import actions
import options
from multiple_choice import MultipleChoice
def main():
"""
The goal is to have the main function operate as follows:
Set up the initial state
Display the initial message
Display the initial options
Choose an action
Get an outcome
Display results of the outcomes
Outcome changes game state
"""
character = Character(place=places.tavern)
choices = MultipleChoice()
options.set_initial_actions(choices)
print("\n---The St. George Game---\n")
print("You are in a tavern. The local assassins hate you.")
while character.alive and character.alone and not character.lose:
action = choices.choose_action()
if not character.threatened or action.combat_action:
outcome = action.get_outcome(character)
else:
outcome = actions.Attack(character.person).get_outcome(character)
outcome.execute()
options.add_actions(choices, character, outcome)
choices.generate_actions(character)
if __name__ == "__main__":
main()
|
Refactor combat code to be more concise
|
Refactor combat code to be more concise
|
Python
|
apache-2.0
|
SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame
|
75d6e88de0ed8f8cb081de15ce0d3949a78c9ded
|
efselab/build.py
|
efselab/build.py
|
#!/usr/bin/env python3
from distutils.core import setup, Extension

# C extension modules shipped with efselab.
MODULES_TO_BUILD = ["fasthash", "suc", "lemmatize"]


def main():
    """Build all C extension modules in place.

    Wrapped in a function so this module can be imported by other code
    without compiling the extensions as an import side effect; builds
    only run when invoked as a script or via an explicit main() call.
    """
    for module in MODULES_TO_BUILD:
        setup(
            name=module,
            ext_modules=[
                Extension(
                    name=module,
                    sources=['%s.c' % module],
                    libraries=[],
                    extra_compile_args=['-Wall', '-Wno-unused-function'],
                    extra_link_args=[]
                )
            ],
            # Build each extension next to its source instead of build/.
            script_args=['build_ext', '--inplace']
        )


if __name__ == '__main__':
    main()
|
#!/usr/bin/env python3
from distutils.core import setup, Extension
MODULES_TO_BUILD = ["fasthash", "suc", "lemmatize"]
def main():
for module in MODULES_TO_BUILD:
setup(
name=module,
ext_modules=[
Extension(
name=module,
sources=['%s.c' % module],
libraries=[],
extra_compile_args=['-Wall', '-Wno-unused-function'],
extra_link_args=[]
)
],
script_args=['build_ext', '--inplace']
)
if __name__ == '__main__':
main()
|
Put module in method to enable calls from libraries.
|
Put module in method to enable calls from libraries.
Former-commit-id: e614cec07ee71723be5b114163fe835961f6811c
|
Python
|
mit
|
EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger
|
1763b533bf33a8e450b4cf8d6f55d4ffaf6b2bea
|
tests/window/WINDOW_CAPTION.py
|
tests/window/WINDOW_CAPTION.py
|
#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
|
Make windows bigger in this test so the captions can be read.
|
Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
|
Python
|
bsd-3-clause
|
mammadori/pyglet,oktayacikalin/pyglet,oktayacikalin/pyglet,theblacklion/pyglet,mammadori/pyglet,oktayacikalin/pyglet,theblacklion/pyglet,oktayacikalin/pyglet,mammadori/pyglet,theblacklion/pyglet,theblacklion/pyglet,oktayacikalin/pyglet,mammadori/pyglet,theblacklion/pyglet
|
10f72ab0428ddf51b47bc95b64b2a532c8e670a5
|
auth0/v2/authentication/enterprise.py
|
auth0/v2/authentication/enterprise.py
|
import requests
class Enterprise(object):
def __init__(self, domain):
self.domain = domain
def saml_login(self, client_id, connection):
"""
"""
return requests.get(
'https://%s/samlp/%s' % (self.domain, client_id),
params={'connection': connection}
)
def saml_metadata(self,
|
from .base import AuthenticationBase
class Enterprise(AuthenticationBase):
    """Enterprise federation endpoints (SAML / WS-Federation).

    Fetches the metadata documents Auth0 serves for a tenant domain.
    """

    def __init__(self, domain):
        # Auth0 tenant domain, e.g. 'example.auth0.com'.
        self.domain = domain

    def saml_metadata(self, client_id):
        """Return the SAML metadata document for the given client id."""
        return self.get(url='https://%s/samlp/metadata/%s' % (self.domain,
                                                              client_id))

    def wsfed_metadata(self):
        """Return the WS-Federation metadata document for the tenant."""
        url = 'https://%s/wsfed/FederationMetadata' \
              '/2007-06/FederationMetadata.xml'
        return self.get(url=url % self.domain)
|
Refactor Enterprise class to use AuthenticationBase
|
Refactor Enterprise class to use AuthenticationBase
|
Python
|
mit
|
auth0/auth0-python,auth0/auth0-python
|
acec9342e392fed103e5d6b78470251d2cf535d6
|
timpani/webserver/webserver.py
|
timpani/webserver/webserver.py
|
import flask
import os.path
import datetime
import urllib.parse
from .. import configmanager
from . import controllers
FILE_LOCATION = os.path.abspath(os.path.dirname(__file__))
STATIC_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../static"))
CONFIG_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../configs/"))
configs = configmanager.ConfigManager(configPath = CONFIG_PATH)
authConfig = configs["auth"]
app = flask.Flask(__name__, static_folder = STATIC_PATH)
app.secret_key = authConfig["signing_key"]
app.register_blueprint(controllers.user.blueprint)
app.register_blueprint(controllers.admin.blueprint)
|
import flask
import os.path
import datetime
import urllib.parse
from .. import database
from .. import configmanager
from . import controllers
FILE_LOCATION = os.path.abspath(os.path.dirname(__file__))
STATIC_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../static"))
CONFIG_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../configs/"))
configs = configmanager.ConfigManager(configPath = CONFIG_PATH)
authConfig = configs["auth"]
app = flask.Flask(__name__, static_folder = STATIC_PATH)
app.secret_key = authConfig["signing_key"]
app.register_blueprint(controllers.user.blueprint)
app.register_blueprint(controllers.admin.blueprint)
@app.teardown_request
def teardown_request(exception = None):
databaseConnection = database.ConnectionManager.getConnection("main")
databaseConnection.session.close()
|
Add teardown request instead of usesDatabase decorator
|
Add teardown request instead of usesDatabase decorator
|
Python
|
mit
|
ollien/Timpani,ollien/Timpani,ollien/Timpani
|
967ec17d15f07191e6d42fc122eb5e731605ad67
|
git_code_debt/repo_parser.py
|
git_code_debt/repo_parser.py
|
import collections
import contextlib
import shutil
import subprocess
import tempfile
from util.iter import chunk_iter
Commit = collections.namedtuple('Commit', ['sha', 'date', 'name'])
class RepoParser(object):
    """Clones a git repository at a given ref and extracts its commits.

    Usage:
        parser = RepoParser(repo_url, 'master')
        with parser.repo_checked_out():
            commits = parser.get_commit_shas()
    """

    def __init__(self, git_repo, ref):
        """
        Args:
            git_repo - Path or URL of the repository to clone.
            ref - Ref (branch, tag or sha) to check out and log from.
        """
        self.git_repo = git_repo
        self.ref = ref
        self.tempdir = None

    @contextlib.contextmanager
    def repo_checked_out(self):
        """Context manager: clone the repo into a temp directory, check
        out self.ref, and remove the directory on exit."""
        assert not self.tempdir
        self.tempdir = tempfile.mkdtemp(suffix='temp-repo')
        try:
            # check_call (not call): previously a failed clone/checkout
            # went unnoticed and later git commands silently ran
            # against an empty or wrong working tree.
            subprocess.check_call(
                ['git', 'clone', self.git_repo, self.tempdir],
                stdout=None,
            )
            subprocess.check_call(
                ['git', 'checkout', self.ref],
                cwd=self.tempdir,
                stdout=None,
            )
            yield
        finally:
            shutil.rmtree(self.tempdir)
            self.tempdir = None

    def get_commit_shas(self, since=None):
        """Returns a list of Commit objects.

        Args:
            since - (optional) A timestamp to look from.
        """
        assert self.tempdir
        # %H sha, %at author timestamp, %cN committer name -- one field
        # per line, consumed in groups of three below.
        cmd = ['git', 'log', self.ref, '--topo-order', '--format=%H%n%at%n%cN']
        if since:
            cmd += ['--after={0}'.format(since)]
        output = subprocess.check_output(
            cmd,
            cwd=self.tempdir,
        )
        commits = []
        for sha, date, name in chunk_iter(output.splitlines(), 3):
            commits.append(Commit(sha, int(date), name))
        return commits
|
import collections
import contextlib
import shutil
import subprocess
import tempfile
from util.iter import chunk_iter
Commit = collections.namedtuple('Commit', ['sha', 'date', 'name'])
class RepoParser(object):
def __init__(self, git_repo):
self.git_repo = git_repo
self.tempdir = None
@contextlib.contextmanager
def repo_checked_out(self):
assert not self.tempdir
self.tempdir = tempfile.mkdtemp(suffix='temp-repo')
try:
subprocess.check_call(
['git', 'clone', self.git_repo, self.tempdir],
stdout=None,
)
yield
finally:
shutil.rmtree(self.tempdir)
self.tempdir = None
def get_commit_shas(self, since=None):
"""Returns a list of Commit objects.
Args:
since - (optional) A timestamp to look from.
"""
assert self.tempdir
cmd = ['git', 'log', 'master', '--first-parent', '--format=%H%n%at%n%cN']
if since:
cmd += ['--after={0}'.format(since)]
output = subprocess.check_output(
cmd,
cwd=self.tempdir,
)
commits = []
for sha, date, name in chunk_iter(output.splitlines(), 3):
commits.append(Commit(sha, int(date), name))
return commits
|
Change sha fetching to use --parent-only and removed ref parameter
|
Change sha fetching to use --parent-only and removed ref parameter
|
Python
|
mit
|
ucarion/git-code-debt,Yelp/git-code-debt,ucarion/git-code-debt,ucarion/git-code-debt,Yelp/git-code-debt,Yelp/git-code-debt,Yelp/git-code-debt
|
a1583d181170302df72fc0a97e5db7f6300061b3
|
tests/__init__.py
|
tests/__init__.py
|
#
|
import os

# Location of the canonical test shapefile and its sidecar files.
DATADIR = os.path.abspath('docs/data')
FILES = ['test_uk.shp', 'test_uk.shx', 'test_uk.dbf', 'test_uk.prj']


def create_zipfile(zipfilename):
    """Bundle the test shapefile components into a zip archive."""
    import zipfile
    with zipfile.ZipFile(zipfilename, 'w') as archive:
        for name in FILES:
            archive.write(os.path.join(DATADIR, name), name)


def create_tarfile(tarfilename):
    """Bundle the test shapefile components into a tar archive under testing/."""
    import tarfile
    with tarfile.open(tarfilename, 'w') as archive:
        for name in FILES:
            archive.add(os.path.join(DATADIR, name), arcname='testing/%s' % name)


def create_jsonfile(jsonfilename):
    """Write the test shapefile out as a GeoJSON FeatureCollection."""
    import json
    import fiona
    from fiona.crs import from_string
    from fiona.tool import crs_uri
    with fiona.collection(os.path.join(DATADIR, FILES[0]), 'r') as source:
        features = list(source)
        # Rebuild a proj4-style CRS string from the source's CRS mapping.
        crs = ' '.join('+%s=%s' % item for item in source.crs.items())
    layer = {
        'type': 'FeatureCollection',
        'features': features,
        'crs': {
            'type': 'name',
            'properties': {'name': crs_uri(from_string(crs))},
        },
    }
    with open(jsonfilename, 'w') as f:
        f.write(json.dumps(layer))


def setup():
    """Setup function for nosetests to create test files if they do not exist
    """
    targets = [
        (os.path.join(DATADIR, 'test_uk.zip'), create_zipfile),
        (os.path.join(DATADIR, 'test_uk.tar'), create_tarfile),
        (os.path.join(DATADIR, 'test_uk.json'), create_jsonfile),
    ]
    for path, creator in targets:
        if not os.path.exists(path):
            creator(path)
|
Create derived test data files if they do not exist when running nosetests
|
Create derived test data files if they do not exist when running nosetests
|
Python
|
bsd-3-clause
|
perrygeo/Fiona,rbuffat/Fiona,perrygeo/Fiona,johanvdw/Fiona,Toblerity/Fiona,rbuffat/Fiona,Toblerity/Fiona
|
34031f2b16303bcff69a7b52ec3e85ce35103c96
|
src/hunter/const.py
|
src/hunter/const.py
|
import collections
import os
import site
import sys
from distutils.sysconfig import get_python_lib

# Every location where third-party packages may be installed.
# site.getsitepackages / getusersitepackages are absent on some
# platforms (e.g. inside certain virtualenvs), hence the hasattr guards.
_site_paths = set()
if hasattr(site, 'getsitepackages'):
    _site_paths.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
    _site_paths.add(site.getusersitepackages())
_site_paths.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(_site_paths)

# Prefixes that identify the interpreter installation / standard library.
_prefix_paths = {
    sys.prefix,
    sys.exec_prefix,
    os.path.dirname(os.__file__),
    os.path.dirname(collections.__file__),
}
# virtualenv exposes real_*; venv/PEP 405 exposes base_* attributes.
for _prop in ('real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix'):
    if hasattr(sys, _prop):
        _prefix_paths.add(getattr(sys, _prop))
SYS_PREFIX_PATHS = tuple(_prefix_paths)
|
import collections
import os
import site
import sys
from distutils.sysconfig import get_python_lib

# Every location where third-party packages may be installed.
# site.getsitepackages / getusersitepackages are absent on some
# platforms (e.g. inside certain virtualenvs), hence the hasattr guards.
_site_paths = set()
if hasattr(site, 'getsitepackages'):
    _site_paths.update(site.getsitepackages())
if hasattr(site, 'getusersitepackages'):
    _site_paths.add(site.getusersitepackages())
_site_paths.add(get_python_lib())
SITE_PACKAGES_PATHS = tuple(_site_paths)

# Prefixes that identify the interpreter installation / standard library.
_prefix_paths = {
    sys.prefix,
    sys.exec_prefix,
    os.path.dirname(os.__file__),
    os.path.dirname(collections.__file__),
}
# virtualenv exposes real_*; venv/PEP 405 exposes base_* attributes.
for _prop in ('real_prefix', 'real_exec_prefix', 'base_prefix', 'base_exec_prefix'):
    if hasattr(sys, _prop):
        _prefix_paths.add(getattr(sys, _prop))
# Longest paths first, assuming stdlib locations are the most specific.
SYS_PREFIX_PATHS = tuple(sorted(_prefix_paths, key=len, reverse=True))
|
Sort by longest path (assuming stdlib stuff will be in the longest).
|
Sort by longest path (assuming stdlib stuff will be in the longest).
|
Python
|
bsd-2-clause
|
ionelmc/python-hunter
|
d1edcb2f59d96168e94ec748633221a2d5f95b99
|
colorise/color_tools.py
|
colorise/color_tools.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator


def hls_to_rgb(hue, lightness, saturation):
    """Convert HLS (hue, lightness, saturation) values to RGB."""
    channels = colorsys.hls_to_rgb(hue, lightness, saturation)
    # Scale each unit-interval channel to 0-255, rounding up.
    return tuple(int(math.ceil(channel * 255.)) for channel in channels)


def hsv_to_rgb(hue, saturation, value):
    """Convert HSV (hue, saturation, value) values to RGB."""
    # Inputs use degrees/percent; colorsys expects unit-interval floats.
    channels = colorsys.hsv_to_rgb(hue / 360., saturation / 100., value / 100.)
    return tuple(int(channel * 255.) for channel in channels)


def color_difference(rgb1, rgb2):
    """Return the sums of component differences between two colors."""
    return sum(abs(a - b) for a, b in zip(rgb1, rgb2))


def color_distance(rgb1, rgb2):
    """Compute the Euclidian distance between two colors."""
    (r1, g1, b1), (r2, g2, b2) = rgb1, rgb2
    return math.sqrt((r2 - r1) ** 2 + (g2 - g1) ** 2 + (b2 - b1) ** 2)


def closest_color(rgb, clut):
    """Return the CLUT index of the closest RGB color to a given RGB tuple."""
    # Pick the index whose palette entry minimizes the component difference.
    return min(clut, key=lambda idx: color_difference(rgb, clut[idx]))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Functions for converting and comparing colors."""
import colorsys
import math
import operator


def hls_to_rgb(hue, lightness, saturation):
    """Convert HLS (hue, lightness, saturation) values to RGB."""
    channels = colorsys.hls_to_rgb(hue, lightness, saturation)
    # Scale each unit-interval channel to 0-255, rounding up.
    return tuple(int(math.ceil(channel * 255.)) for channel in channels)


def hsv_to_rgb(hue, saturation, value):
    """Convert HSV (hue, saturation, value) values to RGB."""
    # Inputs use degrees/percent; colorsys expects unit-interval floats.
    channels = colorsys.hsv_to_rgb(hue / 360., saturation / 100., value / 100.)
    return tuple(int(channel * 255.) for channel in channels)


def color_difference(rgb1, rgb2):
    """Return the sums of component differences between two colors."""
    return sum(abs(a - b) for a, b in zip(rgb1, rgb2))


def closest_color(rgb, clut):
    """Return the CLUT index of the closest RGB color to a given RGB tuple."""
    # Pick the index whose palette entry minimizes the component difference.
    return min(clut, key=lambda idx: color_difference(rgb, clut[idx]))
|
Remove unused color distance function
|
Remove unused color distance function
|
Python
|
bsd-3-clause
|
MisanthropicBit/colorise
|
2c0ff93e3ef5e6914a85e4fc3443f0432337854e
|
text_processor.py
|
text_processor.py
|
from urllib.request import urlopen


def fetch_words():
    """Fetch the story text and return it wrapped six words per line.

    Each word is followed by a single space; each group of up to six
    words is terminated by a newline. Also prints the total word count.
    """
    with urlopen('http://sixty-north.com/c/t.txt') as story:
        # Flatten the response into a single list of words.
        words = [word
                 for line in story
                 for word in line.decode('utf-8').split()]
        print("Word Count", len(words))
        lines = []
        for start in range(0, len(words), 6):
            group = words[start:start + 6]
            lines.append(''.join(word + ' ' for word in group) + '\n')
        return ''.join(lines)


def print_words(word_list):
    """Print the formatted word list."""
    print(word_list)


if __name__ == '__main__':
    print(fetch_words())
|
from urllib.request import urlopen


def fetch_words():
    """Fetch the story text and return it wrapped six words per line.

    Each word is followed by a single space; each group of up to six
    words is terminated by a newline. Also prints the total word count.
    """
    with urlopen('http://sixty-north.com/c/t.txt') as story:
        # Flatten the response into a single list of words.
        words = [word
                 for line in story
                 for word in line.decode('utf-8').split()]
        print("Word Count", len(words))
        lines = []
        for start in range(0, len(words), 6):
            group = words[start:start + 6]
            lines.append(''.join(word + ' ' for word in group) + '\n')
        return ''.join(lines)


def print_words(word_list):
    """Print the formatted word list."""
    print(word_list)


def main():
    """Script entry point: fetch the story and print it."""
    print(fetch_words())


if __name__ == '__main__':
    main()
|
Move main execution to function
|
Move main execution to function
|
Python
|
mit
|
kentoj/python-fundamentals
|
83ceca04758c6546c41d5bc7f96583d838f25e11
|
src/mmw/apps/user/backends.py
|
src/mmw/apps/user/backends.py
|
# -*- coding: utf-8 -*-
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.backends import BaseBackend
from django.contrib.auth.models import User
from apps.user.models import ItsiUser, ConcordUser


class SSOAuthenticationBackend(BaseBackend):
    """
    A custom authentication back-end for Single Sign On providers.

    Before we can call django.contrib.auth.login on an SSO user, we must first
    authenticate them. This must be done using a custom authentication back-
    end, which sets the backend attribute on the user model.

    This class should be instantiated with an SSO provider user model, such
    as ItsiUser or ConcordUser, before it can be used.
    """
    def __init__(self, model, field):
        # model: SSO provider user model; field: its SSO-id column name.
        self.SSOUserModel = model
        self.SSOField = field

    def authenticate(self, request=None, sso_id=None):
        """Return the Django User linked to sso_id, or None if not found.

        `request` is accepted (and ignored) because Django >= 1.11 inspects
        the backend's signature with the request as an argument in
        django.contrib.auth.authenticate; without it this backend is skipped.
        """
        if sso_id is not None:
            try:
                query = {self.SSOField: sso_id}
                user = self.SSOUserModel.objects.get(**query).user
                return user
            except ObjectDoesNotExist:
                return None
        return None

    def get_user(self, user_id):
        """Return the User with the given primary key, or None."""
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None


class ItsiAuthenticationBackend(SSOAuthenticationBackend):
    def __init__(self):
        super(ItsiAuthenticationBackend, self).__init__(
            ItsiUser, 'itsi_id')


class ConcordAuthenticationBackend(SSOAuthenticationBackend):
    def __init__(self):
        super(ConcordAuthenticationBackend, self).__init__(
            ConcordUser, 'concord_id')
|
# -*- coding: utf-8 -*-
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.backends import BaseBackend
from django.contrib.auth.models import User
from apps.user.models import ItsiUser, ConcordUser


class SSOAuthenticationBackend(BaseBackend):
    """
    A custom authentication back-end for Single Sign On providers.

    Before we can call django.contrib.auth.login on an SSO user, we must first
    authenticate them. This must be done using a custom authentication back-
    end, which sets the backend attribute on the user model.

    This class should be instantiated with an SSO provider user model, such
    as ItsiUser or ConcordUser, before it can be used.
    """
    def __init__(self, model, field):
        # model: SSO provider user model; field: its SSO-id column name.
        self.SSOUserModel = model
        self.SSOField = field

    def authenticate(self, request=None, sso_id=None):
        """Return the Django User linked to sso_id, or None if not found."""
        if sso_id is None:
            return None
        try:
            lookup = {self.SSOField: sso_id}
            return self.SSOUserModel.objects.get(**lookup).user
        except ObjectDoesNotExist:
            return None

    def get_user(self, user_id):
        """Return the User with the given primary key, or None."""
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None


class ItsiAuthenticationBackend(SSOAuthenticationBackend):
    def __init__(self):
        super(ItsiAuthenticationBackend, self).__init__(
            ItsiUser, 'itsi_id')


class ConcordAuthenticationBackend(SSOAuthenticationBackend):
    def __init__(self):
        super(ConcordAuthenticationBackend, self).__init__(
            ConcordUser, 'concord_id')
|
Add request parameter to backend.authenticate
|
Add request parameter to backend.authenticate
Without this, the signature of our custom backend does not
match that of the function call. This signature is tested
in django.contrib.auth.authenticate here: https://github.com/django/django/blob/fdf209eab8949ddc345aa0212b349c79fc6fdebb/django/contrib/auth/__init__.py#L69
and `request` was added to that signature in Django 1.11
in https://github.com/django/django/commit/4b9330ccc04575f9e5126529ec355a450d12e77c.
With this, the Concord users are authenticated correctly.
|
Python
|
apache-2.0
|
WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed
|
e3919de815594c73c35c265380c66ef14a51acbd
|
__init__.py
|
__init__.py
|
# Bootstrap loader: locates the pre-marshalled bundle for this interpreter
# and evaluates its code objects in this package's namespace.
import sys
import marshal
import os.path

# sys.implementation appeared in Python 3.3.
if not hasattr(sys, 'implementation'):
    raise ImportError('Python 3.3 or newer is required')

# cache_tag (e.g. 'cpython-33') selects the bundle compiled for this
# interpreter; hexversion is exposed for consumers of this package.
PY_TAG = sys.implementation.cache_tag
PY_VERSION = sys.hexversion

if PY_TAG is None:
    # Never seen this to be true, but Python documentation
    # mentions that it's possible.
    raise ImportError('cannot load the bundle since module caching is disabled')

# Point __file__ at the bundle itself rather than this stub.
__file__ = os.path.join(__path__[0], 'bundle', PY_TAG + '.dgbundle')

try:
    with open(__file__, 'rb') as _fd:
        # The bundle is a marshalled sequence of code objects; evaluate
        # each at module level so its definitions land in this namespace.
        for _c in marshal.load(_fd):
            eval(_c)
except IOError:
    # Missing/unreadable bundle, e.g. an unsupported platform.
    raise ImportError('`{}.dgbundle` is inaccessible'.format(PY_TAG))
except Exception:
    # Any other failure means the bundle contents could not be decoded.
    raise ImportError('`{}.dgbundle` is corrupt'.format(PY_TAG))
|
# Bootstrap loader: locates the pre-marshalled bundle for this interpreter
# and evaluates its code objects in this package's namespace.
import sys
import marshal
import os.path

# sys.implementation appeared in Python 3.3.
if not hasattr(sys, 'implementation'):
    raise ImportError('Python 3.3 or newer is required')

if sys.implementation.cache_tag is None:
    raise ImportError('cannot load the bundle since module caching is disabled')

# cache_tag (e.g. 'cpython-33') selects the bundle compiled for this
# interpreter; hexversion is exposed for consumers of this package.
PY_TAG = sys.implementation.cache_tag
PY_VERSION = sys.hexversion
BUNDLE_DIR = os.path.join(__path__[0], 'bundle')
BUNDLE_FILE = os.path.join(BUNDLE_DIR, PY_TAG + '.dgbundle')

if not os.path.exists(BUNDLE_FILE):
    raise ImportError('unsupported platform: {}'.format(PY_TAG))

# Pre-bind the loop variable: if the bundle marshals to an empty sequence the
# loop never binds _c and the cleanup `del _c` below would raise NameError.
_c = None
with open(BUNDLE_FILE, 'rb') as _fd:
    # The bundle is a marshalled sequence of code objects; evaluate each at
    # module level so its definitions land in this package's namespace.
    for _c in marshal.load(_fd):
        eval(_c)
del _c
del _fd
|
Rewrite the initial bootstrapping mechanism.
|
Rewrite the initial bootstrapping mechanism.
|
Python
|
mit
|
pyos/dg
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.