commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
a08483b5fc55556b46c08e988ac297b1dffaed48
|
app/utils/utilities.py
|
app/utils/utilities.py
|
from re import search
from flask import g
from flask_restplus import abort
from flask_httpauth import HTTPBasicAuth
from app.models.user import User
from instance.config import Config
auth = HTTPBasicAuth()
def validate_email(email):
''' Method to check that a valid email is provided '''
email_re = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"
return True if search(email_re, email) else False
@auth.verify_token
def verify_token(token=None):
''' Method to verify token '''
token = request.headers.get('x-access-token')
user_id = User.verify_authentication_token(token)
if user_id:
g.current_user = User.query.filter_by(id=user.id).first()
return True
return False
|
from re import search
from flask import g, request
from flask_httpauth import HTTPTokenAuth
from app.models.user import User
auth = HTTPTokenAuth(scheme='Token')
def validate_email(email):
''' Method to check that a valid email is provided '''
email_re = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"
return True if search(email_re, email) else False
@auth.verify_token
def verify_token(token=None):
''' Method to verify token '''
token = request.headers.get('x-access-token')
user_id = User.verify_authentication_token(token)
if user_id:
g.current_user = User.query.filter_by(id=user_id).first()
return True
return False
|
Implement HTTPTokenAuth Store user data in global
|
Implement HTTPTokenAuth
Store user data in global
|
Python
|
mit
|
Elbertbiggs360/buckelist-api
|
463e0ab2a77734cf6787d9cb788a57e7dd53ff06
|
games/admin.py
|
games/admin.py
|
from django.contrib import admin
from .models import Game, Framework, Release, Asset
class GameAdmin(admin.ModelAdmin):
pass
class FrameworkAdmin(admin.ModelAdmin):
pass
class ReleaseAdmin(admin.ModelAdmin):
pass
class AssetAdmin(admin.ModelAdmin):
list_display = ['__unicode__', 'release']
admin.site.register(Game, GameAdmin)
admin.site.register(Release, ReleaseAdmin)
admin.site.register(Framework, FrameworkAdmin)
admin.site.register(Asset, AssetAdmin)
|
from django.contrib import admin
from .models import Game, Framework, Release, Asset
class GameAdmin(admin.ModelAdmin):
list_display = ['name', 'uuid', 'owner', 'framework', 'public']
class FrameworkAdmin(admin.ModelAdmin):
pass
class ReleaseAdmin(admin.ModelAdmin):
pass
class AssetAdmin(admin.ModelAdmin):
list_display = ['__unicode__', 'release']
admin.site.register(Game, GameAdmin)
admin.site.register(Release, ReleaseAdmin)
admin.site.register(Framework, FrameworkAdmin)
admin.site.register(Asset, AssetAdmin)
|
Add fields to the game display in Admin
|
Add fields to the game display in Admin
|
Python
|
mit
|
stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb
|
0a336447546442ab5d48716223713135a4812adf
|
get_problem.py
|
get_problem.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from bs4 import BeautifulSoup
from requests import get, codes
def match_soup_class(target, mode='class'):
def do_match(tag):
classes = tag.get(mode, [])
return all(c in classes for c in target)
return do_match
def main():
if len(sys.argv) == 1:
p = 1
else:
p = int(sys.argv[1])
url = 'https://projecteuler.net/problem=%d' % p
r = get(url)
if r.status_code != codes.ok:
print('[url request failed] ', url)
return
soup = BeautifulSoup(r.text, 'html.parser')
for content in soup.find_all(match_soup_class(['problem_content'])):
print(content.text)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from bs4 import BeautifulSoup
from requests import get, codes
def match_soup_class(target, mode='class'):
def do_match(tag):
classes = tag.get(mode, [])
return all(c in classes for c in target)
return do_match
def main():
if len(sys.argv) == 1:
p = 1
else:
p = int(sys.argv[1])
url = 'https://projecteuler.net/problem=%d' % p
r = get(url)
if r.status_code != codes.ok:
print('[url request failed] ', url)
return
soup = BeautifulSoup(r.text, 'html.parser')
print("'''")
print('Problem %d' % p)
for content in soup.find_all(match_soup_class(['problem_content'])):
print(content.text)
print("'''")
if __name__ == '__main__':
main()
|
ADD comment for python file
|
ADD comment for python file
|
Python
|
mit
|
byung-u/ProjectEuler
|
42b4837570fd936c5a7593026fc4868c38d4b09d
|
base/management/commands/revision_count.py
|
base/management/commands/revision_count.py
|
# -*- encoding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.apps import apps
# from reversion import revisions as reversion
from reversion.models import Version
from reversion.errors import RegistrationError
class Command(BaseCommand):
help = "Count reversion records for each model"
def handle(self, *args, **options):
total_count = 0
print_pattern = "{:<15} {:<30s} {:>10d}"
prev_app = None
for model in sorted(
apps.get_models(),
key=lambda mod: mod.__module__ + '.' + mod.__name__):
app_name = model._meta.app_label
model_name = model.__name__
try:
qs = Version.objects.get_for_model(model)
count = qs.count()
total_count += count
if prev_app and prev_app != app_name:
print()
print (print_pattern.format(
app_name if prev_app != app_name else "",
model_name, count
))
prev_app = app_name
except RegistrationError:
# model is not registered with reversion ignore
pass
print ()
print (print_pattern.format("Total Records", "", total_count))
|
# -*- encoding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.apps import apps
# from reversion import revisions as reversion
from reversion.models import Version
from reversion.errors import RegistrationError
class Command(BaseCommand):
help = "Count reversion records for each model"
def handle(self, *args, **options):
total_count = 0
print_pattern = "{:<15} {:<30s} {:>10d}"
title_pattern = "{:<15} {:<30s} {:>10s}"
self.stdout.write(title_pattern.format("App", "Model", "Revisions"))
self.stdout.write(title_pattern.format("===", "=====", "========="))
prev_app = None
for model in sorted(
apps.get_models(),
key=lambda mod: mod.__module__ + '.' + mod.__name__):
app_name = model._meta.app_label
model_name = model.__name__
try:
qs = Version.objects.get_for_model(model)
count = qs.count()
total_count += count
if prev_app and prev_app != app_name:
self.stdout.write("")
self.stdout.write(print_pattern.format(
app_name if prev_app != app_name else "",
model_name, count
))
prev_app = app_name
except RegistrationError:
# model is not registered with reversion ignore
pass
self.stdout.write("")
self.stdout.write(print_pattern.format("Total Records", "", total_count))
|
Add titles to columns and use write instead of print
|
Add titles to columns and use write instead of print
|
Python
|
apache-2.0
|
pkimber/base,pkimber/base,pkimber/base,pkimber/base
|
8b7aa0a540c7927b53adf6368e9cb8476816d941
|
asciibooth/statuses.py
|
asciibooth/statuses.py
|
# encoding: UTF-8
import random
from . import config
def sampler(source):
def reshuffle():
copy = list(source)
random.shuffle(copy)
return copy
stack = reshuffle()
lastitem = ''
while True:
try:
item = stack.pop()
if item == lastitem:
item = stack.pop()
yield item
lastitem = item
except IndexError:
stack = reshuffle()
continue
def incremental_chance(increment=0.01, start=0.5):
current_chance = start
while True:
r = random.random()
success = (r < current_chance)
if success:
current_chance = start
else:
current_chance += increment
yield success
def status_generator():
random_status = sampler(config.TWEET_MESSAGES)
show_status = incremental_chance(start=0, increment=0.25)
fixed = config.TWEET_FIXED
while True:
status = ''
if next(show_status):
status = next(random_status) + " "
yield "{status}{fixed}".format(status=status, fixed=fixed)
if __name__ == '__main__':
gen = status_generator()
for i in range(0, 20):
print(next(gen))
|
# encoding: UTF-8
import random
from . import config
def sampler(source):
def reshuffle():
copy = list(source)
random.shuffle(copy)
return copy
stack = reshuffle()
lastitem = ''
while True:
try:
item = stack.pop()
if item == lastitem:
item = stack.pop()
yield item
lastitem = item
except IndexError:
stack = reshuffle()
continue
def incremental_chance(start=0.5, increment=0.01):
current_chance = start
while True:
r = random.random()
success = (r < current_chance)
if success:
current_chance = start
else:
current_chance += increment
yield success
def status_generator():
random_status = sampler(config.TWEET_MESSAGES)
show_status = incremental_chance(start=config.TWEET_CHANCE_INITIAL, increment=config.TWEET_CHANCE_INCREMENT)
fixed = config.TWEET_FIXED
while True:
status = ''
if next(show_status):
status = next(random_status) + " "
yield "{status}{fixed}".format(status=status, fixed=fixed)
if __name__ == '__main__':
gen = status_generator()
for i in range(0, 20):
print(next(gen))
|
Add configuration options for randomness
|
Add configuration options for randomness
|
Python
|
cc0-1.0
|
jnv/asciibooth,jnv/asciibooth
|
0d023a51283d477e4b3d02059361b003a91134e0
|
jaspyx/scope.py
|
jaspyx/scope.py
|
class Scope(object):
tmp_index = 0
def __init__(self, parent=None):
self.parent = parent
self.prefix = []
self.declarations = {}
self.globals = set()
self.inherited = True
def prefixed(self, name):
return '.'.join(self.prefix + [name])
def declare(self, name, var=True):
self.declarations[name] = var
def get_scope(self, name, inherit=False):
if name in self.declarations and (not inherit or self.inherited):
return self
elif self.parent is not None:
return self.parent.get_scope(name, True)
else:
return None
def declare_global(self, name):
self.globals.add(name)
def is_global(self, name):
return name in self.globals
def get_global_scope(self):
if self.parent:
return self.parent.get_global_scope()
else:
return self
@classmethod
def alloc_temp(cls):
cls.tmp_index += 1
return '__jpx_tmp_%i' % cls.tmp_index
|
class Scope(object):
def __init__(self, parent=None):
self.parent = parent
self.prefix = []
self.declarations = {}
self.globals = set()
self.inherited = True
def prefixed(self, name):
return '.'.join(self.prefix + [name])
def declare(self, name, var=True):
self.declarations[name] = var
def get_scope(self, name, inherit=False):
if name in self.declarations and (not inherit or self.inherited):
return self
elif self.parent is not None:
return self.parent.get_scope(name, True)
else:
return None
def declare_global(self, name):
self.globals.add(name)
def is_global(self, name):
return name in self.globals
def get_global_scope(self):
if self.parent:
return self.parent.get_global_scope()
else:
return self
|
Remove temp var allocation code.
|
Remove temp var allocation code.
|
Python
|
mit
|
ztane/jaspyx,iksteen/jaspyx
|
4524b88eef8a46d40c4d353c3561401ac3689878
|
bookmarks/urls.py
|
bookmarks/urls.py
|
from django.conf.urls import patterns, url
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
vote_on_object, dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
|
from django.conf.urls import patterns, url
from django.views.decorators.csrf import csrf_exempt
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
csrf_exempt(vote_on_object), dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
|
Disable csrf checks for voting
|
Disable csrf checks for voting
|
Python
|
mit
|
incuna/incuna-bookmarks,incuna/incuna-bookmarks
|
c2b0f66d5760d61444b4909e40c45993780cd473
|
examples/champion.py
|
examples/champion.py
|
import cassiopeia as cass
from cassiopeia.core import Champion
def test_cass():
#annie = Champion(name="Annie", region="NA")
annie = Champion(name="Annie")
print(annie.name)
print(annie.title)
print(annie.title)
for spell in annie.spells:
print(spell.name, spell.keywords)
print(annie.info.difficulty)
print(annie.passive.name)
#print(annie.recommended_itemsets[0].item_sets[0].items)
print(annie.free_to_play)
print(annie._Ghost__all_loaded)
print(annie)
return
print()
#ziggs = cass.get_champion(region="NA", "Ziggs")
ziggs = cass.get_champion("Renekton")
print(ziggs.name)
print(ziggs.region)
#print(ziggs.recommended_itemset[0].item_sets[0].items)
print(ziggs.free_to_play)
for spell in ziggs.spells:
for var in spell.variables:
print(spell.name, var)
print(ziggs._Ghost__all_loaded)
if __name__ == "__main__":
test_cass()
|
import cassiopeia as cass
from cassiopeia.core import Champion
def test_cass():
#annie = Champion(name="Annie", region="NA")
annie = Champion(name="Annie")
print(annie.name)
print(annie.title)
print(annie.title)
for spell in annie.spells:
print(spell.name, spell.keywords)
print(annie.info.difficulty)
print(annie.passive.name)
#print(annie.recommended_itemsets[0].item_sets[0].items)
print(annie.free_to_play)
print(annie._Ghost__all_loaded)
print(annie)
print()
#ziggs = cass.get_champion(region="NA", "Ziggs")
ziggs = cass.get_champion("Ziggs")
print(ziggs.name)
print(ziggs.region)
#print(ziggs.recommended_itemset[0].item_sets[0].items)
print(ziggs.free_to_play)
for spell in ziggs.spells:
for var in spell.variables:
print(spell.name, var)
print(ziggs._Ghost__all_loaded)
if __name__ == "__main__":
test_cass()
|
Remove `return`, get Ziggs instead of Renekton, since we're saving as Ziggs
|
Remove `return`, get Ziggs instead of Renekton, since we're saving as Ziggs
|
Python
|
mit
|
robrua/cassiopeia,meraki-analytics/cassiopeia,10se1ucgo/cassiopeia
|
f22eff612427dc5f530858bb47326d69b48aa68a
|
darchan/urls.py
|
darchan/urls.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
'darchan.views',
url(r'^view_matrix/$', 'v_view_last_matrix', name='view_last_matrix'),
url(r'^view_matrix/(?P<builder_id>\d+)/(?P<depth>\d+)/$', 'v_view_matrix',
name='view_matrix'),
url(r'^generate_matrix/$', 'v_generate_matrix', name='generate_matrix'),
url(r'^download_csv/(?P<builder_id>\d+)/(?P<depth>\d+)/$',
'v_download_csv', name='download_csv'),
)
|
# -*- coding: utf-8 -*-
# from __future__ import unicode_literals
from django.conf.urls import url
from darchan import views
urlpatterns = [
url(r'^view_matrix/$',
views.v_view_last_matrix, name='view_last_matrix'),
url(r'^view_matrix/(?P<builder_id>\d+)/(?P<depth>\d+)/$',
views.v_view_matrix, name='view_matrix'),
url(r'^generate_matrix/$',
views.v_generate_matrix, name='generate_matrix'),
url(r'^download_csv/(?P<builder_id>\d+)/(?P<depth>\d+)/$',
views.v_download_csv, name='download_csv'),
]
|
Update support to Django 1.9
|
Update support to Django 1.9
|
Python
|
mpl-2.0
|
Pawamoy/django-archan,Pawamoy/django-archan,Pawamoy/django-archan
|
7e2d6cfa6b6a536d1df6e2d2d523a4bb4094f5eb
|
src/poliastro/plotting/misc.py
|
src/poliastro/plotting/misc.py
|
from poliastro.bodies import (
Earth,
Jupiter,
Mars,
Mercury,
Neptune,
Saturn,
Uranus,
Venus,
)
from poliastro.plotting.core import OrbitPlotter2D, OrbitPlotter3D
from poliastro.twobody import Orbit
def plot_solar_system(outer=True, epoch=None, use_3d=False):
"""
Plots the whole solar system in one single call.
.. versionadded:: 0.9.0
Parameters
------------
outer : bool, optional
Whether to print the outer Solar System, default to True.
epoch: ~astropy.time.Time, optional
Epoch value of the plot, default to J2000.
"""
bodies = [Mercury, Venus, Earth, Mars]
if outer:
bodies.extend([Jupiter, Saturn, Uranus, Neptune])
if use_3d:
op = OrbitPlotter3D()
else:
op = OrbitPlotter2D()
for body in bodies:
orb = Orbit.from_body_ephem(body, epoch)
op.plot(orb, label=str(body))
# Sets frame to the orbit of the Earth by default
# TODO: Wait until https://github.com/poliastro/poliastro/issues/316
# op.set_frame(*Orbit.from_body_ephem(Earth, epoch).pqw())
return op
|
from typing import Union
from poliastro.bodies import (
Earth,
Jupiter,
Mars,
Mercury,
Neptune,
Saturn,
Uranus,
Venus,
)
from poliastro.plotting.core import OrbitPlotter2D, OrbitPlotter3D
from poliastro.twobody import Orbit
def plot_solar_system(outer=True, epoch=None, use_3d=False):
"""
Plots the whole solar system in one single call.
.. versionadded:: 0.9.0
Parameters
------------
outer : bool, optional
Whether to print the outer Solar System, default to True.
epoch : ~astropy.time.Time, optional
Epoch value of the plot, default to J2000.
use_3d : bool, optional
Produce 3D plot, default to False.
"""
bodies = [Mercury, Venus, Earth, Mars]
if outer:
bodies.extend([Jupiter, Saturn, Uranus, Neptune])
if use_3d:
op = OrbitPlotter3D() # type: Union[OrbitPlotter3D, OrbitPlotter2D]
else:
op = OrbitPlotter2D()
op.set_frame(*Orbit.from_body_ephem(Earth, epoch).pqw()) # type: ignore
for body in bodies:
orb = Orbit.from_body_ephem(body, epoch)
op.plot(orb, label=str(body))
return op
|
Set frame only when using 2D
|
Set frame only when using 2D
|
Python
|
mit
|
Juanlu001/poliastro,Juanlu001/poliastro,Juanlu001/poliastro,poliastro/poliastro
|
40347e45646aa57c9181cb289dfa88a3b3eb3396
|
experiment/models.py
|
experiment/models.py
|
from django.db import models
from experiment_session.models import ExperimentSession
from django.core.validators import MinValueValidator
class Experiment(models.Model):
LIGHTOFF_FIXED = 'fixed'
LIGHTOFF_WAITING = 'waiting'
_LIGHTOFF_CHOICES = (
(LIGHTOFF_FIXED, 'Fixed'),
(LIGHTOFF_WAITING, 'Waiting')
)
AUDIO_NONE = 'none'
AUDIO_BEEP = 'beep'
_AUDIO_CHOICES = (
(AUDIO_NONE, 'None'),
(AUDIO_BEEP, 'Audible beep on error')
)
name = models.CharField(unique=True, max_length=255)
lightoffmode = models.CharField(
choices=_LIGHTOFF_CHOICES,
max_length=30
)
lightofftimeout = models.IntegerField(validators=(MinValueValidator(0),))
audiomode = models.CharField(
choices=_AUDIO_CHOICES,
max_length=30
)
repeatscount = models.IntegerField(
validators=(
MinValueValidator(1),
)
)
createdon = models.DateTimeField(auto_now_add=True, editable=False)
traininglength = models.IntegerField(validators=(MinValueValidator(0),))
instructions = models.CharField(max_length=10000, default='')
def __str__(self):
return self.name
|
from django.db import models
from experiment_session.models import ExperimentSession
from django.core.validators import MinValueValidator
class Experiment(models.Model):
LIGHTOFF_FIXED = 'fixed'
LIGHTOFF_WAITING = 'waiting'
_LIGHTOFF_CHOICES = (
(LIGHTOFF_FIXED, 'Fixed'),
(LIGHTOFF_WAITING, 'Waiting')
)
AUDIO_NONE = 'none'
AUDIO_BEEP = 'beep'
_AUDIO_CHOICES = (
(AUDIO_NONE, 'None'),
(AUDIO_BEEP, 'Audible beep on error')
)
name = models.CharField(unique=True, max_length=255)
lightoffmode = models.CharField(
choices=_LIGHTOFF_CHOICES,
max_length=30
)
lightofftimeout = models.IntegerField(validators=(MinValueValidator(0),))
audiomode = models.CharField(
choices=_AUDIO_CHOICES,
max_length=30
)
repeatscount = models.IntegerField(
validators=(
MinValueValidator(1),
)
)
createdon = models.DateTimeField(auto_now_add=True, editable=False)
traininglength = models.IntegerField(validators=(MinValueValidator(0),))
instructions = models.CharField(max_length=10000, blank=True)
def __str__(self):
return self.name
|
Allow empty strings as instructions
|
Allow empty strings as instructions
|
Python
|
mit
|
piotrb5e3/1023alternative-backend
|
04944ccd83e924fed6b351a6073d837a5ce639e9
|
sevenbridges/models/compound/price_breakdown.py
|
sevenbridges/models/compound/price_breakdown.py
|
import six
from sevenbridges.meta.resource import Resource
from sevenbridges.meta.fields import StringField
class Breakdown(Resource):
"""
Breakdown resource contains price breakdown by storage and computation.
"""
storage = StringField(read_only=True)
computation = StringField(read_only=True)
def __str__(self):
return six.text_type(
'<Breakdown: storage={storage}, computation={computation}>'.format(
storage=self.storage, computation=self.computation
)
)
|
import six
from sevenbridges.meta.resource import Resource
from sevenbridges.meta.fields import StringField
class Breakdown(Resource):
"""
Breakdown resource contains price breakdown by storage and computation.
"""
storage = StringField(read_only=True)
computation = StringField(read_only=True)
data_transfer = StringField(read_only=True)
def __str__(self):
if self.data_transfer:
return six.text_type(
'<Breakdown: storage={storage}, computation={computation}, '
'data_transfer={data_transfer}>'.format(
storage=self.storage, computation=self.computation,
data_transfer=self.data_transfer
)
)
return six.text_type(
'<Breakdown: storage={storage}, computation={computation}>'.format(
storage=self.storage, computation=self.computation
)
)
|
Add data_transfer to price breakdown
|
Add data_transfer to price breakdown
|
Python
|
apache-2.0
|
sbg/sevenbridges-python
|
9b0d5796c1e48a3bf294971dc129499876936a36
|
send2trash/plat_osx.py
|
send2trash/plat_osx.py
|
# Copyright 2017 Virgil Dupras
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
from platform import mac_ver
from sys import version_info
# If macOS is 11.0 or newer try to use the pyobjc version to get around #51
# NOTE: pyobjc only supports python >= 3.6
if version_info >= (3, 6) and int(mac_ver()[0].split(".", 1)[0]) >= 11:
try:
from .plat_osx_pyobjc import send2trash
except ImportError:
# Try to fall back to ctypes version, although likely problematic still
from .plat_osx_ctypes import send2trash
else:
# Just use the old version otherwise
from .plat_osx_ctypes import send2trash # noqa: F401
|
# Copyright 2017 Virgil Dupras
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
from platform import mac_ver
from sys import version_info
# NOTE: version of pyobjc only supports python >= 3.6 and 10.9+
macos_ver = tuple(int(part) for part in mac_ver()[0].split("."))
if version_info >= (3, 6) and macos_ver >= (10, 9):
try:
from .plat_osx_pyobjc import send2trash
except ImportError:
# Try to fall back to ctypes version, although likely problematic still
from .plat_osx_ctypes import send2trash
else:
# Just use the old version otherwise
from .plat_osx_ctypes import send2trash # noqa: F401
|
Change conditional for macos pyobjc usage
|
Change conditional for macos pyobjc usage
macOS 11.x will occasionally identify as 10.16, since there was no real
reason to prevent on all supported platforms allow.
|
Python
|
bsd-3-clause
|
hsoft/send2trash
|
c09b468583c97d7831478119614b231be0d24afa
|
scripts/generate_input_syntax.py
|
scripts/generate_input_syntax.py
|
#!/usr/bin/env python
import sys, os
# get the location of this script
app_path = os.path.abspath(os.path.dirname(sys.argv[0]))
# this script is actually in the scripts subdirectory, so go up a level
app_path += '/..'
# Set the name of the application here and moose directory relative to the application
app_name = 'falcon'
MOOSE_DIR = app_path + '/../moose'
# See if MOOSE_DIR is already in the environment instead
if os.environ.has_key("MOOSE_DIR"):
MOOSE_DIR = os.environ['MOOSE_DIR']
sys.path.append(MOOSE_DIR + '/scripts/syntaxHTML')
import genInputFileSyntaxHTML
# this will automatically copy the documentation to the base directory
# in a folder named syntax
genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, MOOSE_DIR)
|
#!/usr/bin/env python
import sys, os
# get the location of this script
app_path = os.path.abspath(os.path.dirname(sys.argv[0]))
# Set the name of the application here and moose directory relative to the application
app_name = 'falcon'
MOOSE_DIR = os.path.abspath(os.path.join(app_path, '..', '..' 'moose'))
FRAMEWORK_DIR = os.path.abspath(os.path.join(app_path, '..', '..', 'moose', 'framework'))
#### See if MOOSE_DIR is already in the environment instead
if os.environ.has_key("MOOSE_DIR"):
MOOSE_DIR = os.environ['MOOSE_DIR']
FRAMEWORK_DIR = os.path.join(MOOSE_DIR, 'framework')
if os.environ.has_key("FRAMEWORK_DIR"):
FRAMEWORK_DIR = os.environ['FRAMEWORK_DIR']
sys.path.append(FRAMEWORK_DIR + '/scripts/syntaxHTML')
import genInputFileSyntaxHTML
# this will automatically copy the documentation to the base directory
# in a folder named syntax
genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, FRAMEWORK_DIR)
|
Update scripts to reflect new MOOSE_DIR definition
|
Update scripts to reflect new MOOSE_DIR definition
r25009
|
Python
|
lgpl-2.1
|
idaholab/falcon,aeslaughter/falcon,idaholab/falcon,aeslaughter/falcon,idaholab/falcon,idaholab/falcon,aeslaughter/falcon
|
6dbd72af13f017d9b1681da49f60aaf69f0a9e41
|
tests/transformer_test_case.py
|
tests/transformer_test_case.py
|
class TransformerTestCase(object):
def get_pattern_for_spec(self, patterns, spec):
for pattern in patterns:
if pattern.search(spec):
return pattern
|
from spec2scl import settings
from spec2scl import specfile
class TransformerTestCase(object):
def make_prep(self, spec):
# just create one of settings.RUNTIME_SECTIONS, so that we can test all the matching
return '%prep\n' + spec
def get_pattern_for_spec(self, handler, spec_text):
spec = specfile.Specfile(spec_text)
for s_name, s_text in spec.sections:
for i, pattern in enumerate(handler.matches):
if pattern.search(s_text) and s_name in handler.sections[i]:
return pattern
|
Improve our custom test case
|
Improve our custom test case
- create a make_prep method that allows quick creation of prep section from anything for good testing of custom transformers (that usually don't transform header section)
- improve get_pattern_for_spec with section checking
|
Python
|
mit
|
mbooth101/spec2scl,sclorg/spec2scl
|
4de9bee656041c9cfcd91ec61d294460f6427d77
|
lib/database.py
|
lib/database.py
|
class Database:
def __init__(self, db):
self.db = db
self.cursor = db.cursor()
def disconnect(self):
self.cursor.close()
self.db.close()
def query(self, sql):
self.cursor.execute(sql)
return self.cursor.fetchall()
def insert(self, sql):
self.cursor.execute(sql)
self.db.commit()
|
import pymysql
class Database:
def __init__(self, db):
self.db = db
self.cursor = db.cursor()
def disconnect(self):
self.cursor.close()
self.db.close()
def query(self, sql):
try:
self.cursor.execute(sql)
return self.cursor.fetchall()
except pymysql.OperationalError:
self.db.ping()
self.cursor.execute(sql)
return self.cursor.fetchall()
def insert(self, sql):
try:
self.cursor.execute(sql)
self.db.commit()
except pymysql.OperationalError:
self.db.ping()
self.cursor.execute(sql)
self.db.commit()
|
Reconnect if the connection times out.
|
Reconnect if the connection times out.
|
Python
|
mit
|
aquaticpond/pyqodbc
|
8657f7aef8944eae718cabaaa7dfd25d2ec95960
|
conditions/__init__.py
|
conditions/__init__.py
|
from .conditions import *
from .exceptions import *
from .fields import *
from .lists import *
from .types import *
|
from .conditions import Condition, CompareCondition
from .exceptions import UndefinedConditionError, InvalidConditionError
from .fields import ConditionsWidget, ConditionsFormField, ConditionsField
from .lists import CondList, CondAllList, CondAnyList, eval_conditions
from .types import conditions_from_module
__all__ = [
'Condition', 'CompareCondition', 'UndefinedConditionError', 'InvalidConditionError', 'ConditionsWidget',
'ConditionsFormField', 'ConditionsField', 'CondList', 'CondAllList', 'CondAnyList', 'eval_conditions',
'conditions_from_module',
]
|
Replace star imports with explicit imports
|
PEP8: Replace star imports with explicit imports
|
Python
|
isc
|
RevolutionTech/django-conditions,RevolutionTech/django-conditions,RevolutionTech/django-conditions
|
7d79e6f0404b04ababaca3d8c50b1e682fd64222
|
chainer/initializer.py
|
chainer/initializer.py
|
import typing as tp # NOQA
from chainer import types # NOQA
from chainer import utils
class Initializer(object):
"""Initializes array.
It initializes the given array.
Attributes:
dtype: Data type specifier. It is for type check in ``__call__``
function.
"""
def __init__(self, dtype=None):
# type: (tp.Optional[types.DTypeSpec]) -> None
self.dtype = dtype # type: types.DTypeSpec
def __call__(self, array):
# type: (types.NdArray) -> None
"""Initializes given array.
This method destructively changes the value of array.
The derived class is required to implement this method.
The algorithms used to make the new values depend on the
concrete derived classes.
Args:
array (:ref:`ndarray`):
An array to be initialized by this initializer.
"""
raise NotImplementedError()
# Original code forked from MIT licensed keras project
# https://github.com/fchollet/keras/blob/master/keras/initializations.py
def get_fans(shape):
if not isinstance(shape, tuple):
raise ValueError('shape must be tuple')
if len(shape) < 2:
raise ValueError('shape must be of length >= 2: shape={}', shape)
receptive_field_size = utils.size_of_shape(shape[2:])
fan_in = shape[1] * receptive_field_size
fan_out = shape[0] * receptive_field_size
return fan_in, fan_out
|
import typing as tp # NOQA
from chainer import types # NOQA
from chainer import utils
class Initializer(object):
"""Initializes array.
It initializes the given array.
Attributes:
dtype: Data type specifier. It is for type check in ``__call__``
function.
"""
def __init__(self, dtype=None):
# type: (tp.Optional[types.DTypeSpec]) -> None
self.dtype = dtype # type: types.DTypeSpec
def __call__(self, array):
# type: (types.NdArray) -> None
"""Initializes given array.
This method destructively changes the value of array.
The derived class is required to implement this method.
The algorithms used to make the new values depend on the
concrete derived classes.
Args:
array (:ref:`ndarray`):
An array to be initialized by this initializer.
"""
raise NotImplementedError()
# Original code forked from MIT licensed keras project
# https://github.com/fchollet/keras/blob/master/keras/initializations.py
def get_fans(shape):
if not isinstance(shape, tuple):
raise ValueError(
'shape must be tuple. Actual type: {}'.format(type(shape)))
if len(shape) < 2:
raise ValueError(
'shape must be of length >= 2. Actual shape: {}'.format(shape))
receptive_field_size = utils.size_of_shape(shape[2:])
fan_in = shape[1] * receptive_field_size
fan_out = shape[0] * receptive_field_size
return fan_in, fan_out
|
Fix error messages in get_fans
|
Fix error messages in get_fans
|
Python
|
mit
|
niboshi/chainer,tkerola/chainer,niboshi/chainer,keisuke-umezawa/chainer,okuta/chainer,chainer/chainer,wkentaro/chainer,niboshi/chainer,okuta/chainer,okuta/chainer,wkentaro/chainer,pfnet/chainer,okuta/chainer,hvy/chainer,wkentaro/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,chainer/chainer,chainer/chainer,hvy/chainer,wkentaro/chainer,chainer/chainer,niboshi/chainer,hvy/chainer,hvy/chainer
|
3786d778f583f96cb4dce37a175d2c460a020724
|
cnxauthoring/events.py
|
cnxauthoring/events.py
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from pyramid.events import NewRequest
def add_cors_headers(request, response):
settings = request.registry.settings
acac = settings['cors.access_control_allow_credentials']
acao = settings['cors.access_control_allow_origin'].split()
acah = settings['cors.access_control_allow_headers']
acam = settings['cors.access_control_allow_methods']
if acac:
response.headerlist.append(
('Access-Control-Allow-Credentials', acac))
if acao:
if request.host in acao:
response.headerlist.append(
('Access-Control-Allow-Origin', request.host))
else:
response.headerlist.append(
('Access-Control-Allow-Origin', acao[0]))
if acah:
response.headerlist.append(
('Access-Control-Allow-Headers', acah))
if acam:
response.headerlist.append(
('Access-Control-Allow-Methods', acam))
def new_request_subscriber(event):
request = event.request
request.add_response_callback(add_cors_headers)
def main(config):
config.add_subscriber(new_request_subscriber, NewRequest)
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from pyramid.events import NewRequest
def add_cors_headers(request, response):
settings = request.registry.settings
acac = settings['cors.access_control_allow_credentials']
acao = settings['cors.access_control_allow_origin'].split()
acah = settings['cors.access_control_allow_headers']
acam = settings['cors.access_control_allow_methods']
if acac:
response.headerlist.append(
('Access-Control-Allow-Credentials', acac))
if acao:
if request.headers.get('Origin') in acao:
response.headerlist.append(
('Access-Control-Allow-Origin', request.headers.get('Origin')))
else:
response.headerlist.append(
('Access-Control-Allow-Origin', acao[0]))
if acah:
response.headerlist.append(
('Access-Control-Allow-Headers', acah))
if acam:
response.headerlist.append(
('Access-Control-Allow-Methods', acam))
def new_request_subscriber(event):
request = event.request
request.add_response_callback(add_cors_headers)
def main(config):
config.add_subscriber(new_request_subscriber, NewRequest)
|
Fix Access-Control-Allow-Origin to return the request origin
|
Fix Access-Control-Allow-Origin to return the request origin
request.host is the host part of the request url. For example, if
webview is trying to access http://localhost:8080/users/profile,
request. It's the Origin field in the headers that we should be
matching.
|
Python
|
agpl-3.0
|
Connexions/cnx-authoring
|
a4b0830b7336694dacc822077c2ce6901be4929b
|
widgy/contrib/widgy_mezzanine/search_indexes.py
|
widgy/contrib/widgy_mezzanine/search_indexes.py
|
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
|
Index the URL of the WidgyPage.
|
Index the URL of the WidgyPage.
This way, you don't have to fetch the page object when you want to put a
link in the search results.
|
Python
|
apache-2.0
|
j00bar/django-widgy,j00bar/django-widgy,j00bar/django-widgy
|
46741fdbda00a8b1574dfdf0689c8a26454d28f6
|
actions/cloudbolt_plugins/aws/poll_for_init_complete.py
|
actions/cloudbolt_plugins/aws/poll_for_init_complete.py
|
import sys
import time
from infrastructure.models import Server
from jobs.models import Job
TIMEOUT = 600
def is_reachable(server):
"""
:type server: Server
"""
instance_id = server.ec2serverinfo.instance_id
ec2_region = server.ec2serverinfo.ec2_region
rh = server.resource_handler.cast()
rh.connect_ec2(ec2_region)
wc = rh.resource_technology.work_class
instance = wc.get_instance(instance_id)
conn = instance.connection
status = conn.get_all_instance_status(instance_id)
return True if status[0].instance_status.details[u'reachability'] == u'passed' else False
def run(job, logger=None):
assert isinstance(job, Job)
assert job.type == u'provision'
server = job.server_set.first()
timeout = time.time() + TIMEOUT
while True:
if is_reachable(server):
job.set_progress("EC2 instance is reachable.")
break
elif time.time() > timeout:
job.set_progress("Waited {} seconds. Continuing...".format(TIMEOUT))
break
else:
time.sleep(2)
return "", "", ""
if __name__ == '__main__':
if len(sys.argv) != 2:
print ' Usage: {} <job_id>'.format(sys.argv[0])
sys.exit(1)
print run(Job.objects.get(id=sys.argv[1]))
|
import time
from jobs.models import Job
TIMEOUT = 600
def is_reachable(server):
instance_id = server.ec2serverinfo.instance_id
ec2_region = server.ec2serverinfo.ec2_region
rh = server.resource_handler.cast()
rh.connect_ec2(ec2_region)
wc = rh.resource_technology.work_class
instance = wc.get_instance(instance_id)
status = instance.connection.get_all_instance_status(instance_id)
return True if status[0].instance_status.details[u'reachability'] == u'passed' else False
def run(job, logger=None, **kwargs):
assert isinstance(job, Job) and job.type == u'provision'
server = job.server_set.first()
timeout = time.time() + TIMEOUT
while True:
if is_reachable(server):
job.set_progress("EC2 instance is reachable.")
break
elif time.time() > timeout:
job.set_progress("Waited {} seconds. Continuing...".format(TIMEOUT))
break
else:
time.sleep(2)
return "", "", ""
|
Clean up poll for init complete script
|
Clean up poll for init complete script
|
Python
|
apache-2.0
|
CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge
|
fd77039104175a4b5702b46b21a2fa223676ddf4
|
bowser/Database.py
|
bowser/Database.py
|
import json
import redis
class Database(object):
def __init__(self):
self.redis = redis.StrictRedis(host='redis', port=6379, db=0)
def set_data_of_server_channel(self, server, channel, data):
self.redis.hmset(server, {channel: json.dumps(data)})
def fetch_data_of_server_channel(self, server, channel):
data = self.redis.hget(server, channel)
json_data = json.loads(data.decode('utf-8'))
return json_data
|
import json
import redis
class Database(object):
def __init__(self):
self.redis = redis.StrictRedis(host='redis', port=6379, db=0)
def set_data_of_server_channel(self, server, channel, data):
self.redis.hmset(server, {channel: json.dumps(data)})
def fetch_data_of_server_channel(self, server, channel):
data = self.redis.hget(server, channel)
if data is None:
raise KeyError
json_data = json.loads(data.decode('utf-8'))
return json_data
|
Raise KeyErrors for missing data in redis
|
fix: Raise KeyErrors for missing data in redis
|
Python
|
mit
|
kevinkjt2000/discord-minecraft-server-status
|
3f166b110d4e8623966ca29c71445973da4876f9
|
armstrong/hatband/forms.py
|
armstrong/hatband/forms.py
|
from django import forms
from django.db import models
from . import widgets
RICH_TEXT_DBFIELD_OVERRIDES = {
models.TextField: {'widget': widgets.RichTextWidget},
}
class BackboneFormMixin(object):
class Media:
js = (
'hatband/js/jquery-1.6.2.min.js',
'hatband/js/underscore.js',
'hatband/js/backbone.js',
'hatband/js/backbone-inline-base.js')
class OrderableGenericKeyLookupForm(BackboneFormMixin, forms.ModelForm):
class Meta:
widgets = {
"content_type": forms.HiddenInput(),
"object_id": widgets.GenericKeyWidget(),
"order": forms.HiddenInput(),
}
|
from django import forms
from django.conf import settings
from django.db import models
from . import widgets
RICH_TEXT_DBFIELD_OVERRIDES = {
models.TextField: {'widget': widgets.RichTextWidget},
}
class BackboneFormMixin(object):
if getattr(settings, "ARMSTRONG_ADMIN_PROVIDE_STATIC", True):
class Media:
js = (
'hatband/js/jquery-1.6.2.min.js',
'hatband/js/underscore.js',
'hatband/js/backbone.js',
'hatband/js/backbone-inline-base.js')
class OrderableGenericKeyLookupForm(BackboneFormMixin, forms.ModelForm):
class Meta:
widgets = {
"content_type": forms.HiddenInput(),
"object_id": widgets.GenericKeyWidget(),
"order": forms.HiddenInput(),
}
|
Make it possible to turn off admin JS
|
Make it possible to turn off admin JS
|
Python
|
apache-2.0
|
armstrong/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband,texastribune/armstrong.hatband
|
e369824a1bd337e9245d010b93734832af4e0376
|
cetacean/response.py
|
cetacean/response.py
|
#!/usr/bin/env python
# encoding: utf-8
import json
import re
from .resource import Resource
class Response(Resource):
"""Represents an HTTP response that is hopefully a HAL document."""
def __init__(self, response):
"""Pass it a Requests response object.
:response: A response object from the Requests library.
"""
self._response = response
self._hal_regex = re.compile(r"application/hal\+json")
self._parsed_hal = None
def is_hal(self):
"""Test if a response was a HAL document or not.
:returns: True or False
"""
return bool(self._hal_regex.match(self._response.headers['content-type']))
@property
def _hal(self):
"""Returns the parsed HAL body of the response
:returns: A parsed HAL body (dicts and lists) or an empty dictionary.
"""
if self._parsed_hal != None: return self._parsed_hal
self._parsed_hal = self._parse_hal()
return self._parsed_hal
def _parse_hal(self):
"""Parses the JSON body of the response.
:returns: A parsed JSON body (dicts and lists) or an empty dictionary.
"""
if not self.is_hal(): return {}
try:
return json.loads(self._response.content)
except ValueError, e:
return {}
|
#!/usr/bin/env python
# encoding: utf-8
import json
import re
from .resource import Resource
class Response(Resource):
"""Represents an HTTP response that is hopefully a HAL document."""
_hal_regex = re.compile(r"application/hal\+json")
def __init__(self, response):
"""Pass it a Requests response object.
:response: A response object from the Requests library.
"""
self._response = response
self._parsed_hal = None
def is_hal(self):
"""Test if a response was a HAL document or not.
:returns: True or False
"""
return bool(self._hal_regex.match(self._response.headers['content-type']))
@property
def _hal(self):
"""Returns the parsed HAL body of the response
:returns: A parsed HAL body (dicts and lists) or an empty dictionary.
"""
if self._parsed_hal != None: return self._parsed_hal
self._parsed_hal = self._parse_hal()
return self._parsed_hal
def _parse_hal(self):
"""Parses the JSON body of the response.
:returns: A parsed JSON body (dicts and lists) or an empty dictionary.
"""
if not self.is_hal(): return {}
try:
return json.loads(self._response.content)
except ValueError, e:
return {}
|
Move _hal_regex to class scope.
|
Move _hal_regex to class scope.
|
Python
|
mit
|
nanorepublica/cetacean-python,benhamill/cetacean-python
|
46db910f9b9a150b785ea3b36a9e4f73db326d78
|
loader.py
|
loader.py
|
from etl import get_local_handles, ingest_feeds, CSV_ETL_CLASSES
from local import LocalConfig
from interface import Marcotti
if __name__ == "__main__":
settings = LocalConfig()
marcotti = Marcotti(settings)
with marcotti.create_session() as sess:
for entity, etl_class in CSV_ETL_CLASSES:
data_file = settings.CSV_DATA[entity]
if data_file is None:
continue
if entity in ['Salaries', 'Partials', 'FieldStats', 'GkStats', 'LeaguePoints']:
params = (sess, settings.COMPETITION_NAME, settings.SEASON_NAME)
else:
params = (sess,)
ingest_feeds(get_local_handles, settings.CSV_DATA_DIR, data_file, etl_class(*params))
|
import os
import logging
from etl import get_local_handles, ingest_feeds, CSV_ETL_CLASSES
from local import LocalConfig
from interface import Marcotti
LOG_FORMAT = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s: %(message)s')
ch = logging.FileHandler(os.path.join(LocalConfig().LOG_DIR, 'marcotti.log'))
ch.setLevel(logging.INFO)
ch.setFormatter(LOG_FORMAT)
logger = logging.getLogger('loader')
logger.setLevel(logging.INFO)
logger.addHandler(ch)
if __name__ == "__main__":
settings = LocalConfig()
marcotti = Marcotti(settings)
logger.info("Data ingestion start")
with marcotti.create_session() as sess:
for entity, etl_class in CSV_ETL_CLASSES:
data_file = settings.CSV_DATA[entity]
if data_file is None:
logger.info("Skipping ingestion into %s data model", entity)
else:
if type(data_file) is list:
data_file = os.path.join(*data_file)
logger.info("Ingesting %s into %s data model",
os.path.join(settings.CSV_DATA_DIR, data_file), entity)
if entity in ['Salaries', 'Partials', 'FieldStats', 'GkStats', 'LeaguePoints']:
params = (sess, settings.COMPETITION_NAME, settings.SEASON_NAME)
else:
params = (sess,)
ingest_feeds(get_local_handles, settings.CSV_DATA_DIR, data_file, etl_class(*params))
logger.info("Data ingestion complete")
|
Add logging messages to data ingestion tool
|
Add logging messages to data ingestion tool
|
Python
|
mit
|
soccermetrics/marcotti-mls
|
dfc6b2d2d8cda75349dfab33d9639b5ea24cc520
|
contentcuration/contentcuration/ricecooker_versions.py
|
contentcuration/contentcuration/ricecooker_versions.py
|
import xmlrpclib
from socket import gaierror
VERSION_OK = "0.5.13"
try:
pypi = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
VERSION_OK = pypi.package_releases('ricecooker')[0]
except gaierror:
pass
VERSION_OK_MESSAGE = "Ricecooker v{} is up-to-date."
VERSION_SOFT_WARNING = "0.5.6"
VERSION_SOFT_WARNING_MESSAGE = "You are using Ricecooker v{}, however v{} is available. You should consider upgrading your Ricecooker."
VERSION_HARD_WARNING = "0.3.13"
VERSION_HARD_WARNING_MESSAGE = "Ricecooker v{} is deprecated. Any channels created with this version will be unlinked with any future upgrades. You are strongly recommended to upgrade to v{}."
VERSION_ERROR = None
VERSION_ERROR_MESSAGE = "Ricecooker v{} is no longer compatible. You must upgrade to v{} to continue."
|
import xmlrpclib
from socket import gaierror, error
VERSION_OK = "0.6.0"
try:
pypi = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
VERSION_OK = pypi.package_releases('ricecooker')[0]
except (gaierror, error):
pass
VERSION_OK_MESSAGE = "Ricecooker v{} is up-to-date."
VERSION_SOFT_WARNING = "0.5.6"
VERSION_SOFT_WARNING_MESSAGE = "You are using Ricecooker v{}, however v{} is available. You should consider upgrading your Ricecooker."
VERSION_HARD_WARNING = "0.3.13"
VERSION_HARD_WARNING_MESSAGE = "Ricecooker v{} is deprecated. Any channels created with this version will be unlinked with any future upgrades. You are strongly recommended to upgrade to v{}."
VERSION_ERROR = None
VERSION_ERROR_MESSAGE = "Ricecooker v{} is no longer compatible. You must upgrade to v{} to continue."
|
Add error handling to reduce dependency on pypi
|
Add error handling to reduce dependency on pypi
|
Python
|
mit
|
DXCanas/content-curation,DXCanas/content-curation,jayoshih/content-curation,jayoshih/content-curation,jayoshih/content-curation,jayoshih/content-curation,fle-internal/content-curation,fle-internal/content-curation,fle-internal/content-curation,fle-internal/content-curation,DXCanas/content-curation,DXCanas/content-curation
|
6a83ff3a2d1aca0a3663a36ca9502d3d86ea2a93
|
pirx/base.py
|
pirx/base.py
|
class Settings(object):
def __init__(self):
self._settings = {}
def __setattr__(self, name, value):
if name.startswith('_'):
super(Settings, self).__setattr__(name, value)
else:
self._settings[name] = value
def write(self):
for name, value in self._settings.iteritems():
print '%s = %s' % (name.upper(), value.__repr__())
|
import collections
class Settings(object):
def __init__(self):
self._settings = collections.OrderedDict()
def __setattr__(self, name, value):
if name.startswith('_'):
super(Settings, self).__setattr__(name, value)
else:
self._settings[name] = value
def write(self):
for name, value in self._settings.iteritems():
print '%s = %s' % (name.upper(), value.__repr__())
|
Store settings with the OrderedDict
|
Store settings with the OrderedDict
|
Python
|
mit
|
piotrekw/pirx
|
6cfc9de7fe8fd048a75845a69bdeefc7c742bae4
|
oneall/django_oneall/management/commands/emaillogin.py
|
oneall/django_oneall/management/commands/emaillogin.py
|
# -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from ...auth import EmailTokenAuthBackend
class Command(BaseCommand):
help = "E-mail login without sending the actual e-mail."
def add_arguments(self, parser):
parser.add_argument('email', type=str)
def handle(self, email, **options):
if '@' not in email:
self.stderr.write("Failed. E-mail is mandatory.")
return 1
query_string = EmailTokenAuthBackend().issue(email)
self.stdout.write("Complete login with: %s?%s" % (reverse('oneall-login'), query_string))
|
# -*- coding: utf-8 -*-
from django.core.mail import EmailMessage
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from ...auth import EmailTokenAuthBackend
class Command(BaseCommand):
help = "Issues an e-mail login token."
def add_arguments(self, parser):
parser.add_argument('-s', '--send', dest='send', action='store_true',
help="Actually e-mail the token instead of only displaying it.")
parser.add_argument('email', type=str)
def handle(self, email, send, **options):
if '@' not in email:
self.stderr.write("Failed. E-mail is mandatory.")
return
query_string = EmailTokenAuthBackend().issue(email)
msg = "Complete login with: %s?%s" % (reverse('oneall-login'), query_string)
self.stdout.write(msg)
if send:
mail = EmailMessage()
mail.to = [email]
mail.subject = 'Login Test'
mail.body = msg
try:
sent = mail.send()
self.stdout.write("Sent %d message." % sent)
except ConnectionError as e:
self.stderr.write(str(e))
|
Add the possibility of testing SMTP from the command-line.
|
Add the possibility of testing SMTP from the command-line.
|
Python
|
mit
|
leandigo/django-oneall,ckot/django-oneall,leandigo/django-oneall,ckot/django-oneall
|
7a936665eff8a6a8f6889334ad2238cbfcded18b
|
member.py
|
member.py
|
import requests
from credentials import label_id
from gmailauth import refresh
access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:3d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
print(list_messages(headers))
def get_message(headers, identity):
params = {'id': identity, format: 'metadata'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
print(r.status_code, r.reason)
h = j['payload']
subject = ''
for header in h['headers']:
if header['name'] == 'Subject':
subject = header['value']
break
print(subject)
for item in list_messages(headers):
get_message(headers, item)
# get_message(headers, list_messages(headers))
|
import requests
from base64 import urlsafe_b64decode
from credentials import label_id, url1, url2
from gmailauth import refresh
# access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:2d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
def get_message(headers, identity):
params = {'id': identity, 'format': 'raw'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
raw = j['raw']
d = urlsafe_b64decode(raw)
p = d.decode()
s = p.find('https')
l = len(p)
print(p[s:l])
print('----------')
return(p[s:l])
# for item in list_messages(headers):
# get_message(headers, item)
|
Return the order details URL from email body.
|
Return the order details URL from email body.
There is currently no Agile API method that will return the order
details for an activity so the URL from the email must be used in
conjunction with a web scraper to get the relevant details.
|
Python
|
mit
|
deadlyraptor/reels
|
ed11fa0ebc365b8a7b0f31c8b09bf23b891e44b6
|
discover_tests.py
|
discover_tests.py
|
"""
Simple auto test discovery.
From http://stackoverflow.com/a/17004409
"""
import os
import sys
import unittest
def additional_tests():
setup_file = sys.modules['__main__'].__file__
setup_dir = os.path.abspath(os.path.dirname(setup_file))
return unittest.defaultTestLoader.discover(setup_dir)
|
"""
Simple auto test discovery.
From http://stackoverflow.com/a/17004409
"""
import os
import sys
import unittest
if not hasattr(unittest.defaultTestLoader, 'discover'):
import unittest2 as unittest
def additional_tests():
setup_file = sys.modules['__main__'].__file__
setup_dir = os.path.abspath(os.path.dirname(setup_file))
return unittest.defaultTestLoader.discover(setup_dir)
|
Allow test discovery on Py26 with unittest2
|
Allow test discovery on Py26 with unittest2
|
Python
|
mit
|
QuLogic/python-future,michaelpacer/python-future,PythonCharmers/python-future,QuLogic/python-future,krischer/python-future,PythonCharmers/python-future,krischer/python-future,michaelpacer/python-future
|
63f04662f5ca22443ab6080f559ac898302cf103
|
tests/integration/conftest.py
|
tests/integration/conftest.py
|
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
final_list = []
on_redeploy_tests = []
for item in items:
if item.get_marker('on_redeploy') is not None:
on_redeploy_tests.append(item)
else:
final_list.append(item)
final_list.extend(on_redeploy_tests)
items[:] = final_list
|
DEPLOY_TEST_BASENAME = 'test_features.py'
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests with require a redeploy are run after
# tests that don't need a redeploy.
start, end = _get_start_end_index(DEPLOY_TEST_BASENAME, items)
marked = []
unmarked = []
for item in items[start:end]:
if item.get_marker('on_redeploy') is not None:
marked.append(item)
else:
unmarked.append(item)
items[start:end] = unmarked + marked
def _get_start_end_index(basename, items):
# precondition: all the tests for test_features.py are
# in a contiguous range. This is the case because pytest
# will group all tests in a module together.
matched = [item.fspath.basename == basename for item in items]
return (
matched.index(True),
len(matched) - list(reversed(matched)).index(True)
)
|
Reorder redeploy tests within a single module
|
Reorder redeploy tests within a single module
The original code for on_redeploy was making the
assumption that there was only one integration test file.
When test_package.py was added, the tests always failed
because the redeploy tests were run *after* the package tests
which messed with the module scope fixtures.
Now we ensure we only reorder tests within test_features.py.
|
Python
|
apache-2.0
|
awslabs/chalice
|
7a0c0e6ed56e847b7b6300c1a0b4a427f26b296d
|
app/PRESUBMIT.py
|
app/PRESUBMIT.py
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
|
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
|
Make all changes to app/ run on all trybot platforms, not just the big three. Anyone who's changing a header here may break the chromeos build.
|
Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: dd90618784b6a4b323ea0c23a071cb1c9e6f2ac7@51000 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
|
Python
|
bsd-3-clause
|
wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser
|
d42314b323aa0f8c764d72a5ebebc0e7d5ac88f3
|
nova/api/openstack/compute/schemas/v3/create_backup.py
|
nova/api/openstack/compute/schemas/v3/create_backup.py
|
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'create_backup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
'enum': ['daily', 'weekly'],
},
'rotation': {
'type': ['integer', 'string'],
'pattern': '^[0-9]+$',
'minimum': 0,
},
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['create_backup'],
'additionalProperties': False,
}
|
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'create_backup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
},
'rotation': {
'type': ['integer', 'string'],
'pattern': '^[0-9]+$',
'minimum': 0,
},
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['create_backup'],
'additionalProperties': False,
}
|
Remove param check for backup type on v2.1 API
|
Remove param check for backup type on v2.1 API
The backup type is only used by glance, so nova check it make
no sense; currently we have daily and weekly as only valid param
but someone may add 'monthly' as param. nova should allow it
and delegate the error. This patch removes check on v2.1 API.
Change-Id: I59bbc0f589c8c280eb8cd87aa279898fffaeab7a
Closes-Bug: #1361490
|
Python
|
apache-2.0
|
devendermishrajio/nova,affo/nova,projectcalico/calico-nova,whitepages/nova,klmitch/nova,jianghuaw/nova,cernops/nova,Stavitsky/nova,fnordahl/nova,blueboxgroup/nova,CEG-FYP-OpenStack/scheduler,Francis-Liu/animated-broccoli,j-carpentier/nova,joker946/nova,hanlind/nova,rajalokan/nova,zhimin711/nova,silenceli/nova,ruslanloman/nova,isyippee/nova,akash1808/nova_test_latest,BeyondTheClouds/nova,belmiromoreira/nova,yatinkumbhare/openstack-nova,mmnelemane/nova,BeyondTheClouds/nova,JioCloud/nova_test_latest,mmnelemane/nova,mikalstill/nova,double12gzh/nova,sebrandon1/nova,cloudbase/nova-virtualbox,phenoxim/nova,devendermishrajio/nova_test_latest,NeCTAR-RC/nova,JioCloud/nova,apporc/nova,ruslanloman/nova,jianghuaw/nova,rahulunair/nova,whitepages/nova,adelina-t/nova,blueboxgroup/nova,noironetworks/nova,alaski/nova,adelina-t/nova,rahulunair/nova,felixma/nova,Juniper/nova,iuliat/nova,alexandrucoman/vbox-nova-driver,Tehsmash/nova,dims/nova,orbitfp7/nova,tealover/nova,yosshy/nova,mahak/nova,CEG-FYP-OpenStack/scheduler,JianyuWang/nova,cernops/nova,sebrandon1/nova,akash1808/nova_test_latest,Juniper/nova,varunarya10/nova_test_latest,Juniper/nova,double12gzh/nova,devendermishrajio/nova_test_latest,tudorvio/nova,BeyondTheClouds/nova,felixma/nova,alexandrucoman/vbox-nova-driver,cyx1231st/nova,CloudServer/nova,projectcalico/calico-nova,iuliat/nova,openstack/nova,mahak/nova,vmturbo/nova,eonpatapon/nova,jeffrey4l/nova,cloudbase/nova,rajalokan/nova,yosshy/nova,vmturbo/nova,nikesh-mahalka/nova,mandeepdhami/nova,mgagne/nova,mahak/nova,TwinkleChawla/nova,CloudServer/nova,belmiromoreira/nova,thomasem/nova,shail2810/nova,devendermishrajio/nova,JioCloud/nova_test_latest,NeCTAR-RC/nova,jeffrey4l/nova,cloudbase/nova-virtualbox,openstack/nova,zhimin711/nova,gooddata/openstack-nova,zzicewind/nova,Metaswitch/calico-nova,joker946/nova,LoHChina/nova,cyx1231st/nova,jianghuaw/nova,ted-gould/nova,raildo/nova,zaina/nova,ted-gould/nova,zaina/nova,petrutlucian94/nova,jianghuaw/nova,phenoxim/nova,mandeepdhami/nova,Yusuke1987/openstack_template,rajalokan/nova,cloudbase/nova,vmturbo/nova,affo/nova,akash1808/nova,gooddata/openstack-nova,bgxavier/nova,Stavitsky/nova,rajalokan/nova,MountainWei/nova,bgxavier/nova,fnordahl/nova,scripnichenko/nova,hanlind/nova,yatinkumbhare/openstack-nova,sebrandon1/nova,Francis-Liu/animated-broccoli,watonyweng/nova,barnsnake351/nova,JioCloud/nova,TwinkleChawla/nova,raildo/nova,bigswitch/nova,Tehsmash/nova,varunarya10/nova_test_latest,CCI-MOC/nova,shail2810/nova,gooddata/openstack-nova,dims/nova,zzicewind/nova,vmturbo/nova,tealover/nova,mgagne/nova,alvarolopez/nova,eonpatapon/nova,barnsnake351/nova,klmitch/nova,mikalstill/nova,rahulunair/nova,apporc/nova,edulramirez/nova,openstack/nova,isyippee/nova,tudorvio/nova,Metaswitch/calico-nova,JianyuWang/nova,kimjaejoong/nova,hanlind/nova,kimjaejoong/nova,bigswitch/nova,tangfeixiong/nova,petrutlucian94/nova,klmitch/nova,gooddata/openstack-nova,scripnichenko/nova,MountainWei/nova,cernops/nova,orbitfp7/nova,takeshineshiro/nova,klmitch/nova,edulramirez/nova,akash1808/nova,j-carpentier/nova,tangfeixiong/nova,silenceli/nova,nikesh-mahalka/nova,dawnpower/nova,CCI-MOC/nova,takeshineshiro/nova,LoHChina/nova,Juniper/nova,alaski/nova,noironetworks/nova,alvarolopez/nova,Yusuke1987/openstack_template,watonyweng/nova,thomasem/nova,cloudbase/nova,mikalstill/nova,dawnpower/nova
|
720c6dbf9831b2b2ff701d0ca88303189583b9c4
|
opps/api/__init__.py
|
opps/api/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
def appendModel(Model, Filters):
m = Model.objects.filter(**Filters)
l = []
for i in m:
l.append(i.__dict__)
return l
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
|
Add method appendModel on api BaseHandler
|
Add method appendModel on api BaseHandler
|
Python
|
mit
|
YACOWS/opps,opps/opps,jeanmask/opps,opps/opps,williamroot/opps,YACOWS/opps,williamroot/opps,opps/opps,YACOWS/opps,williamroot/opps,williamroot/opps,YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps
|
54c4e434276b242de56529e63bb6c5c61d891412
|
indico/modules/events/surveys/tasks.py
|
indico/modules/events/surveys/tasks.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from celery.schedules import crontab
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.events.surveys.models.surveys import Survey
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
opened_surveys = Survey.find_all(~Survey.is_deleted, ~Survey.start_notification_sent, Survey.has_started,
Survey.notifications_enabled)
try:
for survey in opened_surveys:
survey.send_start_notification()
finally:
db.session.commit()
|
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from celery.schedules import crontab
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.events.surveys.models.surveys import Survey
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
active_surveys = Survey.find_all(Survey.is_active, ~Survey.start_notification_sent, Survey.notifications_enabled)
try:
for survey in active_surveys:
survey.send_start_notification()
finally:
db.session.commit()
|
Use safer condition for survey start notification
|
Use safer condition for survey start notification
|
Python
|
mit
|
mvidalgarcia/indico,ThiefMaster/indico,pferreir/indico,indico/indico,mic4ael/indico,DirkHoffmann/indico,ThiefMaster/indico,indico/indico,ThiefMaster/indico,indico/indico,mic4ael/indico,indico/indico,OmeGak/indico,OmeGak/indico,pferreir/indico,DirkHoffmann/indico,mic4ael/indico,DirkHoffmann/indico,OmeGak/indico,DirkHoffmann/indico,mic4ael/indico,mvidalgarcia/indico,pferreir/indico,ThiefMaster/indico,mvidalgarcia/indico,mvidalgarcia/indico,pferreir/indico,OmeGak/indico
|
fff234587be9b63270b345345f607df381031bdc
|
opendebates/tests/test_context_processors.py
|
opendebates/tests/test_context_processors.py
|
import urlparse
from django.test import TestCase, override_settings
from mock import patch, Mock
from opendebates.context_processors import global_vars
from opendebates.tests.factories import SubmissionFactory
class NumberOfVotesTest(TestCase):
def test_number_of_votes(self):
mock_request = Mock()
with patch('opendebates.utils.cache') as mock_cache:
mock_cache.get.return_value = 2
context = global_vars(mock_request)
self.assertEqual(2, int(context['NUMBER_OF_VOTES']))
class ThemeTests(TestCase):
def setUp(self):
self.idea = SubmissionFactory()
@override_settings(SITE_THEME={'HASHTAG': 'TestHashtag'})
def test_email_url(self):
email_url = self.idea.email_url()
fields = urlparse.parse_qs(urlparse.urlparse(email_url).query)
self.assertTrue('subject' in fields, fields)
self.assertTrue('#TestHashtag' in fields['subject'][0], fields['subject'][0])
|
import urlparse
from django.test import TestCase, override_settings
from mock import patch, Mock
from opendebates.context_processors import global_vars
from opendebates.tests.factories import SubmissionFactory
class NumberOfVotesTest(TestCase):
def test_number_of_votes(self):
mock_request = Mock()
with patch('opendebates.utils.cache') as mock_cache:
mock_cache.get.return_value = 2
context = global_vars(mock_request)
self.assertEqual(2, int(context['NUMBER_OF_VOTES']))
class ThemeTests(TestCase):
def setUp(self):
self.idea = SubmissionFactory()
@override_settings(SITE_THEME={
'EMAIL_SUBJECT': 'THE EMAIL SUBJECT',
'EMAIL_BODY': 'THE EMAIL BODY\nAND SECOND LINE',
})
def test_email_url(self):
email_url = self.idea.email_url()
fields = urlparse.parse_qs(urlparse.urlparse(email_url).query)
self.assertTrue('subject' in fields, fields)
self.assertEqual('THE EMAIL SUBJECT', fields['subject'][0], fields['subject'][0])
self.assertEqual('THE EMAIL BODY\nAND SECOND LINE', fields['body'][0], fields['body'][0])
|
Fix test_email_url() after changes to email templating for sharing emails
|
Fix test_email_url() after changes to email templating for sharing emails
|
Python
|
apache-2.0
|
caktus/django-opendebates,caktus/django-opendebates,caktus/django-opendebates,ejucovy/django-opendebates,ejucovy/django-opendebates,ejucovy/django-opendebates,caktus/django-opendebates,ejucovy/django-opendebates
|
d0461fa033bdca4fffeff718219f8b71123449d7
|
pskb_website/models/__init__.py
|
pskb_website/models/__init__.py
|
"""
Public model API
"""
from .article import search_for_article
from .article import get_available_articles
from .article import read_article
from .article import save_article
from .article import delete_article
from .article import branch_article
from .article import branch_or_save_article
from .article import get_articles_for_author
from .article import get_public_articles_for_author
from .article import save_article_meta_data
from .article import find_article_by_title
from .article import change_article_stack
from .file import read_file
from .file import read_redirects
from .file import update_article_listing
from .file import published_articles
from .file import in_review_articles
from .file import draft_articles
from .user import find_user
from .email_list import add_subscriber
from .image import save_image
from .lib import to_json
|
"""
Public model API
"""
from .article import search_for_article
from .article import get_available_articles
from .article import read_article
from .article import save_article
from .article import delete_article
from .article import branch_article
from .article import branch_or_save_article
from .article import get_articles_for_author
from .article import get_public_articles_for_author
from .article import find_article_by_title
from .article import change_article_stack
from .file import read_file
from .file import read_redirects
from .file import update_article_listing
from .user import find_user
from .email_list import add_subscriber
from .image import save_image
from .lib import to_json
|
Remove some functions from exported model API that are not used outside model layer
|
Remove some functions from exported model API that are not used outside model layer
- Just some refactoring to trim down the number of things exported that aren't
necessary at this time.
|
Python
|
agpl-3.0
|
paulocheque/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms,paulocheque/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms
|
77d264bd25e0556eb3680b845de22b62d2ebd3e6
|
bouncer/embed_detector.py
|
bouncer/embed_detector.py
|
import fnmatch
import re
from urllib.parse import urlparse
# Hardcoded URL patterns where client is assumed to be embedded.
#
# Only the hostname and path are included in the pattern. The path must be
# specified; use "example.com/*" to match all URLs on a particular domain.
#
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the pattern list ``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
|
import fnmatch
import re
from urllib.parse import urlparse
# Hardcoded URL patterns where client is assumed to be embedded.
#
# Only the hostname and path are included in the pattern. The path must be
# specified; use "example.com/*" to match all URLs on a particular domain.
#
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
# Hypothesis websites.
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
# Publisher partners:
# American Psychological Organization.
"psycnet.apa.org/fulltext/*",
"awspntest.apa.org/fulltext/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the pattern list ``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
|
Add APA websites to URL patterns where client is known to be embedded.
|
Add APA websites to URL patterns where client is known to be embedded.
URL patterns provided by Kadidra McCloud at APA.
Fixes https://github.com/hypothesis/product-backlog/issues/814
|
Python
|
bsd-2-clause
|
hypothesis/bouncer,hypothesis/bouncer,hypothesis/bouncer
|
ab035185e2c2023280c29aa5239deac820ec873d
|
openprescribing/openprescribing/settings/e2etest.py
|
openprescribing/openprescribing/settings/e2etest.py
|
from __future__ import absolute_import
from .test import *
DATABASES = {
"default": {
"ENGINE": "django.contrib.gis.db.backends.postgis",
"NAME": utils.get_env_setting("E2E_DB_NAME"),
"USER": utils.get_env_setting("DB_USER"),
"PASSWORD": utils.get_env_setting("DB_PASS"),
"HOST": utils.get_env_setting("DB_HOST", "127.0.0.1"),
}
}
PIPELINE_METADATA_DIR = os.path.join(APPS_ROOT, "pipeline", "metadata")
PIPELINE_DATA_BASEDIR = os.path.join(APPS_ROOT, "pipeline", "e2e-test-data", "data", "")
PIPELINE_IMPORT_LOG_PATH = os.path.join(
APPS_ROOT, "pipeline", "e2e-test-data", "log.json"
)
SLACK_SENDING_ACTIVE = True
BQ_DEFAULT_TABLE_EXPIRATION_MS = 24 * 60 * 60 * 1000 # 24 hours
|
from __future__ import absolute_import
from .test import *
DATABASES = {
"default": {
"ENGINE": "django.contrib.gis.db.backends.postgis",
"NAME": utils.get_env_setting("E2E_DB_NAME"),
"USER": utils.get_env_setting("DB_USER"),
"PASSWORD": utils.get_env_setting("DB_PASS"),
"HOST": utils.get_env_setting("DB_HOST", "127.0.0.1"),
}
}
PIPELINE_METADATA_DIR = os.path.join(APPS_ROOT, "pipeline", "metadata")
PIPELINE_DATA_BASEDIR = os.path.join(APPS_ROOT, "pipeline", "e2e-test-data", "data", "")
PIPELINE_IMPORT_LOG_PATH = os.path.join(
APPS_ROOT, "pipeline", "e2e-test-data", "log.json"
)
SLACK_SENDING_ACTIVE = True
BQ_DEFAULT_TABLE_EXPIRATION_MS = 24 * 60 * 60 * 1000 # 24 hours
# We want to use the real measure definitions, not the test ones!
MEASURE_DEFINITIONS_PATH = os.path.join(APPS_ROOT, "measure_definitions")
|
Use real measure definitions in e2e tests
|
Use real measure definitions in e2e tests
|
Python
|
mit
|
ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc
|
0bb36aebdf0766c9244c6e317df89ddda86361b0
|
polls/admin.py
|
polls/admin.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import Question, Choice, Answer
class ChoiceInline(admin.TabularInline):
model = Choice
class QuestionAdmin(admin.ModelAdmin):
inlines = [
ChoiceInline,
]
admin.site.register(Question, QuestionAdmin)
admin.site.register(Answer)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import Question, Choice, Answer
class ChoiceInline(admin.TabularInline):
model = Choice
def copy_question(modeladmin, request, queryset):
for orig in queryset:
q = Question(question_text="Kopie van "+orig.question_text)
q.save()
for orig_choice in orig.choice_set.all():
c = Choice(question=q, choice_text=orig_choice.choice_text)
c.save()
copy_question.short_description = "Kopieer stemmingen"
class QuestionAdmin(admin.ModelAdmin):
inlines = [
ChoiceInline,
]
actions = [copy_question]
admin.site.register(Question, QuestionAdmin)
admin.site.register(Answer)
|
Allow Questions to be copied
|
Allow Questions to be copied
|
Python
|
apache-2.0
|
gerard-/votingapp,gerard-/votingapp
|
6ca27fba516ddc63ad6bae98b20e5f9a42b37451
|
examples/plotting/file/image.py
|
examples/plotting/file/image.py
|
import numpy as np
from bokeh.plotting import *
from bokeh.objects import Range1d
N = 1000
x = np.linspace(0, 10, N)
y = np.linspace(0, 10, N)
xx, yy = np.meshgrid(x, y)
d = np.sin(xx)*np.cos(yy)
output_file("image.html", title="image.py example")
image(
image=[d], x=[0], y=[0], dw=[10], dh=[10], palette=["Spectral-11"],
x_range=[0, 10], y_range=[0, 10],
tools="pan,wheel_zoom,box_zoom,reset,previewsave", name="image_example"
)
curplot().x_range = [5, 10]
show() # open a browser
|
import numpy as np
from bokeh.plotting import *
N = 1000
x = np.linspace(0, 10, N)
y = np.linspace(0, 10, N)
xx, yy = np.meshgrid(x, y)
d = np.sin(xx)*np.cos(yy)
output_file("image.html", title="image.py example")
image(
image=[d], x=[0], y=[0], dw=[10], dh=[10], palette=["Spectral-11"],
x_range=[0, 10], y_range=[0, 10],
tools="pan,wheel_zoom,box_zoom,reset,previewsave", name="image_example"
)
show() # open a browser
|
Fix example and remove extraneous import.
|
Fix example and remove extraneous import.
|
Python
|
bsd-3-clause
|
birdsarah/bokeh,srinathv/bokeh,justacec/bokeh,eteq/bokeh,saifrahmed/bokeh,eteq/bokeh,rothnic/bokeh,dennisobrien/bokeh,deeplook/bokeh,draperjames/bokeh,tacaswell/bokeh,daodaoliang/bokeh,abele/bokeh,abele/bokeh,phobson/bokeh,Karel-van-de-Plassche/bokeh,percyfal/bokeh,ericdill/bokeh,timsnyder/bokeh,CrazyGuo/bokeh,ptitjano/bokeh,quasiben/bokeh,rothnic/bokeh,ericmjl/bokeh,rs2/bokeh,philippjfr/bokeh,ericmjl/bokeh,rhiever/bokeh,stonebig/bokeh,timothydmorton/bokeh,rs2/bokeh,azjps/bokeh,roxyboy/bokeh,aiguofer/bokeh,justacec/bokeh,draperjames/bokeh,stuart-knock/bokeh,paultcochrane/bokeh,aiguofer/bokeh,birdsarah/bokeh,awanke/bokeh,ChristosChristofidis/bokeh,roxyboy/bokeh,laurent-george/bokeh,ahmadia/bokeh,saifrahmed/bokeh,birdsarah/bokeh,mutirri/bokeh,jplourenco/bokeh,htygithub/bokeh,laurent-george/bokeh,canavandl/bokeh,daodaoliang/bokeh,bokeh/bokeh,ahmadia/bokeh,schoolie/bokeh,schoolie/bokeh,evidation-health/bokeh,maxalbert/bokeh,dennisobrien/bokeh,ChristosChristofidis/bokeh,saifrahmed/bokeh,roxyboy/bokeh,muku42/bokeh,phobson/bokeh,jakirkham/bokeh,josherick/bokeh,ericmjl/bokeh,eteq/bokeh,ptitjano/bokeh,mutirri/bokeh,muku42/bokeh,timsnyder/bokeh,lukebarnard1/bokeh,DuCorey/bokeh,percyfal/bokeh,phobson/bokeh,laurent-george/bokeh,bokeh/bokeh,draperjames/bokeh,canavandl/bokeh,carlvlewis/bokeh,xguse/bokeh,carlvlewis/bokeh,mutirri/bokeh,DuCorey/bokeh,lukebarnard1/bokeh,muku42/bokeh,Karel-van-de-Plassche/bokeh,ptitjano/bokeh,tacaswell/bokeh,xguse/bokeh,akloster/bokeh,PythonCharmers/bokeh,aavanian/bokeh,roxyboy/bokeh,evidation-health/bokeh,alan-unravel/bokeh,mindriot101/bokeh,aavanian/bokeh,msarahan/bokeh,aiguofer/bokeh,bsipocz/bokeh,caseyclements/bokeh,percyfal/bokeh,tacaswell/bokeh,PythonCharmers/bokeh,ChristosChristofidis/bokeh,ChinaQuants/bokeh,timothydmorton/bokeh,mutirri/bokeh,ericdill/bokeh,timsnyder/bokeh,KasperPRasmussen/bokeh,matbra/bokeh,aavanian/bokeh,KasperPRasmussen/bokeh,satishgoda/bokeh,josherick/bokeh,srinathv/bokeh,ahmadia/bokeh,caseyclements/bokeh,draperjames/bokeh,stonebig/bokeh,clairetang6/bokeh,almarklein/bokeh,carlvlewis/bokeh,almarklein/bokeh,laurent-george/bokeh,satishgoda/bokeh,ericdill/bokeh,srinathv/bokeh,philippjfr/bokeh,deeplook/bokeh,msarahan/bokeh,timsnyder/bokeh,justacec/bokeh,rothnic/bokeh,aiguofer/bokeh,azjps/bokeh,DuCorey/bokeh,azjps/bokeh,PythonCharmers/bokeh,percyfal/bokeh,awanke/bokeh,bsipocz/bokeh,maxalbert/bokeh,almarklein/bokeh,dennisobrien/bokeh,bokeh/bokeh,DuCorey/bokeh,CrazyGuo/bokeh,matbra/bokeh,rs2/bokeh,josherick/bokeh,ericdill/bokeh,dennisobrien/bokeh,ChinaQuants/bokeh,clairetang6/bokeh,muku42/bokeh,stuart-knock/bokeh,Karel-van-de-Plassche/bokeh,bokeh/bokeh,khkaminska/bokeh,paultcochrane/bokeh,khkaminska/bokeh,xguse/bokeh,ChristosChristofidis/bokeh,jplourenco/bokeh,KasperPRasmussen/bokeh,ChinaQuants/bokeh,azjps/bokeh,abele/bokeh,msarahan/bokeh,maxalbert/bokeh,rs2/bokeh,alan-unravel/bokeh,awanke/bokeh,paultcochrane/bokeh,philippjfr/bokeh,timothydmorton/bokeh,htygithub/bokeh,quasiben/bokeh,rhiever/bokeh,evidation-health/bokeh,rhiever/bokeh,ericmjl/bokeh,msarahan/bokeh,timsnyder/bokeh,deeplook/bokeh,saifrahmed/bokeh,philippjfr/bokeh,xguse/bokeh,rhiever/bokeh,KasperPRasmussen/bokeh,tacaswell/bokeh,jplourenco/bokeh,phobson/bokeh,clairetang6/bokeh,ptitjano/bokeh,aavanian/bokeh,matbra/bokeh,Karel-van-de-Plassche/bokeh,bsipocz/bokeh,KasperPRasmussen/bokeh,Karel-van-de-Plassche/bokeh,ericmjl/bokeh,draperjames/bokeh,DuCorey/bokeh,justacec/bokeh,ChinaQuants/bokeh,eteq/bokeh,jakirkham/bokeh,PythonCharmers/bokeh,schoolie/bokeh,akloster/bokeh,htygithub/bokeh,lukebarnard1/bokeh,bsipocz/bokeh,schoolie/bokeh,azjps/bokeh,caseyclements/bokeh,akloster/bokeh,stuart-knock/bokeh,stuart-knock/bokeh,mindriot101/bokeh,CrazyGuo/bokeh,abele/bokeh,khkaminska/bokeh,maxalbert/bokeh,rs2/bokeh,CrazyGuo/bokeh,jakirkham/bokeh,bokeh/bokeh,khkaminska/bokeh,quasiben/bokeh,satishgoda/bokeh,clairetang6/bokeh,canavandl/bokeh,daodaoliang/bokeh,ahmadia/bokeh,gpfreitas/bokeh,josherick/bokeh,philippjfr/bokeh,phobson/bokeh,dennisobrien/bokeh,matbra/bokeh,satishgoda/bokeh,ptitjano/bokeh,akloster/bokeh,percyfal/bokeh,caseyclements/bokeh,htygithub/bokeh,canavandl/bokeh,carlvlewis/bokeh,paultcochrane/bokeh,mindriot101/bokeh,schoolie/bokeh,lukebarnard1/bokeh,gpfreitas/bokeh,gpfreitas/bokeh,alan-unravel/bokeh,evidation-health/bokeh,timothydmorton/bokeh,aavanian/bokeh,gpfreitas/bokeh,jakirkham/bokeh,awanke/bokeh,rothnic/bokeh,aiguofer/bokeh,stonebig/bokeh,jplourenco/bokeh,stonebig/bokeh,deeplook/bokeh,jakirkham/bokeh,alan-unravel/bokeh,mindriot101/bokeh,birdsarah/bokeh,srinathv/bokeh,daodaoliang/bokeh
|
5d8b217659fdd4a7248a60b430a24fe909bca805
|
test/test_acoustics.py
|
test/test_acoustics.py
|
import numpy as np
import pyfds as fds
def test_acoustic_material():
water = fds.AcousticMaterial(1500, 1000)
water.bulk_viscosity = 1e-3
water.shear_viscosity = 1e-3
assert np.isclose(water.absorption_coef, 7e-3 / 3)
|
import numpy as np
import pyfds as fds
def test_acoustic_material():
water = fds.AcousticMaterial(1500, 1000)
water.bulk_viscosity = 1e-3
water.shear_viscosity = 1e-3
assert np.isclose(water.absorption_coef, 7e-3 / 3)
def test_acoustic1d_create_matrices():
fld = fds.Acoustic1D(t_delta=1, t_samples=1,
x_delta=1, x_samples=3,
material=fds.AcousticMaterial(700, 0.01, bulk_viscosity=1))
fld.create_matrices()
assert np.allclose(fld.a_p_v.toarray(), [[-4900, 4900, 0], [0, -4900, 4900], [0, 0, -4900]])
assert np.allclose(fld.a_v_p.toarray(), [[100, 0, 0], [-100, 100, 0], [0, -100, 100]])
assert np.allclose(fld.a_v_v.toarray(), [[-200, 100, 0], [100, -200, 100], [0, 100, -200]])
|
Add test case for Acoustic1D.create_matrices().
|
Add test case for Acoustic1D.create_matrices().
|
Python
|
bsd-3-clause
|
emtpb/pyfds
|
ebd9949177db3e2db51b47b74254908e300edc13
|
process_test.py
|
process_test.py
|
"""
Copyright 2016 EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse, time
from pycompss.api.task import task
from pycompss.api.parameter import *
#class process_test:
#
# def __init__(self):
# self.ready = True
@task(x = IN)
def main(x):
print time.time(), x
y = range(1)
#pt = process_test()
map(main, y)
|
"""
Copyright 2016 EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pycompss.api.parameter import *
from pycompss.api.task import task
def main(x):
from pycompss.api.api import compss_wait_on
print "Main process:"
results = []
for i in x:
results.append(print_time(i))
results = compss_wait_on(results)
print results
@task(x = IN, returns = int)
def print_time(x):
import time
x = time.time()
return x
if __name__ == "__main__":
y = range(10)
main(y)
|
Test script to work out the method for creating tasks
|
Test script to work out the method for creating tasks
|
Python
|
apache-2.0
|
Multiscale-Genomics/mg-process-fastq,Multiscale-Genomics/mg-process-fastq,Multiscale-Genomics/mg-process-fastq
|
8d55ea0cfbafc9f6dc1044ba27c3313c36ea73c6
|
pombola/south_africa/templatetags/za_people_display.py
|
pombola/south_africa/templatetags/za_people_display.py
|
from django import template
register = template.Library()
NO_PLACE_ORGS = ('parliament', 'national-assembly', )
MEMBER_ORGS = ('parliament', 'national-assembly', )
@register.assignment_tag()
def should_display_place(organisation):
return organisation.slug not in NO_PLACE_ORGS
@register.assignment_tag()
def should_display_position(organisation, position_title):
should_display = True
if organisation.slug in MEMBER_ORGS and unicode(position_title) in (u'Member',):
should_display = False
if 'ncop' == organisation.slug and unicode(position_title) in (u'Delegate',):
should_display = False
return should_display
|
from django import template
register = template.Library()
NO_PLACE_ORGS = ('parliament', 'national-assembly', )
MEMBER_ORGS = ('parliament', 'national-assembly', )
@register.assignment_tag()
def should_display_place(organisation):
if not organisation:
return True
return organisation.slug not in NO_PLACE_ORGS
@register.assignment_tag()
def should_display_position(organisation, position_title):
should_display = True
if organisation.slug in MEMBER_ORGS and unicode(position_title) in (u'Member',):
should_display = False
if 'ncop' == organisation.slug and unicode(position_title) in (u'Delegate',):
should_display = False
return should_display
|
Fix display of people on constituency office page
|
[ZA] Fix display of people on constituency office page
This template tag was being called without an organisation, so in
production it was just silently failing, but in development it was
raising an exception.
This adds an extra check so that if there is no organisation then we
just short circuit and return `True`.
|
Python
|
agpl-3.0
|
mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola
|
e44dc4d68845845f601803f31e10833a24cdb27c
|
prosite_app.py
|
prosite_app.py
|
#!/bin/env python3
# Prosite regular expressions matcher
# Copyright (c) 2014 Tomasz Truszkowski
# All rights reserved.
import prosite_matcher
if __name__ == '__main__':
print("\n Hi, this is Prosite Matcher! \n")
sequence = input("Sequence: ")
regex = input("Regular expression: ")
prositeMatcher = prosite_matcher.PrositeMatcher()
prositeMatcher.compile(regex)
matches, ranges = prositeMatcher.get_matches(sequence)
print("Found patterns: ", end="")
if (len(matches) > 0):
print(sequence[ 0 : ranges[0][0] ], end="")
for i in range(0, len(matches)):
print("\033[91m", end="")
print(sequence[ ranges[i][0] : ranges[i][1] ], end="")
print("\033[0m", end="")
if (i < len(matches) - 1):
print(sequence[ ranges[i][1] : ranges[i + 1][0] ], end="")
print(sequence[ ranges[len(ranges) - 1][1] : len(sequence)])
else:
print(sequence)
print("")
for elem in list(zip(matches, ranges)):
print(elem[0], end=" ")
print(elem[1])
print("")
|
#!/bin/env python3
# Prosite regular expressions matcher
# Copyright (c) 2014 Tomasz Truszkowski
# All rights reserved.
import prosite_matcher
if __name__ == '__main__':
print("\n Hi, this is Prosite Matcher! \n")
sequence = input("Sequence: ")
regex = input("Regular expression: ")
if sequence != None and sequence != "" and regex != None and regex != "":
prositeMatcher = prosite_matcher.PrositeMatcher()
prositeMatcher.compile(regex)
matches, ranges = prositeMatcher.get_matches(sequence)
print("Found patterns: ", end="")
if (len(matches) > 0):
print(sequence[ 0 : ranges[0][0] ], end="")
for i in range(0, len(matches)):
print("\033[91m", end="")
print(sequence[ ranges[i][0] : ranges[i][1] ], end="")
print("\033[0m", end="")
if (i < len(matches) - 1):
print(sequence[ ranges[i][1] : ranges[i + 1][0] ], end="")
print(sequence[ ranges[len(ranges) - 1][1] : len(sequence)])
else:
print(sequence)
print("")
for elem in list(zip(matches, ranges)):
print(elem[0], end=" ")
print(elem[1])
print("")
else:
print("Sequence and regular expression can't be empty.")
|
Add check for empty sequence or regex.
|
Add check for empty sequence or regex.
|
Python
|
mit
|
stack-overflow/py_finite_state
|
017aa00a7b1f7a4a8a95f9c41576d4595d4085af
|
src/python/SparkSQLTwitter.py
|
src/python/SparkSQLTwitter.py
|
# A simple demo for working with SparkSQL and Tweets
from pyspark import SparkContext, SparkConf
from pyspark.sql import HiveContext, Row, IntegerType
import json
import sys
if __name__ == "__main__":
inputFile = sys.argv[1]
conf = SparkConf().setAppName("SparkSQLTwitter")
sc = SparkContext()
hiveCtx = HiveContext(sc)
print "Loading tweets from " + inputFile
input = hiveCtx.jsonFile(inputFile)
input.registerTempTable("tweets")
topTweets = hiveCtx.sql("SELECT text, retweetCount FROM tweets ORDER BY retweetCount LIMIT 10")
print topTweets.collect()
topTweetText = topTweets.map(lambda row : row.text)
print topTweetText.collect()
# Make a happy person row
happyPeopleRDD = sc.parallelize([Row(name="holden", favouriteBeverage="coffee")])
happyPeopleSchemaRDD = hiveCtx.inferSchema(happyPeopleRDD)
happyPeopleSchemaRDD.registerTempTable("happy_people")
# Make a UDF to tell us how long some text is
hiveCtx.registerFunction("strLenPython", lambda x: len(x), IntegerType())
lengthSchemaRDD = hiveCtx.sql("SELECT strLenPython('text') FROM tweets LIMIT 10")
print lengthSchemaRDD.collect()
sc.stop()
|
# A simple demo for working with SparkSQL and Tweets
from pyspark import SparkContext, SparkConf
from pyspark.sql import HiveContext, Row
from pyspark.sql.types import IntegerType
import json
import sys
if __name__ == "__main__":
inputFile = sys.argv[1]
conf = SparkConf().setAppName("SparkSQLTwitter")
sc = SparkContext()
hiveCtx = HiveContext(sc)
print "Loading tweets from " + inputFile
input = hiveCtx.jsonFile(inputFile)
input.registerTempTable("tweets")
topTweets = hiveCtx.sql("SELECT text, retweetCount FROM tweets ORDER BY retweetCount LIMIT 10")
print topTweets.collect()
topTweetText = topTweets.map(lambda row : row.text)
print topTweetText.collect()
# Make a happy person row
happyPeopleRDD = sc.parallelize([Row(name="holden", favouriteBeverage="coffee")])
happyPeopleSchemaRDD = hiveCtx.inferSchema(happyPeopleRDD)
happyPeopleSchemaRDD.registerTempTable("happy_people")
# Make a UDF to tell us how long some text is
hiveCtx.registerFunction("strLenPython", lambda x: len(x), IntegerType())
lengthSchemaRDD = hiveCtx.sql("SELECT strLenPython('text') FROM tweets LIMIT 10")
print lengthSchemaRDD.collect()
sc.stop()
|
Fix IntegerType import for Spark SQL
|
Fix IntegerType import for Spark SQL
|
Python
|
mit
|
DINESHKUMARMURUGAN/learning-spark,mohitsh/learning-spark,shimizust/learning-spark,JerryTseng/learning-spark,databricks/learning-spark,qingkaikong/learning-spark-examples,huixiang/learning-spark,qingkaikong/learning-spark-examples,obinsanni/learning-spark,bhagatsingh/learning-spark,holdenk/learning-spark-examples,NBSW/learning-spark,UsterNes/learning-spark,holdenk/learning-spark-examples,gaoxuesong/learning-spark,noprom/learning-spark,rex1100/learning-spark,kpraveen420/learning-spark,baokunguo/learning-spark-examples,gaoxuesong/learning-spark,shimizust/learning-spark,zaxliu/learning-spark,junwucs/learning-spark,holdenk/learning-spark-examples,noprom/learning-spark,negokaz/learning-spark,ellis429/learning-spark-examples,ellis429/learning-spark,mmirolim/learning-spark,asarraf/learning-spark,junwucs/learning-spark,jindalcastle/learning-spark,SunGuo/learning-spark,gaoxuesong/learning-spark,JerryTseng/learning-spark,bhagatsingh/learning-spark,XiaoqingWang/learning-spark,jaehyuk/learning-spark,huixiang/learning-spark,junwucs/learning-spark,asarraf/learning-spark,ellis429/learning-spark-examples,anjuncc/learning-spark-examples,concerned3rdparty/learning-spark,shimizust/learning-spark,JerryTseng/learning-spark,mmirolim/learning-spark,tengteng/learning-spark,JerryTseng/learning-spark,bhagatsingh/learning-spark,diogoaurelio/learning-spark,ellis429/learning-spark,DINESHKUMARMURUGAN/learning-spark,jaehyuk/learning-spark,anjuncc/learning-spark-examples,dsdinter/learning-spark-examples,jaehyuk/learning-spark,ramyasrigangula/learning-spark,diogoaurelio/learning-spark,holdenk/learning-spark-examples,DINESHKUMARMURUGAN/learning-spark,ellis429/learning-spark-examples,coursera4ashok/learning-spark,kod3r/learning-spark,ramyasrigangula/learning-spark,qingkaikong/learning-spark-examples,negokaz/learning-spark,XiaoqingWang/learning-spark,asarraf/learning-spark,mohitsh/learning-spark,ellis429/learning-spark-examples,anjuncc/learning-spark-examples,jindalcastle/learning-spark,obinsanni/learning-spark,UsterNes/learning-spark,diogoaurelio/learning-spark,jaehyuk/learning-spark,diogoaurelio/learning-spark,GatsbyNewton/learning-spark,ellis429/learning-spark,kod3r/learning-spark,baokunguo/learning-spark-examples,SunGuo/learning-spark,asarraf/learning-spark,ramyasrigangula/learning-spark,noprom/learning-spark,SunGuo/learning-spark,baokunguo/learning-spark-examples,UsterNes/learning-spark,coursera4ashok/learning-spark,kpraveen420/learning-spark,mohitsh/learning-spark,rex1100/learning-spark,jaehyuk/learning-spark,mmirolim/learning-spark,zaxliu/learning-spark,huixiang/learning-spark,mohitsh/learning-spark,NBSW/learning-spark,feynman0825/learning-spark,UsterNes/learning-spark,huydx/learning-spark,mmirolim/learning-spark,SunGuo/learning-spark,jindalcastle/learning-spark,obinsanni/learning-spark,bhagatsingh/learning-spark,dsdinter/learning-spark-examples,XiaoqingWang/learning-spark,concerned3rdparty/learning-spark,shimizust/learning-spark,dsdinter/learning-spark-examples,asarraf/learning-spark,ellis429/learning-spark,coursera4ashok/learning-spark,feynman0825/learning-spark,NBSW/learning-spark,concerned3rdparty/learning-spark,feynman0825/learning-spark,jindalcastle/learning-spark,XiaoqingWang/learning-spark,zaxliu/learning-spark,negokaz/learning-spark,kod3r/learning-spark,negokaz/learning-spark,obinsanni/learning-spark,NBSW/learning-spark,tengteng/learning-spark,qingkaikong/learning-spark-examples,DINESHKUMARMURUGAN/learning-spark,bhagatsingh/learning-spark,feynman0825/learning-spark,kpraveen420/learning-spark,holdenk/learning-spark-examples,huydx/learning-spark,XiaoqingWang/learning-spark,huydx/learning-spark,kod3r/learning-spark,JerryTseng/learning-spark,obinsanni/learning-spark,concerned3rdparty/learning-spark,junwucs/learning-spark,jindalcastle/learning-spark,huydx/learning-spark,concerned3rdparty/learning-spark,SunGuo/learning-spark,coursera4ashok/learning-spark,mmirolim/learning-spark,negokaz/learning-spark,ellis429/learning-spark-examples,GatsbyNewton/learning-spark,huydx/learning-spark,noprom/learning-spark,shimizust/learning-spark,tengteng/learning-spark,diogoaurelio/learning-spark,ellis429/learning-spark,tengteng/learning-spark,GatsbyNewton/learning-spark,NBSW/learning-spark,databricks/learning-spark,kpraveen420/learning-spark,databricks/learning-spark,qingkaikong/learning-spark-examples,anjuncc/learning-spark-examples,huixiang/learning-spark,dsdinter/learning-spark-examples,zaxliu/learning-spark,mohitsh/learning-spark,gaoxuesong/learning-spark,rex1100/learning-spark,databricks/learning-spark,GatsbyNewton/learning-spark,anjuncc/learning-spark-examples,kod3r/learning-spark,noprom/learning-spark,junwucs/learning-spark,kpraveen420/learning-spark,tengteng/learning-spark,databricks/learning-spark,coursera4ashok/learning-spark,feynman0825/learning-spark,huixiang/learning-spark,dsdinter/learning-spark-examples,baokunguo/learning-spark-examples,ramyasrigangula/learning-spark,baokunguo/learning-spark-examples,gaoxuesong/learning-spark,ramyasrigangula/learning-spark,UsterNes/learning-spark,GatsbyNewton/learning-spark,zaxliu/learning-spark,DINESHKUMARMURUGAN/learning-spark
|
4b172a9b2b9a9a70843bd41ad858d6f3120769b0
|
tests/test_funcargs.py
|
tests/test_funcargs.py
|
from django.test.client import Client
from pytest_django.client import RequestFactory
pytest_plugins = ['pytester']
def test_params(testdir):
testdir.makeconftest("""
import os, sys
import pytest_django as plugin
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(plugin.__file__), '../')))
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
pytest_plugins = ['django']
""")
p = testdir.makepyfile("""
import py
@py.test.params([dict(arg1=1, arg2=1), dict(arg1=1, arg2=2)])
def test_myfunc(arg1, arg2):
assert arg1 == arg2
""")
result = testdir.runpytest("-v", p)
assert result.stdout.fnmatch_lines([
"*test_myfunc*0*PASS*",
"*test_myfunc*1*FAIL*",
"*1 failed, 1 passed*"
])
def test_client(client):
assert isinstance(client, Client)
def test_rf(rf):
assert isinstance(rf, RequestFactory)
|
from django.test.client import Client
from pytest_django.client import RequestFactory
import py
pytest_plugins = ['pytester']
def test_params(testdir):
# Setting up the path isn't working - plugin.__file__ points to the wrong place
return
testdir.makeconftest("""
import os, sys
import pytest_django as plugin
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(plugin.__file__), '../')))
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
pytest_plugins = ['django']
""")
p = testdir.makepyfile("""
import py
@py.test.params([dict(arg1=1, arg2=1), dict(arg1=1, arg2=2)])
def test_myfunc(arg1, arg2):
assert arg1 == arg2
""")
result = testdir.runpytest("-v", p)
assert result.stdout.fnmatch_lines([
"*test_myfunc*0*PASS*",
"*test_myfunc*1*FAIL*",
"*1 failed, 1 passed*"
])
def test_client(client):
assert isinstance(client, Client)
def test_rf(rf):
assert isinstance(rf, RequestFactory)
|
Disable params test for now
|
Disable params test for now
|
Python
|
bsd-3-clause
|
ojake/pytest-django,pelme/pytest-django,hoh/pytest-django,thedrow/pytest-django,pombredanne/pytest_django,felixonmars/pytest-django,ktosiek/pytest-django,RonnyPfannschmidt/pytest_django,aptivate/pytest-django,davidszotten/pytest-django,reincubate/pytest-django,bforchhammer/pytest-django,tomviner/pytest-django,bfirsh/pytest_django
|
849552b1a2afdd89552e7c0395fc7be1786d5cbc
|
pybossa/auth/user.py
|
pybossa/auth/user.py
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from flask.ext.login import current_user
def create(user=None):
if current_user.is_authenticated():
if current_user.admin:
return True
else:
return False
else:
return False
def read(user=None):
return True
def update(user):
return create(user)
def delete(user):
return update(user)
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from flask.ext.login import current_user
def create(user=None): # pragma: no cover
if current_user.is_authenticated():
if current_user.admin:
return True
else:
return False
else:
return False
def read(user=None): # pragma: no cover
return True
def update(user): # pragma: no cover
return create(user)
def delete(user): # pragma: no cover
return update(user)
|
Exclude it from coverage as these permissions are not used yet.
|
Exclude it from coverage as these permissions are not used yet.
|
Python
|
agpl-3.0
|
PyBossa/pybossa,PyBossa/pybossa,CulturePlex/pybossa,jean/pybossa,inteligencia-coletiva-lsd/pybossa,harihpr/tweetclickers,stefanhahmann/pybossa,stefanhahmann/pybossa,geotagx/pybossa,geotagx/pybossa,CulturePlex/pybossa,OpenNewsLabs/pybossa,proyectos-analizo-info/pybossa-analizo-info,proyectos-analizo-info/pybossa-analizo-info,Scifabric/pybossa,proyectos-analizo-info/pybossa-analizo-info,jean/pybossa,CulturePlex/pybossa,inteligencia-coletiva-lsd/pybossa,harihpr/tweetclickers,Scifabric/pybossa,OpenNewsLabs/pybossa
|
5c9bdb1260562f0623807ce9a5751d33c806374a
|
pyfr/nputil.py
|
pyfr/nputil.py
|
# -*- coding: utf-8 -*-
import numpy as np
_npeval_syms = {'__builtins__': None,
'exp': np.exp, 'log': np.log,
'sin': np.sin, 'asin': np.arcsin,
'cos': np.cos, 'acos': np.arccos,
'tan': np.tan, 'atan': np.arctan, 'atan2': np.arctan2,
'abs': np.abs, 'pow': np.power, 'sqrt': np.sqrt,
'pi': np.pi}
def npeval(expr, locals):
# Allow '^' to be used for exponentiation
expr = expr.replace('^', '**')
return eval(expr, _npeval_syms, locals)
_range_eval_syms = {'__builtins__': None,
'range': lambda s,e,n: list(np.linspace(s, e, n))}
def range_eval(expr):
return [float(t) for t in eval(expr, _range_eval_syms, None)]
_ctype_map = {np.float32: 'float', np.float64: 'double'}
def npdtype_to_ctype(dtype):
return _ctype_map[np.dtype(dtype).type]
|
# -*- coding: utf-8 -*-
import numpy as np
def npaligned(shape, dtype, alignb=32):
nbytes = np.prod(shape)*np.dtype(dtype).itemsize
buf = np.zeros(nbytes + alignb, dtype=np.uint8)
off = -buf.ctypes.data % alignb
return buf[off:nbytes + off].view(dtype).reshape(shape)
_npeval_syms = {'__builtins__': None,
'exp': np.exp, 'log': np.log,
'sin': np.sin, 'asin': np.arcsin,
'cos': np.cos, 'acos': np.arccos,
'tan': np.tan, 'atan': np.arctan, 'atan2': np.arctan2,
'abs': np.abs, 'pow': np.power, 'sqrt': np.sqrt,
'pi': np.pi}
def npeval(expr, locals):
# Allow '^' to be used for exponentiation
expr = expr.replace('^', '**')
return eval(expr, _npeval_syms, locals)
_range_eval_syms = {'__builtins__': None,
'range': lambda s,e,n: list(np.linspace(s, e, n))}
def range_eval(expr):
return [float(t) for t in eval(expr, _range_eval_syms, None)]
_ctype_map = {np.float32: 'float', np.float64: 'double'}
def npdtype_to_ctype(dtype):
return _ctype_map[np.dtype(dtype).type]
|
Add support for allocating aligned NumPy arrays.
|
Add support for allocating aligned NumPy arrays.
|
Python
|
bsd-3-clause
|
tjcorona/PyFR,tjcorona/PyFR,BrianVermeire/PyFR,Aerojspark/PyFR,iyer-arvind/PyFR,tjcorona/PyFR
|
126c58d78360e69c2d16a40f9396a8158844e2b1
|
tests/test_creators.py
|
tests/test_creators.py
|
"""
Test the post methods.
"""
def test_matrix_creation_endpoint(client):
response = client.post('/matrix', {
'bibliography': '12312312',
'fields': 'title,description',
})
print(response.json())
assert response.status_code == 200
|
"""
Test the post methods.
"""
from condor.models import Bibliography
def test_matrix_creation_endpoint(client, session):
bib = Bibliography(eid='123', description='lorem')
session.add(bib)
session.flush()
response = client.post('/matrix', {
'bibliography': '123',
'fields': 'title,description',
})
response = client.get(f"/matrix/{response.json().get('eid')}")
assert response.status_code == 200
assert response.json().get('bibliography_eid') == '123'
|
Create test for matrix post endpoint
|
Create test for matrix post endpoint
|
Python
|
mit
|
odarbelaeze/condor-api
|
e94d39bf330312dc46697a689b56f7518ebd501c
|
footer/magic/images.py
|
footer/magic/images.py
|
#import PIL
import cairosvg
from django.template import Template, Context
def make_svg(context):
svg_tmpl = Template("""
<svg xmlns="http://www.w3.org/2000/svg"
width="500" height="600" viewBox="0 0 500 400">
<text x="0" y="0" font-family="Verdana" font-size="10" fill="blue" dy="0">
{{ name }} {{ text }}
{% for k,v in data.items %}
{% if v.items %}
<tspan x="0" dy="1.0em">
{{ k }}:
</tspan>
{% for kk, vv in v.items %}
<tspan x="0" dy="1.0em">
{{ kk }}: {{ vv }}
</tspan>
{% endfor %}
{% else %}
<tspan x="0" dy="1.0em">
{{ k }}: {{ v }}
</tspan>
{% endif %}
{% endfor %}
</text>
</svg>
""".strip())
svg = svg_tmpl.render(Context({'data': context}))
return svg
def write_svg_to_png(svg_raw, outfile):
cairosvg.svg2png(bytestring=svg_raw, write_to=outfile)
return outfile
|
#import PIL
import cairosvg
from django.template import Template, Context
def make_svg(context):
svg_tmpl = Template("""
<svg xmlns="http://www.w3.org/2000/svg"
width="500" height="600" viewBox="0 0 500 400">
<text x="0" y="0" font-family="Verdana" font-size="10" fill="blue" dy="0">
{{ name }} {{ text }}
{% for k,v in data.items %}
{% if v.items %}
<tspan x="0" dy="1.0em">
{{ k }}:
</tspan>
{% for kk, vv in v.items %}
<tspan x="0" dy="1.0em">
{{ kk }}: <tspan fill="red" dy="0.0em">{{ vv }}</tspan>
</tspan>
{% endfor %}
{% else %}
<tspan x="0" dy="1.0em">
{{ k }}: <tspan fill="red" dy="0.0em">{{ v }}</tspan>
</tspan>
{% endif %}
{% endfor %}
</text>
</svg>
""".strip())
svg = svg_tmpl.render(Context({'data': context}))
return svg
def write_svg_to_png(svg_raw, outfile):
cairosvg.svg2png(bytestring=svg_raw, write_to=outfile)
return outfile
|
Add some color to SVG key/values
|
Add some color to SVG key/values
|
Python
|
mit
|
mihow/footer,mihow/footer,mihow/footer,mihow/footer
|
ebdafecea8c5b6597a7b2e2822afc98b9c47bb05
|
toast/math/__init__.py
|
toast/math/__init__.py
|
def lerp(fromValue, toValue, step):
return fromValue + (toValue - fromValue) * step
|
def lerp(fromValue, toValue, percent):
return fromValue + (toValue - fromValue) * percent
|
Refactor to lerp param names.
|
Refactor to lerp param names.
|
Python
|
mit
|
JoshuaSkelly/Toast,JSkelly/Toast
|
b2e743a19f13c898b2d95595a7a7175eca4bdb2c
|
results/urls.py
|
results/urls.py
|
__author__ = 'ankesh'
from django.conf.urls import patterns, url
import views
urlpatterns = patterns('',
url(r'^(?P<filename>[a-zA-Z0-9]+)/$', views.show_result, name='showResult'),
url(r'^compare/(?P<filename>[a-zA-Z0-9]+)/$', views.compare_result, name='compareResult'),
)
|
__author__ = 'ankesh'
from django.conf.urls import patterns, url
import views
urlpatterns = patterns('',
url(r'^(?P<filename>[a-zA-Z0-9]+)/$', views.show_result, name='showResult'),
url(r'^compare/(?P<filename>[a-zA-Z0-9]+)/$', views.compare_result, name='compareResult'),
url(r'^recent/$', views.recent_results, name='recentResults'),
)
|
Update URLs to include recent results
|
Update URLs to include recent results
|
Python
|
bsd-2-clause
|
ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark
|
7e3dfe47598401f4d5b96a377927473bb8adc244
|
bush/aws/base.py
|
bush/aws/base.py
|
from bush.aws.session import create_session
class AWSBase:
# USAGE = ""
# SUB_COMMANDS = []
def __init__(self, options, resource_name):
self.name = resource_name
self.options = options
self.session = create_session(options)
self.resource = self.session.resource(resource_name)
self.client = self.session.client(resource_name)
|
from bush.aws.session import create_session
class AWSBase:
# USAGE = ""
# SUB_COMMANDS = []
def __init__(self, options, resource_name):
self.name = resource_name
self.options = options
self.session = create_session(options)
@property
def resource(self):
if not hasattr(self, '__resource'):
self.__set_resource()
return self.__resource
@property
def client(self):
if not hasattr(self, '__client'):
self.__set_client()
return self.__client
def __set_resource(self):
self.__resource = self.session.resource(self.name)
def __set_client(self):
self.__client = self.session.client(self.name)
|
Set resource and client when it is needed
|
Set resource and client when it is needed
|
Python
|
mit
|
okamos/bush
|
2425f5a3b0b3f465eec86de9873696611dfda04a
|
example_project/users/social_pipeline.py
|
example_project/users/social_pipeline.py
|
import hashlib
from rest_framework.response import Response
def auto_logout(*args, **kwargs):
"""Do not compare current user with new one"""
return {'user': None}
def save_avatar(strategy, details, user=None, *args, **kwargs):
"""Get user avatar from social provider."""
if user:
backend_name = kwargs['backend'].__class__.__name__.lower()
response = kwargs.get('response', {})
social_thumb = None
if 'facebook' in backend_name:
if 'id' in response:
social_thumb = (
'http://graph.facebook.com/{0}/picture?type=normal'
).format(response['id'])
elif 'twitter' in backend_name and response.get('profile_image_url'):
social_thumb = response['profile_image_url']
elif 'googleoauth2' in backend_name and response.get('image', {}).get('url'):
social_thumb = response['image']['url'].split('?')[0]
else:
social_thumb = 'http://www.gravatar.com/avatar/'
social_thumb += hashlib.md5(user.email.lower().encode('utf8')).hexdigest()
social_thumb += '?size=100'
if social_thumb and user.social_thumb != social_thumb:
user.social_thumb = social_thumb
strategy.storage.user.changed(user)
def check_for_email(backend, uid, user=None, *args, **kwargs):
if not kwargs['details'].get('email'):
return Response({'error': "Email wasn't provided by facebook"}, status=400)
|
import hashlib
from rest_framework.response import Response
def auto_logout(*args, **kwargs):
"""Do not compare current user with new one"""
return {'user': None}
def save_avatar(strategy, details, user=None, *args, **kwargs):
"""Get user avatar from social provider."""
if user:
backend_name = kwargs['backend'].__class__.__name__.lower()
response = kwargs.get('response', {})
social_thumb = None
if 'facebook' in backend_name:
if 'id' in response:
social_thumb = (
'http://graph.facebook.com/{0}/picture?type=normal'
).format(response['id'])
elif 'twitter' in backend_name and response.get('profile_image_url'):
social_thumb = response['profile_image_url']
elif 'googleoauth2' in backend_name and response.get('image', {}).get('url'):
social_thumb = response['image']['url'].split('?')[0]
else:
social_thumb = 'http://www.gravatar.com/avatar/'
social_thumb += hashlib.md5(user.email.lower().encode('utf8')).hexdigest()
social_thumb += '?size=100'
if social_thumb and user.social_thumb != social_thumb:
user.social_thumb = social_thumb
strategy.storage.user.changed(user)
def check_for_email(backend, uid, user=None, *args, **kwargs):
if not kwargs['details'].get('email'):
return Response({'error': "Email wasn't provided by oauth provider"}, status=400)
|
Fix error message in example project
|
Fix error message in example project
|
Python
|
mit
|
st4lk/django-rest-social-auth,st4lk/django-rest-social-auth,st4lk/django-rest-social-auth
|
20f6df95d302ea79d11208ada6218a2c99d397e3
|
common.py
|
common.py
|
import json
from base64 import b64encode
# http://stackoverflow.com/a/4256027/212555
def del_none(o):
"""
Delete keys with the value ``None`` in a dictionary, recursively.
This alters the input so you may wish to ``copy`` the dict first.
"""
if isinstance(o, dict):
d = o
else:
d = o.__dict__
for key, value in list(d.items()):
if value is None:
del d[key]
elif isinstance(value, dict):
del_none(value)
return d
def _to_json_dict(o):
if isinstance(o, bytes):
try:
return o.decode("ASCII")
except UnicodeError:
return b64encode(o)
if isinstance(o, set):
return list(o)
return o.__dict__
def to_json(o):
return json.dumps(del_none(o), default=_to_json_dict, indent=4)
|
import json
from base64 import b64encode
# http://stackoverflow.com/a/4256027/212555
def del_none(o):
"""
Delete keys with the value ``None`` in a dictionary, recursively.
This alters the input so you may wish to ``copy`` the dict first.
"""
if isinstance(o, dict):
d = o.copy()
else:
d = o.__dict__.copy()
for key, value in list(d.items()):
if value is None:
del d[key]
elif isinstance(value, dict):
del_none(value)
return d
def _to_json_dict(o):
if isinstance(o, bytes):
try:
return o.decode("ASCII")
except UnicodeError:
return b64encode(o)
if isinstance(o, set):
return list(o)
return o.__dict__
def to_json(o):
return json.dumps(del_none(o), default=_to_json_dict, indent=4)
|
Make a copy of dicts before deleting things from them when printing.
|
Make a copy of dicts before deleting things from them when printing.
|
Python
|
bsd-2-clause
|
brendanlong/mpeg-ts-inspector,brendanlong/mpeg-ts-inspector
|
fd7027ae889d61949998ea02fbb56dbc8e6005a4
|
polling_stations/apps/data_importers/management/commands/import_cheltenham.py
|
polling_stations/apps/data_importers/management/commands/import_cheltenham.py
|
from data_importers.management.commands import BaseHalaroseCsvImporter
class Command(BaseHalaroseCsvImporter):
council_id = "CHT"
addresses_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
stations_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.housepostcode in [
"GL50 2RF",
"GL52 6RN",
"GL52 2ES",
"GL53 7AJ",
"GL50 3RB",
"GL53 0HL",
"GL50 2DZ",
]:
return None
return super().address_record_to_dict(record)
|
from data_importers.management.commands import BaseHalaroseCsvImporter
class Command(BaseHalaroseCsvImporter):
council_id = "CHT"
addresses_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
stations_name = (
"2022-05-05/2022-02-25T12:48:35.558843/polling_station_export-2022-02-25.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.housepostcode in [
"GL50 2RF",
"GL52 6RN",
"GL52 2ES",
"GL53 7AJ",
"GL50 3RB",
"GL53 0HL",
"GL50 2DZ",
]:
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.pollingstationnumber == "191":
record = record._replace(pollingstationaddress_1="")
return super().station_record_to_dict(record)
|
Fix to CHT station name
|
Fix to CHT station name
|
Python
|
bsd-3-clause
|
DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
|
f2cdd8eb42afb9db5465e062544631684cabd24f
|
wagtail/contrib/wagtailfrontendcache/signal_handlers.py
|
wagtail/contrib/wagtailfrontendcache/signal_handlers.py
|
from django.db import models
from django.db.models.signals import post_delete
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.signals import page_published
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def page_published_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def post_delete_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
page_published.connect(page_published_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
|
from django.db import models
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.signals import page_published, page_unpublished
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def page_published_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def page_unpublished_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
page_published.connect(page_published_signal_handler, sender=model)
page_unpublished.connect(page_unpublished_signal_handler, sender=model)
|
Use page_unpublished signal in frontend cache invalidator
|
Use page_unpublished signal in frontend cache invalidator
|
Python
|
bsd-3-clause
|
Pennebaker/wagtail,chrxr/wagtail,jorge-marques/wagtail,jnns/wagtail,kurtw/wagtail,jorge-marques/wagtail,kaedroho/wagtail,WQuanfeng/wagtail,willcodefortea/wagtail,chimeno/wagtail,JoshBarr/wagtail,nimasmi/wagtail,zerolab/wagtail,torchbox/wagtail,kaedroho/wagtail,nutztherookie/wagtail,jorge-marques/wagtail,Klaudit/wagtail,mjec/wagtail,torchbox/wagtail,kurtrwall/wagtail,dresiu/wagtail,jorge-marques/wagtail,chimeno/wagtail,darith27/wagtail,Toshakins/wagtail,taedori81/wagtail,nealtodd/wagtail,iansprice/wagtail,WQuanfeng/wagtail,willcodefortea/wagtail,quru/wagtail,chrxr/wagtail,jnns/wagtail,gasman/wagtail,darith27/wagtail,takeshineshiro/wagtail,jnns/wagtail,iansprice/wagtail,willcodefortea/wagtail,timorieber/wagtail,Klaudit/wagtail,mikedingjan/wagtail,gogobook/wagtail,inonit/wagtail,Klaudit/wagtail,tangentlabs/wagtail,zerolab/wagtail,zerolab/wagtail,JoshBarr/wagtail,FlipperPA/wagtail,KimGlazebrook/wagtail-experiment,thenewguy/wagtail,nrsimha/wagtail,tangentlabs/wagtail,chrxr/wagtail,stevenewey/wagtail,JoshBarr/wagtail,taedori81/wagtail,rsalmaso/wagtail,WQuanfeng/wagtail,serzans/wagtail,Tivix/wagtail,bjesus/wagtail,Toshakins/wagtail,davecranwell/wagtail,gogobook/wagtail,wagtail/wagtail,mephizzle/wagtail,mixxorz/wagtail,rjsproxy/wagtail,wagtail/wagtail,stevenewey/wagtail,zerolab/wagtail,rsalmaso/wagtail,hanpama/wagtail,taedori81/wagtail,Tivix/wagtail,mjec/wagtail,dresiu/wagtail,hamsterbacke23/wagtail,janusnic/wagtail,nealtodd/wagtail,nilnvoid/wagtail,mixxorz/wagtail,Pennebaker/wagtail,marctc/wagtail,serzans/wagtail,nilnvoid/wagtail,Toshakins/wagtail,thenewguy/wagtail,Pennebaker/wagtail,rv816/wagtail,benjaoming/wagtail,m-sanders/wagtail,nutztherookie/wagtail,jorge-marques/wagtail,chimeno/wagtail,FlipperPA/wagtail,marctc/wagtail,nimasmi/wagtail,torchbox/wagtail,mikedingjan/wagtail,m-sanders/wagtail,m-sanders/wagtail,gogobook/wagtail,dresiu/wagtail,nrsimha/wagtail,nilnvoid/wagtail,kurtrwall/wagtail,hanpama/wagtail,kurtrwall/wagtail,iho/wagtail,timorieber/wagtail,rsalmaso/wagtail,inonit/wagtail,chrxr/wagtail,wagtail/wagtail,jordij/wagtail,thenewguy/wagtail,quru/wagtail,takeflight/wagtail,mephizzle/wagtail,kurtw/wagtail,100Shapes/wagtail,benemery/wagtail,stevenewey/wagtail,takeshineshiro/wagtail,chimeno/wagtail,davecranwell/wagtail,takeflight/wagtail,taedori81/wagtail,zerolab/wagtail,gogobook/wagtail,hamsterbacke23/wagtail,wagtail/wagtail,nutztherookie/wagtail,janusnic/wagtail,mjec/wagtail,mephizzle/wagtail,rjsproxy/wagtail,iansprice/wagtail,jordij/wagtail,kaedroho/wagtail,benjaoming/wagtail,nrsimha/wagtail,benjaoming/wagtail,nrsimha/wagtail,darith27/wagtail,taedori81/wagtail,mayapurmedia/wagtail,benemery/wagtail,benjaoming/wagtail,quru/wagtail,serzans/wagtail,thenewguy/wagtail,mikedingjan/wagtail,JoshBarr/wagtail,Toshakins/wagtail,inonit/wagtail,Tivix/wagtail,kaedroho/wagtail,gasman/wagtail,rsalmaso/wagtail,dresiu/wagtail,chimeno/wagtail,inonit/wagtail,mikedingjan/wagtail,mayapurmedia/wagtail,mephizzle/wagtail,KimGlazebrook/wagtail-experiment,iho/wagtail,hamsterbacke23/wagtail,kaedroho/wagtail,nimasmi/wagtail,rjsproxy/wagtail,iho/wagtail,rjsproxy/wagtail,dresiu/wagtail,mayapurmedia/wagtail,torchbox/wagtail,stevenewey/wagtail,bjesus/wagtail,nealtodd/wagtail,rv816/wagtail,darith27/wagtail,hanpama/wagtail,takeshineshiro/wagtail,bjesus/wagtail,jordij/wagtail,KimGlazebrook/wagtail-experiment,nimasmi/wagtail,nutztherookie/wagtail,KimGlazebrook/wagtail-experiment,marctc/wagtail,jordij/wagtail,takeshineshiro/wagtail,Tivix/wagtail,gasman/wagtail,thenewguy/wagtail,rv816/wagtail,benemery/wagtail,davecranwell/wagtail,nilnvoid/wagtail,janusnic/wagtail,janusnic/wagtail,timorieber/wagtail,iansprice/wagtail,kurtw/wagtail,iho/wagtail,takeflight/wagtail,quru/wagtail,takeflight/wagtail,mixxorz/wagtail,marctc/wagtail,mayapurmedia/wagtail,kurtw/wagtail,Pennebaker/wagtail,gasman/wagtail,mixxorz/wagtail,m-sanders/wagtail,serzans/wagtail,mixxorz/wagtail,WQuanfeng/wagtail,rsalmaso/wagtail,timorieber/wagtail,100Shapes/wagtail,gasman/wagtail,100Shapes/wagtail,jnns/wagtail,hamsterbacke23/wagtail,willcodefortea/wagtail,tangentlabs/wagtail,hanpama/wagtail,tangentlabs/wagtail,nealtodd/wagtail,mjec/wagtail,FlipperPA/wagtail,FlipperPA/wagtail,bjesus/wagtail,davecranwell/wagtail,wagtail/wagtail,rv816/wagtail,benemery/wagtail,Klaudit/wagtail,kurtrwall/wagtail
|
add50f0356756469c1ee1e52f13faee7df85f280
|
tests/rest/rest_test_suite.py
|
tests/rest/rest_test_suite.py
|
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
from http_test_suite import HTTPTestSuite
from mozdef_util.utilities.dot_dict import DotDict
import mock
from configlib import OptionParser
class RestTestDict(DotDict):
@property
def __dict__(self):
return self
class RestTestSuite(HTTPTestSuite):
def setup(self):
sample_config = RestTestDict()
sample_config.configfile = os.path.join(os.path.dirname(__file__), 'index.conf')
OptionParser.parse_args = mock.Mock(return_value=(sample_config, {}))
from rest import index
self.application = index.application
super(RestTestSuite, self).setup()
|
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
from http_test_suite import HTTPTestSuite
from mozdef_util.utilities.dot_dict import DotDict
import mock
from configlib import OptionParser
import importlib
class RestTestDict(DotDict):
@property
def __dict__(self):
return self
class RestTestSuite(HTTPTestSuite):
def setup(self):
sample_config = RestTestDict()
sample_config.configfile = os.path.join(os.path.dirname(__file__), 'index.conf')
OptionParser.parse_args = mock.Mock(return_value=(sample_config, {}))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../../rest"))
import plugins
importlib.reload(plugins)
from rest import index
self.application = index.application
super(RestTestSuite, self).setup()
|
Fix import path for rest plugins
|
Fix import path for rest plugins
|
Python
|
mpl-2.0
|
mozilla/MozDef,mozilla/MozDef,mpurzynski/MozDef,mpurzynski/MozDef,mozilla/MozDef,mpurzynski/MozDef,jeffbryner/MozDef,jeffbryner/MozDef,mozilla/MozDef,mpurzynski/MozDef,jeffbryner/MozDef,jeffbryner/MozDef
|
5b516cd3e6363c4c995022c358fabeb0cc543115
|
tests/test_route_requester.py
|
tests/test_route_requester.py
|
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
class TestOptionalParameters(unittest.TestCase):
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")
def test_invalid_mode(self):
"""
Tests the is_valid_mode function for an invalid input
"""
with self.assertRaises(InvalidModeError):
requester.set_mode("flying")
def test_invalid_alternative(self):
"""
Tests for error handling when an invalid value is provided to
the set_alternative function
"""
with self.assertRaises(InvalidAlternativeError):
requester.set_alternatives('False')
class TestAPIKey(unittest.TestCase):
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")
def test_invalid_api_key(self):
invalid_key = 123456
with self.assertRaises(InvalidAPIKeyError):
requester.set_api_key(invalid_key)
if __name__ == '__main__':
unittest.main()
|
import unittest
from pydirections.route_requester import DirectionsRequest
from pydirections.exceptions import InvalidModeError, InvalidAPIKeyError, InvalidAlternativeError
requester = DirectionsRequest(origin="San Francisco, CA", destination="Palo Alto, CA")
class TestOptionalParameters(unittest.TestCase):
def test_invalid_mode(self):
"""
Tests the is_valid_mode function for an invalid input
"""
with self.assertRaises(InvalidModeError):
requester.set_mode("flying")
def test_invalid_alternative(self):
"""
Tests for error handling when an invalid value is provided to
the set_alternative function
"""
with self.assertRaises(InvalidAlternativeError):
requester.set_alternatives('False')
class TestAPIKey(unittest.TestCase):
def test_invalid_api_key(self):
invalid_key = 123456
with self.assertRaises(InvalidAPIKeyError):
requester.set_api_key(invalid_key)
if __name__ == '__main__':
unittest.main()
|
Fix bug in unit tests
|
Fix bug in unit tests
|
Python
|
apache-2.0
|
apranav19/pydirections
|
9b720026722ce92a8c0e05aa041d6e861c5e4e82
|
changes/api/jobstep_deallocate.py
|
changes/api/jobstep_deallocate.py
|
from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status != Status.allocated:
return {
"error": "Only {0} job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
|
from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status not in (Status.in_progress, Status.allocated):
return {
"error": "Only allocated and running job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
|
Allow running jobsteps to be deallocated
|
Allow running jobsteps to be deallocated
|
Python
|
apache-2.0
|
dropbox/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes
|
64cd71fd171cd1b76b111aedc94423006176f811
|
src/tldt/cli.py
|
src/tldt/cli.py
|
import argparse
import tldt
def main():
parser = argparse.ArgumentParser(description="cacat")
parser.add_argument("head_repo")
parser.add_argument("head_sha")
parser.add_argument("base_repo")
parser.add_argument("base_sha")
args = parser.parse_args()
tldt.main(head_repo=args.head_repo,
head_sha=args.head_sha,
base_repo=args.base_repo,
base_sha=args.base_sha)
if __name__ == '__main__':
main()
|
import argparse
import os.path
import tldt
def main():
user_home = os.path.expanduser("~")
parser = argparse.ArgumentParser(description="cacat")
parser.add_argument("head_repo")
parser.add_argument("head_sha")
parser.add_argument("base_repo")
parser.add_argument("base_sha")
parser.add_argument("--configuration", default=os.path.join(user_home, "tldt.ini"))
args = parser.parse_args()
tldt.main(head_repo=args.head_repo,
head_sha=args.head_sha,
base_repo=args.base_repo,
base_sha=args.base_sha,
configuration_path=args.configuration)
if __name__ == '__main__':
main()
|
Add configuration file option with default to ~/tldt.ini
|
Add configuration file option with default to ~/tldt.ini
|
Python
|
unlicense
|
rciorba/tldt,rciorba/tldt
|
45e04697303eb85330bd61f1b386e483fc42f49b
|
src/oscar/templatetags/currency_filters.py
|
src/oscar/templatetags/currency_filters.py
|
from decimal import Decimal as D
from decimal import InvalidOperation
from babel.numbers import format_currency
from django import template
from django.conf import settings
from django.utils.translation import get_language, to_locale
register = template.Library()
@register.filter(name='currency')
def currency(value, currency=None):
"""
Format decimal value as currency
"""
try:
value = D(value)
except (TypeError, InvalidOperation):
return ""
# Using Babel's currency formatting
# http://babel.pocoo.org/en/latest/api/numbers.html#babel.numbers.format_currency
OSCAR_CURRENCY_FORMAT = getattr(settings, 'OSCAR_CURRENCY_FORMAT', None)
kwargs = {
'currency': currency or settings.OSCAR_DEFAULT_CURRENCY,
'locale': to_locale(get_language() or settings.LANGUAGE_CODE)
}
if isinstance(OSCAR_CURRENCY_FORMAT, dict):
kwargs.update(OSCAR_CURRENCY_FORMAT.get(currency, {}))
else:
kwargs['format'] = OSCAR_CURRENCY_FORMAT
return format_currency(value, **kwargs)
|
from decimal import Decimal as D
from decimal import InvalidOperation
from babel.numbers import format_currency
from django import template
from django.conf import settings
from django.utils.translation import get_language, to_locale
register = template.Library()
@register.filter(name='currency')
def currency(value, currency=None):
"""
Format decimal value as currency
"""
if currency is None:
currency = settings.OSCAR_DEFAULT_CURRENCY
try:
value = D(value)
except (TypeError, InvalidOperation):
return ""
# Using Babel's currency formatting
# http://babel.pocoo.org/en/latest/api/numbers.html#babel.numbers.format_currency
OSCAR_CURRENCY_FORMAT = getattr(settings, 'OSCAR_CURRENCY_FORMAT', None)
kwargs = {
'currency': currency,
'locale': to_locale(get_language() or settings.LANGUAGE_CODE)
}
if isinstance(OSCAR_CURRENCY_FORMAT, dict):
kwargs.update(OSCAR_CURRENCY_FORMAT.get(currency, {}))
else:
kwargs['format'] = OSCAR_CURRENCY_FORMAT
return format_currency(value, **kwargs)
|
Fix for missing default in currency filter
|
Fix for missing default in currency filter
|
Python
|
bsd-3-clause
|
solarissmoke/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,solarissmoke/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,django-oscar/django-oscar,django-oscar/django-oscar
|
6cf42d661facf1c11de545959b91c073709eac8e
|
webapp_tests.py
|
webapp_tests.py
|
#!/usr/bin/env python
import os
import webapp
import unittest
class WebappTestExtractingServiceDomainsFromLinks(unittest.TestCase):
#def setUp(self):
# self.app = webapp.app.test_client()
def test_extract_service_domain_from_link(self):
status, domain = webapp.extract_service_domain_from_link('https://foo.service.gov.uk/blah')
assert True == status
assert "foo.service.gov.uk" == domain
def test_extract_nonservice_domain_from_link(self):
status, domain = webapp.extract_service_domain_from_link('https://foo.foo.gov.uk/blah')
assert False == status
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
import os
import webapp
import unittest
class WebappTestExtractingServiceDomainsFromLinks(unittest.TestCase):
#def setUp(self):
# self.app = webapp.app.test_client()
def test_extract_service_domain_from_link(self):
status, domain = webapp.extract_service_domain_from_link('https://foo.service.gov.uk/blah')
assert True == status
assert "foo.service.gov.uk" == domain
def test_extract_nonservice_domain_from_link(self):
status, domain = webapp.extract_service_domain_from_link('https://foo.foo.gov.uk/blah')
assert False == status
class WebappTestExtractingServiceLinkFromSlug(unittest.TestCase):
def test_find_link_from_slug(self):
status, link = webapp.find_link_from_slug('/lasting-power-of-attorney')
assert True == status
assert "https://lastingpowerofattorney.service.gov.uk/" == link
def test_fail_to_find_link_from_slug(self):
status, link = webapp.find_link_from_slug('/bank-holidays')
assert False == status
if __name__ == '__main__':
unittest.main()
|
Add a test for extracting service domain from a link
|
Add a test for extracting service domain from a link
|
Python
|
mit
|
alphagov/service-domain-checker
|
2f6dc1d43bd402152c7807e905cc808899a640d2
|
mail/views.py
|
mail/views.py
|
import logging
from django.core.mail import EmailMessage
from django.http import JsonResponse
from django.middleware import csrf
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import redirect
from rest_framework.decorators import api_view
@csrf_exempt
@api_view(['POST', 'GET'])
def send_contact_message(request):
if request.method == 'POST':
from_name = request.POST.get("from_name", "")
from_address = request.POST.get("from_address", "")
from_string = '{} <{}>'.format(from_name, from_address)
subject = request.POST.get("subject", "")
message_body = request.POST.get("message_body", "")
# Add subject: to_address to this dict to add a new email address.
# Subject will map to the email being sent to to prevent misuse of our email server.
emails = {
'Bulk Order': 'rej2@rice.edu',
}
try:
to_address = emails[subject].split(',')
email = EmailMessage(subject,
message_body,
'noreply@openstax.org',
to_address,
reply_to=[from_string])
email.send()
except KeyError:
logging.error("EMAIL FAILED TO SEND: subject:{}")
return redirect('/confirmation?contact')
# if this is not posting a message, let's send the csfr token back
else:
csrf_token = csrf.get_token(request)
data = {'csrf_token': csrf_token}
return JsonResponse(data)
|
import logging
from django.core.mail import EmailMessage
from django.http import JsonResponse
from django.middleware import csrf
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import redirect
from rest_framework.decorators import api_view
@csrf_exempt
@api_view(['POST', 'GET'])
def send_contact_message(request):
if request.method == 'POST':
from_name = request.POST.get("from_name", "")
from_address = request.POST.get("from_address", "")
from_string = '{} <{}>'.format(from_name, from_address)
subject = request.POST.get("subject", "")
message_body = request.POST.get("message_body", "")
# Add subject: to_address to this dict to add a new email address.
# Subject will map to the email being sent to to prevent misuse of our email server.
emails = {
'Bulk Order': 'Tory.Watterson@rice.edu',
}
try:
to_address = emails[subject].split(',')
email = EmailMessage(subject,
message_body,
'noreply@openstax.org',
to_address,
reply_to=[from_string])
email.send()
except KeyError:
logging.error("EMAIL FAILED TO SEND: subject:{}")
return redirect('/confirmation?contact')
# if this is not posting a message, let's send the csfr token back
else:
csrf_token = csrf.get_token(request)
data = {'csrf_token': csrf_token}
return JsonResponse(data)
|
Change bulk order email address to Tory
|
Change bulk order email address to Tory
|
Python
|
agpl-3.0
|
openstax/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms
|
c12a1b53166c34c074e018fcc149a0aa2db56b43
|
helpscout/models/folder.py
|
helpscout/models/folder.py
|
# -*- coding: utf-8 -*-
# Copyright 2017-TODAY LasLabs Inc.
# License MIT (https://opensource.org/licenses/MIT).
import properties
from .. import BaseModel
class Folder(BaseModel):
name = properties.String(
'Folder name',
required=True,
)
type = properties.StringChoice(
'The type of folder.',
choices=['needsattention',
'drafts',
'assigned',
'open',
'closed',
'spam',
'mine',
],
default='drafts',
required=True,
)
user_id = properties.Integer(
'If the folder type is ``MyTickets``, this represents the Help Scout '
'user to which this folder belongs. Otherwise it is empty.',
)
total_count = properties.Integer(
'Total number of conversations in this folder.',
)
active_count = properties.Integer(
'Total number of conversations in this folder that are in an active '
'state (vs pending).',
)
modified_at = properties.DateTime(
'UTC time when this folder was modified.',
)
|
# -*- coding: utf-8 -*-
# Copyright 2017-TODAY LasLabs Inc.
# License MIT (https://opensource.org/licenses/MIT).
import properties
from .. import BaseModel
class Folder(BaseModel):
name = properties.String(
'Folder name',
required=True,
)
type = properties.StringChoice(
'The type of folder.',
choices=['needsattention',
'drafts',
'assigned',
'open',
'closed',
'spam',
'mine',
'team',
],
default='drafts',
required=True,
)
user_id = properties.Integer(
'If the folder type is ``MyTickets``, this represents the Help Scout '
'user to which this folder belongs. Otherwise it is empty.',
)
total_count = properties.Integer(
'Total number of conversations in this folder.',
)
active_count = properties.Integer(
'Total number of conversations in this folder that are in an active '
'state (vs pending).',
)
modified_at = properties.DateTime(
'UTC time when this folder was modified.',
)
|
Add 'team' to Folder type options
|
[ADD] Add 'team' to Folder type options
|
Python
|
mit
|
LasLabs/python-helpscout
|
bccfc6d3c0035e2a5668607ebb7dd6047ee1942f
|
kokki/cookbooks/mdadm/recipes/default.py
|
kokki/cookbooks/mdadm/recipes/default.py
|
from kokki import *
if env.config.mdadm.arrays:
Package("mdadm")
Execute("mdadm-update-conf",
action = "nothing",
command = ("("
"echo DEVICE partitions > /etc/mdadm/mdadm.conf"
"; mdadm --detail --scan >> /etc/mdadm/mdadm.conf"
")"
))
for array in env.config.mdadm.arrays:
env.cookbooks.mdadm.Array(**array)
if array.get('fstype'):
if array['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % dict(fstype=array['fstype'], device=array['name']),
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % dict(device=array['name']))
if array.get('mount_point'):
Mount(array['mount_point'],
device = array['name'],
fstype = array['fstype'],
options = array['fsoptions'] if array.get('fsoptions') is not None else ["noatime"],
action = ["mount", "enable"])
|
from kokki import *
if env.config.mdadm.arrays:
Package("mdadm")
Execute("mdadm-update-conf",
action = "nothing",
command = ("("
"echo DEVICE partitions > /etc/mdadm/mdadm.conf"
"; mdadm --detail --scan >> /etc/mdadm/mdadm.conf"
")"
))
for array in env.config.mdadm.arrays:
fstype = array.pop('fstype', None)
fsoptions = array.pop('fsoptions', None)
mount_point = array.pop('mount_point', None)
env.cookbooks.mdadm.Array(**array)
if fstype:
if fstype == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % dict(fstype=fstype, device=array['name']),
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % dict(device=array['name']))
if mount_point:
Mount(mount_point,
device = array['name'],
fstype = fstype,
options = fsoptions if fsoptions is not None else ["noatime"],
action = ["mount", "enable"])
|
Fix to mounting mdadm raid arrays
|
Fix to mounting mdadm raid arrays
|
Python
|
bsd-3-clause
|
samuel/kokki
|
4f2e23fe260e5f061d7d57821908492f14a2c56a
|
withtool/subprocess.py
|
withtool/subprocess.py
|
import subprocess
def run(command):
try:
subprocess.check_call(command, shell=True)
except:
pass
|
import subprocess
def run(command):
try:
subprocess.check_call(command, shell=True)
except Exception:
pass
|
Set expected exception class in "except" block
|
Set expected exception class in "except" block
Fix issue E722 of flake8
|
Python
|
mit
|
renanivo/with
|
ff308a17c79fe2c27dcb2a1f888ee1332f6fdc11
|
events.py
|
events.py
|
# encoding: utf-8
import Natural.util as util
import sublime, sublime_plugin
class PerformEventListener(sublime_plugin.EventListener):
"""Suggest subroutine completions for the perform statement."""
def on_query_completions(self, view, prefix, points):
if not util.is_natural_file(view):
return None
texts = util.text_preceding_points(view, points)
if all([text.strip().endswith('perform') for text in texts]):
subroutines = util.find_text_by_selector(view,
'entity.name.function.natural')
if not subroutines:
return None
subroutines.sort()
completions = [[sub, sub] for sub in subroutines]
return (completions, sublime.INHIBIT_WORD_COMPLETIONS)
|
# encoding: utf-8
import Natural.util as util
import sublime, sublime_plugin
class PerformEventListener(sublime_plugin.EventListener):
"""Suggest subroutine completions for the perform statement."""
def on_query_completions(self, view, prefix, points):
if not util.is_natural_file(view):
return None
texts = util.text_preceding_points(view, points)
if all([text.strip().endswith('perform') for text in texts]):
subroutines = util.find_text_by_selector(view,
'entity.name.function.natural')
if not subroutines:
return None
subroutines.sort()
completions = [[sub, sub] for sub in subroutines]
return (completions, sublime.INHIBIT_WORD_COMPLETIONS)
class AddRulerToColumn72Listener(sublime_plugin.EventListener):
"""Add a ruler to column 72 when a Natural file is opened. If the user has
other rulers, they're not messed with."""
def on_load(self, view):
if not util.is_natural_file(view):
return
rulers = view.settings().get('rulers')
if 72 not in rulers:
rulers.append(72)
rulers.sort()
view.settings().set('rulers', rulers)
|
Add a ruler to column 72
|
Add a ruler to column 72
|
Python
|
mit
|
andref/Unnatural-Sublime-Package
|
5a2f848badcdf9bf968e23cfb55f53eb023d18a4
|
tests/helper.py
|
tests/helper.py
|
import unittest
import os
import yaml
from functools import wraps
from cmd import init_db, seed_db
from models import db
from scuevals_api import create_app
class TestCase(unittest.TestCase):
def setUp(self):
app = create_app()
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['TEST_DATABASE_URL']
app.testing = True
self.appx = app
self.app = app.test_client()
with app.app_context():
init_db(app, db)
seed_db(db)
def tearDown(self):
with self.appx.app_context():
db.session.remove()
db.drop_all()
def use_data(file):
def use_data_decorator(f):
@wraps(f)
def wrapper(*args):
with open(os.path.join('fixtures/data', file), 'r') as stream:
data = yaml.load(stream)
args = args + (data, )
return f(*args)
return wrapper
return use_data_decorator
|
import unittest
import os
import yaml
from functools import wraps
from flask_jwt_simple import create_jwt
from cmd import init_db, seed_db
from models import db, Student
from scuevals_api import create_app
class TestCase(unittest.TestCase):
def setUp(self):
app = create_app()
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['TEST_DATABASE_URL']
app.testing = True
self.appx = app
self.app = app.test_client()
student = Student(
id=0,
email='jdoe@scu.edu',
first_name='John',
last_name='Doe',
university_id=1
)
ident = {
'id': student.id,
'email': student.email,
'first_name': student.first_name,
'last_name': student.last_name
}
with app.app_context():
db.drop_all()
init_db(app, db)
seed_db(db)
db.session.add(student)
db.session.commit()
self.jwt = create_jwt(identity=ident)
def tearDown(self):
with self.appx.app_context():
db.session.remove()
db.drop_all()
def use_data(file):
def use_data_decorator(f):
@wraps(f)
def wrapper(*args):
with open(os.path.join('fixtures/data', file), 'r') as stream:
data = yaml.load(stream)
args = args + (data, )
return f(*args)
return wrapper
return use_data_decorator
|
Add authentication to base TestCase
|
Add authentication to base TestCase
|
Python
|
agpl-3.0
|
SCUEvals/scuevals-api,SCUEvals/scuevals-api
|
9ed0848a869fc3a4e16890af609259d18b622056
|
ideascaly/utils.py
|
ideascaly/utils.py
|
# IdeaScaly
# Copyright 2015 Jorge Saldivar
# See LICENSE for details.
import six
import dateutil.parser
from datetime import datetime
def parse_datetime(str_date):
date_is = dateutil.parser.parse(str_date)
return date_is
def parse_html_value(html):
return html[html.find('>')+1:html.rfind('<')]
def parse_a_href(atag):
start = atag.find('"') + 1
end = atag.find('"', start)
return atag[start:end]
def convert_to_utf8_str(arg):
# written by Michael Norton (http://docondev.blogspot.com/)
if isinstance(arg, six.text_type):
arg = arg.encode('utf-8')
elif not isinstance(arg, bytes):
arg = six.text_type(arg).encode('utf-8')
return arg
def import_simplejson():
try:
import simplejson as json
except ImportError:
try:
import json # Python 2.6+
except ImportError:
raise ImportError("Can't load a json library")
return json
|
# IdeaScaly
# Copyright 2015 Jorge Saldivar
# See LICENSE for details.
import six
import dateutil.parser
from datetime import datetime
def parse_datetime(str_date):
try:
date_is = dateutil.parser.parse(str_date)
return date_is
except:
print("Invalid date: %s" % str_date)
return None
def parse_html_value(html):
return html[html.find('>')+1:html.rfind('<')]
def parse_a_href(atag):
start = atag.find('"') + 1
end = atag.find('"', start)
return atag[start:end]
def convert_to_utf8_str(arg):
# written by Michael Norton (http://docondev.blogspot.com/)
if isinstance(arg, six.text_type):
arg = arg.encode('utf-8')
elif not isinstance(arg, bytes):
arg = six.text_type(arg).encode('utf-8')
return arg
def import_simplejson():
try:
import simplejson as json
except ImportError:
try:
import json # Python 2.6+
except ImportError:
raise ImportError("Can't load a json library")
return json
|
Add try except to the method that parse idea datetime
|
Add try except to the method that parse idea datetime
|
Python
|
mit
|
joausaga/ideascaly
|
811421407379dedc217795000f6f2cbe54510f96
|
kolibri/core/utils/urls.py
|
kolibri/core/utils/urls.py
|
from django.urls import reverse
from six.moves.urllib.parse import urljoin
from kolibri.utils.conf import OPTIONS
def reverse_remote(
baseurl, viewname, urlconf=None, args=None, kwargs=None, current_app=None
):
# Get the reversed URL
reversed_url = reverse(
viewname, urlconf=urlconf, args=args, kwargs=kwargs, current_app=current_app
)
# Remove any configured URL prefix from the URL that is specific to this deployment
reversed_url = reversed_url.replace(OPTIONS["Deployment"]["URL_PATH_PREFIX"], "")
# Join the URL to baseurl, but remove any leading "/" to ensure that if there is a path prefix on baseurl
# it doesn't get ignored by the urljoin (which it would if the reversed_url had a leading '/',
# as it would be read as an absolute path)
return urljoin(baseurl, reversed_url.lstrip("/"))
|
from django.urls import reverse
from six.moves.urllib.parse import urljoin
from kolibri.utils.conf import OPTIONS
def reverse_remote(
baseurl, viewname, urlconf=None, args=None, kwargs=None, current_app=None
):
# Get the reversed URL
reversed_url = reverse(
viewname, urlconf=urlconf, args=args, kwargs=kwargs, current_app=current_app
)
# Remove any configured URL prefix from the URL that is specific to this deployment
prefix_length = len(OPTIONS["Deployment"]["URL_PATH_PREFIX"])
reversed_url = reversed_url[prefix_length:]
# Join the URL to baseurl, but remove any leading "/" to ensure that if there is a path prefix on baseurl
# it doesn't get ignored by the urljoin (which it would if the reversed_url had a leading '/',
# as it would be read as an absolute path)
return urljoin(baseurl, reversed_url.lstrip("/"))
|
Truncate rather than replace to prevent erroneous substitutions.
|
Truncate rather than replace to prevent erroneous substitutions.
|
Python
|
mit
|
learningequality/kolibri,learningequality/kolibri,learningequality/kolibri,learningequality/kolibri
|
fe873844448d4123520e9bd6afe3231d4c952850
|
job_runner/wsgi.py
|
job_runner/wsgi.py
|
"""
WSGI config for job_runner project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "job_runner.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
"""
WSGI config for job_runner project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "job_runner.settings.env.development")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
Update default settings to development.
|
Update default settings to development.
|
Python
|
bsd-3-clause
|
spilgames/job-runner,spilgames/job-runner
|
741133b4fa502fc585c771abef96b6213d3f5214
|
pyheufybot/modules/nickservidentify.py
|
pyheufybot/modules/nickservidentify.py
|
from module_interface import Module, ModuleType
from message import IRCResponse, ResponseType
from pyheufybot import globalvars
class NickServIdentify(Module):
def __init__(self):
self.moduleType = ModuleType.PASSIVE
self.messageTypes = ["USER"]
self.helpText = "Attempts to log into NickServ with the password in the config"
def execute(self, message, serverInfo):
config = globalvars.botHandler.factories[serverInfo.name].config
passwordType = config.getSettingWithDefault("passwordType", None)
password = config.getSettingWithDefault("password", "")
if passwordType == "NickServ":
return [ IRCResponse("NickServ", password, responseType.MESSAGE) ]
else:
return []
|
from pyheufybot.module_interface import Module, ModuleType
from pyheufybot.message import IRCResponse, ResponseType
from pyheufybot import globalvars
class NickServIdentify(Module):
def __init__(self):
self.moduleType = ModuleType.PASSIVE
self.messageTypes = ["USER"]
self.helpText = "Attempts to log into NickServ with the password in the config"
def execute(self, message, serverInfo):
config = globalvars.botHandler.factories[serverInfo.name].config
passwordType = config.getSettingWithDefault("passwordType", None)
password = config.getSettingWithDefault("password", "")
if passwordType == "NickServ":
return [ IRCResponse("NickServ", "IDENTIFY " + password, responseType.MESSAGE) ]
else:
return []
|
Fix the syntax for NickServ logins
|
Fix the syntax for NickServ logins
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
34bd55b33e865c65386f934c7ac0b89f3cc76485
|
edgedb/lang/common/shell/reqs.py
|
edgedb/lang/common/shell/reqs.py
|
##
# Copyright (c) 2010 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
from metamagic import app
from metamagic.exceptions import MetamagicError
class UnsatisfiedRequirementError(MetamagicError):
pass
class CommandRequirement:
pass
class ValidApplication(CommandRequirement):
def __init__(self, args):
if not app.Application.active:
raise UnsatisfiedRequirementError('need active Application')
|
##
# Copyright (c) 2010 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
from metamagic.exceptions import MetamagicError
class UnsatisfiedRequirementError(MetamagicError):
pass
class CommandRequirement:
pass
|
Drop 'metamagic.app' package. Long live Node.
|
app: Drop 'metamagic.app' package. Long live Node.
|
Python
|
apache-2.0
|
edgedb/edgedb,edgedb/edgedb,edgedb/edgedb
|
63a7b11d3ae51a944bf2e70637dea503e455c2f5
|
fontdump/cli.py
|
fontdump/cli.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-#
from collections import OrderedDict
import requests
import cssutils
USER_AGENTS = OrderedDict()
USER_AGENTS['woff'] = 'Mozilla/5.0 AppleWebKit/537.36 Chrome/30', # Chrome
USER_AGENTS['ttf'] = 'Mozilla/5.0 (Linux; U; Android 2.1-update1;)', #Andord 2
USER_AGENTS['eot'] = 'Mozilla/4.0 (compatible; MSIE 6.0;)', # IE6
USER_AGENTS['woff'] = 'Mozilla/4.0 (iPad; CPU OS 4_0_1 ) AppleWebKit', #iOS<4.2
def main():
font_url = 'http://fonts.googleapis.com/css?family=Open+Sans:300,400,700,800|Dosis:300,400'
sheets={}
for (format, ua) in USER_AGENTS.items():
headers = {
'User-Agent': ua,
}
r =requests.get(font_url, headers=headers)
sheets[format] = cssutils.parseString(r.content)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-#
import requests
import cssutils
USER_AGENTS = {
'woff': 'Mozilla/5.0 AppleWebKit/537.36 Chrome/30', # Chrome
'eot': 'Mozilla/4.0 (compatible; MSIE 6.0;)', # IE6
'ttf': 'Mozilla/5.0 (Linux; U; Android 2.1-update1;)', #Andord 2
'svg': 'Mozilla/4.0 (iPad; CPU OS 4_0_1 ) AppleWebKit', #iOS<4.2
}
def main():
font_url = 'http://fonts.googleapis.com/css?family=Open+Sans:300,400,700,800|Dosis:300,400'
sheets={}
for (format, ua) in USER_AGENTS.items():
headers = {
'User-Agent': ua,
}
r =requests.get(font_url, headers=headers)
sheets[format] = cssutils.parseString(r.content)
if __name__ == '__main__':
main()
|
Revert "The order of the formats matters. Use OrderedDict instead of dict"
|
Revert "The order of the formats matters. Use OrderedDict instead of dict"
I can't rely on the order of dict. The control flow is more complex.
This reverts commit 3389ed71971ddacd185bbbf8fe667a8651108c70.
|
Python
|
mit
|
glasslion/fontdump
|
ff9d613897774f3125f2b28905528962b1761deb
|
core/timeline.py
|
core/timeline.py
|
from collections import Counter
from matplotlib import pyplot as plt
import datetime
def create_timeline( data ):
if len(data) == 0:
print "Dataset empty."
return
dates = map( lambda d: d['date'], data )
timeline_data = Counter( dates )
x_axis = sorted( timeline_data )
y_axis = []
for date in x_axis:
y_axis.append( timeline_data[date] )
plt.plot_date( x = x_axis, y = y_axis, fmt = "r-" )
ymin, ymax = plt.ylim()
plt.ylim( 0, ymax + 1 )
|
from collections import Counter
from matplotlib import pyplot as plt
import datetime
import data_loader
def create_timeline( data ):
if len(data) == 0:
print "Dataset empty."
return
dates = map( lambda d: d['date'], data )
timeline_data = Counter( dates )
x_axis = sorted( timeline_data )
y_axis = []
for date in x_axis:
y_axis.append( timeline_data[date] )
plt.plot_date( x = x_axis, y = y_axis, fmt = "r-" )
ymin, ymax = plt.ylim()
plt.ylim( 0, ymax + 1 )
if __name__ == '__main__':
data = data_loader.load_facebook()
create_timeline(data)
plt.show()
|
Add standalone method for using from command line
|
Add standalone method for using from command line
|
Python
|
mit
|
HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core
|
62f9608d50898d0a82e013d54454ed1edb004cff
|
fab_deploy/joyent/setup.py
|
fab_deploy/joyent/setup.py
|
from fabric.api import run, sudo
from fabric.contrib.files import append
from fab_deploy.base import setup as base_setup
class JoyentMixin(object):
def _set_profile(self):
append('/etc/profile', 'CC="gcc -m64"; export CC', use_sudo=True)
append('/etc/profile', 'LDSHARED="gcc -m64 -G"; export LDSHARED', use_sudo=True)
def _ssh_restart(self):
run('svcadm restart ssh')
class AppMixin(JoyentMixin):
packages = ['python27', 'py27-psycopg2', 'py27-setuptools',
'py27-imaging', 'py27-expat']
def _install_packages(self):
for package in self.packages:
sudo('pkg_add %s' % package)
sudo('easy_install-2.7 pip')
self._install_venv()
class LBSetup(JoyentMixin, base_setup.LBSetup):
pass
class AppSetup(AppMixin, base_setup.AppSetup):
pass
class DBSetup(JoyentMixin, base_setup.DBSetup):
pass
class SlaveSetup(JoyentMixin, base_setup.SlaveSetup):
pass
class DevSetup(AppMixin, base_setup.DevSetup):
pass
app_server = AppSetup()
lb_server = LBSetup()
dev_server = DevSetup()
db_server = DBSetup()
slave_db = SlaveSetup()
|
from fabric.api import run, sudo
from fabric.contrib.files import append
from fab_deploy.base import setup as base_setup
class JoyentMixin(object):
def _set_profile(self):
append('/etc/profile', 'CC="gcc -m64"; export CC', use_sudo=True)
append('/etc/profile', 'LDSHARED="gcc -m64 -G"; export LDSHARED', use_sudo=True)
def _ssh_restart(self):
run('svcadm restart ssh')
class AppMixin(JoyentMixin):
packages = ['python27', 'py27-psycopg2', 'py27-setuptools',
'py27-imaging', 'py27-expat']
def _set_profile(self):
JoyentMixin._set_profile(self)
base_setup.AppSetup._set_profile(self)
def _install_packages(self):
for package in self.packages:
sudo('pkg_add %s' % package)
sudo('easy_install-2.7 pip')
self._install_venv()
class LBSetup(JoyentMixin, base_setup.LBSetup):
pass
class AppSetup(AppMixin, base_setup.AppSetup):
pass
class DBSetup(JoyentMixin, base_setup.DBSetup):
pass
class SlaveSetup(JoyentMixin, base_setup.SlaveSetup):
pass
class DevSetup(AppMixin, base_setup.DevSetup):
pass
app_server = AppSetup()
lb_server = LBSetup()
dev_server = DevSetup()
db_server = DBSetup()
slave_db = SlaveSetup()
|
Add environ vars for joyent
|
Add environ vars for joyent
|
Python
|
mit
|
ff0000/red-fab-deploy2,ff0000/red-fab-deploy2,ff0000/red-fab-deploy2
|
fc3cf6966f66f0929c48b1f24bede295fb3aec35
|
Wahji-dev/setup/removeWahji.py
|
Wahji-dev/setup/removeWahji.py
|
#deletes wahji content
import os, shutil, platform
def rem(loc):
os.chdir(loc)
print "deleting content"
"""delete them folder and its contents"""
shutil.rmtree("themes")
"""delete .wahji file"""
os.remove(".wahji")
"""delete 4040.html file"""
os.remove("404.html")
"""delete content folder"""
shutil.rmtree("content")
|
#deletes wahji content
import os, shutil, platform
def rem(loc):
os.chdir(loc)
site = raw_input("Input site folder: ")
print "Are you sure you want to delete", site, "Y/N: "
confirm = raw_input()
if confirm == "Y" or confirm == "y":
"""delete site folder"""
shutil.rmtree(site)
print "Deleting site"
elif confirm == "N" or confirm == "n":
print "Site folder was not deleted"
|
Remove now asks for site file to be deleted
|
Remove now asks for site file to be deleted
|
Python
|
mit
|
mborn319/Wahji,mborn319/Wahji,mborn319/Wahji
|
6b55f079d595700cd84d12d4f351e52614d291c5
|
openacademy/model/openacademy_session.py
|
openacademy/model/openacademy_session.py
|
# -*- coding: utf-8 -*-
from openerp import fields,models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6,2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string='Attendees')
|
# -*- coding: utf-8 -*-
from openerp import fields,models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6,2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain = ['|',
('instructor', '=', True),
('category_id.name', 'ilike', "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string='Attendees')
|
Add domain ir and ilike
|
[REF] openacademy: Add domain ir and ilike
|
Python
|
apache-2.0
|
arogel/openacademy-project
|
91d104a25db499ccef54878dcbfce42dbb4aa932
|
taskin/task.py
|
taskin/task.py
|
import abc
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
class MapTask(object):
def __init__(self, args, task):
self.args = args
self.task = task
self.pool = Pool(cpu_count())
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
|
from multiprocessing import Pool as ProcessPool
from multiprocessing.dummy import Pool as ThreadPool
from multiprocessing import cpu_count
def do_flow(flow, result=None):
for item in flow:
print(item, result)
result = item(result)
return result
class PoolAPI(object):
def map(self, *args, **kw):
return self.pool.map(*args, **kw)
class ThreadPool(PoolAPI):
def __init__(self, size=20):
self.size = size
self.pool = ThreadPool(self.size)
class ProcessPool(PoolAPI):
def __init__(self, size=None):
self.size = size or cpu_count()
self.pool = ProcessPool(self.size)
class MapTask(object):
pool_types = [
'thread', 'process'
]
def __init__(self, args, task, pool):
self.args = args
self.task = task
self.pool = pool
def iter_input(self, input):
for args in self.args:
if not isinstance(args, (tuple, list)):
args = [args]
yield tuple([input] + args)
def __call__(self, input):
return self.pool.map(self.task, self.iter_input(input))
class IfTask(object):
def __init__(self, check, a, b):
self.check = check
self.a = a
self.b = b
def __call__(self, input):
if check(input):
return do_flow(self.a, input)
return do_flow(self.b, input)
|
Add totally untested pools ;)
|
Add totally untested pools ;)
|
Python
|
bsd-3-clause
|
ionrock/taskin
|
247c4dcaf3e1c1f9c069ab8a2fc06cfcd75f8ea9
|
UM/Util.py
|
UM/Util.py
|
# Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
## Convert a value to a boolean
#
# \param \type{bool|str|int} any value.
# \return \type{bool}
def parseBool(value):
return value in [True, "True", "true", 1]
|
# Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
## Convert a value to a boolean
#
# \param \type{bool|str|int} any value.
# \return \type{bool}
def parseBool(value):
return value in [True, "True", "true", "Yes", "yes", 1]
|
Add "Yes" as an option for parsing bools
|
Add "Yes" as an option for parsing bools
CURA-2204
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
6e3ddfc47487a8841a79d6265c96ba63005fccec
|
bnw_handlers/command_onoff.py
|
bnw_handlers/command_onoff.py
|
# -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import bnw_core.bnw_objects as objs
@require_auth
@defer.inlineCallbacks
def cmd_on(request):
""" Включение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True)
if request.user['off']:
defer.returnValue(
dict(ok=True,desc='Welcome back!')
)
else:
defer.returnValue(
dict(ok=True,desc='Welcoooome baaaack, I said.')
)
@require_auth
@defer.inlineCallbacks
def cmd_off(request):
""" Выключение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True)
if request.user['off']:
defer.returnValue(
dict(ok=True,desc='See you later.')
)
else:
defer.returnValue(
dict(ok=True,desc='C u l8r!')
)
|
# -*- coding: utf-8 -*-
#from twisted.words.xish import domish
from base import *
import random
import bnw_core.bnw_objects as objs
@require_auth
@defer.inlineCallbacks
def cmd_on(request):
""" Включение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True)
if request.user.get('off',False):
defer.returnValue(
dict(ok=True,desc='Welcome back!')
)
else:
defer.returnValue(
dict(ok=True,desc='Welcoooome baaaack, I said.')
)
@require_auth
@defer.inlineCallbacks
def cmd_off(request):
""" Выключение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True)
if request.user.get('off',False):
defer.returnValue(
dict(ok=True,desc='See you later.')
)
else:
defer.returnValue(
dict(ok=True,desc='C u l8r!')
)
|
Fix on/off if there is no 'off' field.
|
Fix on/off if there is no 'off' field.
|
Python
|
bsd-2-clause
|
un-def/bnw,stiletto/bnw,un-def/bnw,stiletto/bnw,ojab/bnw,ojab/bnw,stiletto/bnw,un-def/bnw,ojab/bnw,stiletto/bnw,un-def/bnw,ojab/bnw
|
bbef6c5f235fc3320c9c748a32cb3af04d3903d1
|
list_all_users_in_group.py
|
list_all_users_in_group.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except (KeyError):
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import grp
import pwd
import inspect
import argparse
def list_all_users_in_group(groupname):
"""Get list of all users of group.
Get sorted list of all users of group GROUP,
including users with main group GROUP.
Origin in https://github.com/vazhnov/list_all_users_in_group
"""
try:
group = grp.getgrnam(groupname)
# On error "KeyError: 'getgrnam(): name not found: GROUP'"
except KeyError:
return None
group_all_users_set = set(group.gr_mem)
for user in pwd.getpwall():
if user.pw_gid == group.gr_gid:
group_all_users_set.add(user.pw_name)
return sorted(group_all_users_set)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=inspect.getdoc(list_all_users_in_group),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-d', '--delimiter', default='\n', help='Use DELIMITER instead of newline for users delimiter')
parser.add_argument('groupname', help='Group name')
args = parser.parse_args()
result = list_all_users_in_group(args.groupname)
if result:
print (args.delimiter.join(result))
|
Fix pylint: Unnecessary parens after u'print' keyword (superfluous-parens)
|
Fix pylint: Unnecessary parens after u'print' keyword (superfluous-parens)
|
Python
|
cc0-1.0
|
vazhnov/list_all_users_in_group
|
82e82805eae5b070aec49816977d2f50ff274d30
|
controllers/api/api_match_controller.py
|
controllers/api/api_match_controller.py
|
import json
import webapp2
from controllers.api.api_base_controller import ApiBaseController
from helpers.model_to_dict import ModelToDict
from models.match import Match
class ApiMatchControllerBase(ApiBaseController):
CACHE_KEY_FORMAT = "apiv2_match_controller_{}" # (match_key)
CACHE_VERSION = 2
CACHE_HEADER_LENGTH = 60 * 60
def __init__(self, *args, **kw):
super(ApiMatchControllerBase, self).__init__(*args, **kw)
self.match_key = self.request.route_kwargs["match_key"]
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.match_key)
@property
def _validators(self):
return [("match_id_validator", self.match_key)]
def _set_match(self, match_key):
self.match = Match.get_by_id(match_key)
if self.match is None:
self._errors = json.dumps({"404": "%s match not found" % self.match_key})
self.abort(404)
class ApiMatchController(ApiMatchControllerBase):
def _track_call(self, match_key):
self._track_call_defer('match', match_key)
def _render(self, match_key):
self._set_match(match_key)
match_dict = ModelToDict.matchConverter(self.match)
return json.dumps(match_dict, ensure_ascii=True)
|
import json
import webapp2
from controllers.api.api_base_controller import ApiBaseController
from helpers.model_to_dict import ModelToDict
from models.match import Match
class ApiMatchControllerBase(ApiBaseController):
def __init__(self, *args, **kw):
super(ApiMatchControllerBase, self).__init__(*args, **kw)
self.match_key = self.request.route_kwargs["match_key"]
@property
def _validators(self):
return [("match_id_validator", self.match_key)]
def _set_match(self, match_key):
self.match = Match.get_by_id(match_key)
if self.match is None:
self._errors = json.dumps({"404": "%s match not found" % self.match_key})
self.abort(404)
class ApiMatchController(ApiMatchControllerBase):
CACHE_KEY_FORMAT = "apiv2_match_controller_{}" # (match_key)
CACHE_VERSION = 2
CACHE_HEADER_LENGTH = 60 * 60
def __init__(self, *args, **kw):
super(ApiMatchController, self).__init__(*args, **kw)
self._partial_cache_key = self.CACHE_KEY_FORMAT.format(self.match_key)
def _track_call(self, match_key):
self._track_call_defer('match', match_key)
def _render(self, match_key):
self._set_match(match_key)
match_dict = ModelToDict.matchConverter(self.match)
return json.dumps(match_dict, ensure_ascii=True)
|
Move cache key out of base class
|
Move cache key out of base class
|
Python
|
mit
|
josephbisch/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,josephbisch/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,bvisness/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,josephbisch/the-blue-alliance,jaredhasenklein/the-blue-alliance,1fish2/the-blue-alliance,1fish2/the-blue-alliance,1fish2/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,josephbisch/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,1fish2/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance
|
b2771e6b33bd889c971b77e1b30c1cc5a0b9eb24
|
platform.py
|
platform.py
|
# Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.managers.platform import PlatformBase
class Ststm32Platform(PlatformBase):
def configure_default_packages(self, variables, targets):
board = variables.get("board")
if "mbed" in variables.get("pioframework",
[]) or board == "mxchip_az3166":
self.packages['toolchain-gccarmnoneeabi'][
'version'] = ">=1.60301.0"
if board == "mxchip_az3166":
self.frameworks['arduino'][
'script'] = "builder/frameworks/arduino/mxchip.py"
self.packages['tool-openocd']['type'] = "uploader"
return PlatformBase.configure_default_packages(self, variables,
targets)
|
# Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.managers.platform import PlatformBase
class Ststm32Platform(PlatformBase):
def configure_default_packages(self, variables, targets):
board = variables.get("board")
if "mbed" in variables.get("pioframework",
[]) or board == "mxchip_az3166":
self.packages['toolchain-gccarmnoneeabi'][
'version'] = ">=1.60301.0"
if board == "mxchip_az3166":
self.frameworks['arduino'][
'package'] = "framework-arduinostm32mxchip"
self.frameworks['arduino'][
'script'] = "builder/frameworks/arduino/mxchip.py"
self.packages['tool-openocd']['type'] = "uploader"
return PlatformBase.configure_default_packages(self, variables,
targets)
|
Use appropriate package for mxchip_az3166
|
Use appropriate package for mxchip_az3166
|
Python
|
apache-2.0
|
platformio/platform-ststm32,platformio/platform-ststm32
|
1096d0f13ebbc5900c21626a5caf6276b36229d8
|
Lib/test/test_coding.py
|
Lib/test/test_coding.py
|
import test.test_support, unittest
import os
class CodingTest(unittest.TestCase):
def test_bad_coding(self):
module_name = 'bad_coding'
self.verify_bad_module(module_name)
def test_bad_coding2(self):
module_name = 'bad_coding2'
self.verify_bad_module(module_name)
def verify_bad_module(self, module_name):
self.assertRaises(SyntaxError, __import__, 'test.' + module_name)
path = os.path.dirname(__file__)
filename = os.path.join(path, module_name + '.py')
fp = open(filename)
text = fp.read()
fp.close()
self.assertRaises(SyntaxError, compile, text, filename, 'exec')
def test_exec_valid_coding(self):
d = {}
exec('# coding: cp949\na = 5\n', d)
self.assertEqual(d['a'], 5)
def test_main():
test.test_support.run_unittest(CodingTest)
if __name__ == "__main__":
test_main()
|
import test.test_support, unittest
import os
class CodingTest(unittest.TestCase):
def test_bad_coding(self):
module_name = 'bad_coding'
self.verify_bad_module(module_name)
def test_bad_coding2(self):
module_name = 'bad_coding2'
self.verify_bad_module(module_name)
def verify_bad_module(self, module_name):
self.assertRaises(SyntaxError, __import__, 'test.' + module_name)
path = os.path.dirname(__file__)
filename = os.path.join(path, module_name + '.py')
fp = open(filename, encoding='utf-8')
text = fp.read()
fp.close()
self.assertRaises(SyntaxError, compile, text, filename, 'exec')
def test_exec_valid_coding(self):
d = {}
exec('# coding: cp949\na = 5\n', d)
self.assertEqual(d['a'], 5)
def test_main():
test.test_support.run_unittest(CodingTest)
if __name__ == "__main__":
test_main()
|
Fix a test failure on non-UTF-8 locales: bad_coding2.py is encoded in utf-8.
|
Fix a test failure on non-UTF-8 locales: bad_coding2.py is encoded
in utf-8.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
37588d466928e9f25b55d627772120f16df095ec
|
model_presenter.py
|
model_presenter.py
|
import matplotlib
# Do not use X for plotting
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.dates import DateFormatter
from matplotlib.ticker import FormatStrFormatter
from tempfile import NamedTemporaryFile
def plot_to_file(symbol, timestamp, close, score):
fig, ax1 = plt.subplots()
ax2 = ax1.twinx()
ax1.plot(timestamp, close, color='r', marker='.', label="close")
ax2.plot(timestamp, score, color='b', marker='.', label="score")
plt.title("%s: score %0.2f" % (symbol, score[-1]))
fig.autofmt_xdate()
ax1.xaxis.set_major_formatter(DateFormatter("%H:%M"))
ax1.yaxis.set_major_formatter(FormatStrFormatter('%.2f'))
h1, l1 = ax1.get_legend_handles_labels()
h2, l2 = ax2.get_legend_handles_labels()
ax1.legend(h1 + h2, l1 + l2)
png_file = NamedTemporaryFile(delete=False, suffix='.png')
png_file.close()
fig.set_dpi(100)
fig.set_size_inches(10, 4)
fig.set_tight_layout(True)
fig.savefig(png_file.name)
plt.close(fig)
return png_file.name
|
import matplotlib
# Do not use X for plotting
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.dates import DateFormatter
from matplotlib.ticker import FormatStrFormatter
from tempfile import NamedTemporaryFile
def plot_to_file(symbol, timestamp, close, score):
fig, ax1 = plt.subplots()
ax2 = ax1.twinx()
ax1.plot(timestamp, close, color='r', marker='.', label="close")
ax2.plot(timestamp, score, color='b', marker='.', label="score")
plt.title("%s: score %0.2f" % (symbol, score[-1]))
fig.autofmt_xdate()
ax1.xaxis.set_major_formatter(DateFormatter("%H:%M"))
ax1.yaxis.set_major_formatter(FormatStrFormatter('%.2f'))
h1, l1 = ax1.get_legend_handles_labels()
h2, l2 = ax2.get_legend_handles_labels()
ax1.legend(h1 + h2, l1 + l2)
jpg_file = NamedTemporaryFile(delete=False, suffix='.jpg')
jpg_file.close()
fig.set_dpi(100)
fig.set_size_inches(10, 4)
fig.set_tight_layout(True)
fig.savefig(jpg_file.name, quality=50)
plt.close(fig)
return jpg_file.name
|
Compress the images to be sent
|
Compress the images to be sent
|
Python
|
mit
|
cjluo/money-monkey
|
9901044b2b3218714a3c807e982db518aa97a446
|
djangoautoconf/features/bae_settings.py
|
djangoautoconf/features/bae_settings.py
|
##################################
# Added for BAE
##################################
try:
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': const.CACHE_ADDR,
'TIMEOUT': 60,
}
}
except:
pass
try:
from bae.core import const
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': bae_secrets.database_name,
'USER': const.MYSQL_USER,
'PASSWORD': const.MYSQL_PASS,
'HOST': const.MYSQL_HOST,
'PORT': const.MYSQL_PORT,
}
}
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
###Or
#SESSION_ENGINE = 'django.contrib.sessions.backends.db'
##################################
except:
pass
EMAIL_BACKEND = 'django.core.mail.backends.bcms.EmailBackend'
try:
from objsys.baidu_mail import EmailBackend
EMAIL_BACKEND = 'objsys.baidu_mail.EmailBackend'
except:
EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
|
##################################
# Added for BAE
##################################
try:
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': const.CACHE_ADDR,
'TIMEOUT': 60,
}
}
except:
pass
try:
from bae.core import const
import bae_secrets
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': bae_secrets.database_name,
'USER': const.MYSQL_USER,
'PASSWORD': const.MYSQL_PASS,
'HOST': const.MYSQL_HOST,
'PORT': const.MYSQL_PORT,
}
}
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
###Or
#SESSION_ENGINE = 'django.contrib.sessions.backends.db'
##################################
except:
pass
EMAIL_BACKEND = 'django.core.mail.backends.bcms.EmailBackend'
try:
from objsys.baidu_mail import EmailBackend
EMAIL_BACKEND = 'objsys.baidu_mail.EmailBackend'
except:
EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
|
Move BAE secret into try catch block
|
Move BAE secret into try catch block
|
Python
|
bsd-3-clause
|
weijia/djangoautoconf,weijia/djangoautoconf
|
3ed807b44289c00d6a82b0c253f7ff8072336fdd
|
changes/jobs/cleanup_tasks.py
|
changes/jobs/cleanup_tasks.py
|
from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
from changes.models.task import Task
from changes.queue.task import TrackedTask
CHECK_TIME = timedelta(minutes=60)
EXPIRE_TIME = timedelta(days=7)
# NOTE: This isn't itself a TrackedTask, but probably should be.
def cleanup_tasks():
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if necessary.
Additionally remove any old Task entries which are completed.
"""
now = datetime.utcnow()
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < now - CHECK_TIME,
)
for task in pending_tasks:
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
Task.query.filter(
Task.status == Status.finished,
Task.date_modified < now - EXPIRE_TIME,
).delete()
|
from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue, statsreporter
from changes.constants import Status
from changes.models.task import Task
from changes.queue.task import TrackedTask
CHECK_TIME = timedelta(minutes=60)
EXPIRE_TIME = timedelta(days=7)
# NOTE: This isn't itself a TrackedTask, but probably should be.
@statsreporter.timer('task_duration_cleanup_task')
def cleanup_tasks():
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if necessary.
Additionally remove any old Task entries which are completed.
"""
now = datetime.utcnow()
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < now - CHECK_TIME,
)
for task in pending_tasks:
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
deleted = Task.query.filter(
Task.status == Status.finished,
Task.date_modified < now - EXPIRE_TIME,
# Filtering by date_created isn't necessary, but it allows us to filter using an index on
# a value that doesn't update, which makes our deletion more efficient.
Task.date_created < now - EXPIRE_TIME,
).delete(synchronize_session=False)
statsreporter.stats().incr('tasks_deleted', deleted)
|
Make periodic expired task deletion more efficient
|
Make periodic expired task deletion more efficient
Summary: Also adds tracking for the number of tasks deleted at each run.
Test Plan: None
Reviewers: paulruan
Reviewed By: paulruan
Subscribers: changesbot, anupc
Differential Revision: https://tails.corp.dropbox.com/D232735
|
Python
|
apache-2.0
|
dropbox/changes,dropbox/changes,dropbox/changes,dropbox/changes
|
dd8c85a49a31693f43e6f6877a0657d63cbc1b01
|
auth0/v2/device_credentials.py
|
auth0/v2/device_credentials.py
|
from .rest import RestClient
class DeviceCredentials(object):
"""Auth0 connection endpoints
Args:
domain (str): Your Auth0 domain, e.g: 'username.auth0.com'
jwt_token (str): An API token created with your account's global
keys. You can create one by using the token generator in the
API Explorer: https://auth0.com/docs/api/v2
"""
def __init__(self, domain, jwt_token):
self.domain = domain
self.client = RestClient(jwt=jwt_token)
def _url(self, id=None):
url = 'https://%s/api/v2/device-credentials' % self.domain
if id is not None:
return url + '/' + id
return url
def get(self, user_id=None, client_id=None, type=None,
fields=[], include_fields=True):
params = {
'fields': ','.join(fields) or None,
'include_fields': str(include_fields).lower(),
'user_id': user_id,
'client_id': client_id,
'type': type,
}
return self.client.get(self._url(), params=params)
def create(self, body):
return self.client.post(self._url(), data=body)
def delete(self, id):
return self.client.delete(self._url(id))
|
from .rest import RestClient
class DeviceCredentials(object):
"""Auth0 connection endpoints
Args:
domain (str): Your Auth0 domain, e.g: 'username.auth0.com'
jwt_token (str): An API token created with your account's global
keys. You can create one by using the token generator in the
API Explorer: https://auth0.com/docs/api/v2
"""
def __init__(self, domain, jwt_token):
self.domain = domain
self.client = RestClient(jwt=jwt_token)
def _url(self, id=None):
url = 'https://%s/api/v2/device-credentials' % self.domain
if id is not None:
return url + '/' + id
return url
def get(self, user_id, client_id, type, fields=[], include_fields=True):
params = {
'fields': ','.join(fields) or None,
'include_fields': str(include_fields).lower(),
'user_id': user_id,
'client_id': client_id,
'type': type,
}
return self.client.get(self._url(), params=params)
def create(self, body):
return self.client.post(self._url(), data=body)
def delete(self, id):
return self.client.delete(self._url(id))
|
Remove default arguments for user_id, client_id and type
|
Remove default arguments for user_id, client_id and type
|
Python
|
mit
|
auth0/auth0-python,auth0/auth0-python
|
24ea32f71faab214a6f350d2d48b2f5715d8262d
|
manage.py
|
manage.py
|
from flask_restful import Api
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from app import app
from app import db
from app.auth import Register, Login
from app.bucketlist_api import BucketList, BucketListEntry
from app.bucketlist_items import BucketListItems, BucketListItemSingle
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
api = Api(app)
api.add_resource(Register, '/auth/register')
api.add_resource(Login, '/auth/login')
api.add_resource(BucketList, '/bucketlists')
api.add_resource(BucketListEntry, '/bucketlists/<int:bucketlist_id>')
api.add_resource(BucketListItems, '/bucketlists/<int:bucketlist_id>/items')
api.add_resource(BucketListItemSingle,
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
if __name__ == '__main__':
manager.run()
|
from flask_restful import Api
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from app import app, db
from app.auth import Register, Login
from app.bucketlist_api import BucketLists, BucketListSingle
from app.bucketlist_items import BucketListItems, BucketListItemSingle
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
api = Api(app)
api.add_resource(Register, '/auth/register')
api.add_resource(Login, '/auth/login')
api.add_resource(BucketLists, '/bucketlists')
api.add_resource(BucketListSingle, '/bucketlists/<int:bucketlist_id>')
api.add_resource(BucketListItems, '/bucketlists/<int:bucketlist_id>/items')
api.add_resource(BucketListItemSingle,
'/bucketlists/<int:bucketlist_id>/items/<int:item_id>')
if __name__ == '__main__':
manager.run()
|
Set urls for bucketlist items endpoints
|
Set urls for bucketlist items endpoints
|
Python
|
mit
|
andela-bmwenda/cp2-bucketlist-api
|
638a1c434ef8202774675581c3659511fa9d1cd3
|
vehicles/management/commands/import_edinburgh.py
|
vehicles/management/commands/import_edinburgh.py
|
from django.contrib.gis.geos import Point
from busstops.models import Service
from ...models import Vehicle, VehicleLocation, VehicleJourney
from ..import_live_vehicles import ImportLiveVehiclesCommand
class Command(ImportLiveVehiclesCommand):
url = 'http://tfeapp.com/live/vehicles.php'
source_name = 'TfE'
services = Service.objects.filter(operator__in=('LOTH', 'EDTR', 'ECBU', 'NELB'), current=True)
def get_journey(self, item):
journey = VehicleJourney(
code=item['journey_id'],
destination=item['destination']
)
vehicle_defaults = {}
try:
journey.service = self.services.get(line_name=item['service_name'])
vehicle_defaults['operator'] = journey.service.operator.first()
except (Service.DoesNotExist, Service.MultipleObjectsReturned) as e:
if item['service_name'] not in {'ET1', 'MA1', '3BBT'}:
print(e, item['service_name'])
vehicle_code = item['vehicle_id']
if vehicle_code.isdigit():
vehicle_defaults['fleet_number'] = vehicle_code
journey.vehicle, vehicle_created = Vehicle.objects.update_or_create(
vehicle_defaults,
source=self.source,
code=vehicle_code
)
return journey, vehicle_created
def create_vehicle_location(self, item):
return VehicleLocation(
latlong=Point(item['longitude'], item['latitude']),
heading=item['heading']
)
|
from django.contrib.gis.geos import Point
from busstops.models import Service
from ...models import Vehicle, VehicleLocation, VehicleJourney
from ..import_live_vehicles import ImportLiveVehiclesCommand
class Command(ImportLiveVehiclesCommand):
url = 'http://tfeapp.com/live/vehicles.php'
source_name = 'TfE'
services = Service.objects.filter(operator__in=('LOTH', 'EDTR', 'ECBU', 'NELB'), current=True)
def get_journey(self, item):
journey = VehicleJourney(
code=item['journey_id'] or '',
destination=item['destination'] or ''
)
vehicle_defaults = {}
try:
journey.service = self.services.get(line_name=item['service_name'])
vehicle_defaults['operator'] = journey.service.operator.first()
except (Service.DoesNotExist, Service.MultipleObjectsReturned) as e:
if item['service_name'] not in {'ET1', 'MA1', '3BBT'}:
print(e, item['service_name'])
vehicle_code = item['vehicle_id']
if vehicle_code.isdigit():
vehicle_defaults['fleet_number'] = vehicle_code
journey.vehicle, vehicle_created = Vehicle.objects.update_or_create(
vehicle_defaults,
source=self.source,
code=vehicle_code
)
return journey, vehicle_created
def create_vehicle_location(self, item):
return VehicleLocation(
latlong=Point(item['longitude'], item['latitude']),
heading=item['heading']
)
|
Fix null Edinburgh journey code or destination
|
Fix null Edinburgh journey code or destination
|
Python
|
mpl-2.0
|
jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk
|
c9da64ac1c90abdee8fc72488a4bef58a95aa7c6
|
biwako/bin/fields/compounds.py
|
biwako/bin/fields/compounds.py
|
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
instance_field = field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
return values
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
|
import io
from .base import Field, DynamicValue, FullyDecoded
class SubStructure(Field):
def __init__(self, structure, *args, **kwargs):
self.structure = structure
super(SubStructure, self).__init__(*args, **kwargs)
def read(self, file):
value = self.structure(file)
value_bytes = b''
# Force the evaluation of the entire structure in
# order to make sure other fields work properly
for field in self.structure._fields:
getattr(value, field.name)
value_bytes += value._raw_values[field.name]
raise FullyDecoded(value_bytes, value)
def encode(self, obj, value):
output = io.BytesIO()
value.save(output)
return output.getvalue()
class List(Field):
def __init__(self, field, *args, **kwargs):
super(List, self).__init__(*args, **kwargs)
self.field = field
def read(self, file):
value_bytes = b''
values = []
if self.instance:
instance_field = self.field.for_instance(self.instance)
for i in range(self.size):
bytes, value = instance_field.read_value(file)
value_bytes += bytes
values.append(value)
raise FullyDecoded(value_bytes, values)
def encode(self, obj, values):
encoded_values = []
for value in values:
encoded_values.append(self.field.encode(obj, value))
return b''.join(encoded_values)
|
Fix List to use the new decoding system
|
Fix List to use the new decoding system
|
Python
|
bsd-3-clause
|
gulopine/steel
|
1e5e2a236277dc9ba11f9fe4aff3279f692da3f7
|
ploy/tests/conftest.py
|
ploy/tests/conftest.py
|
from mock import patch
import pytest
import os
import shutil
import tempfile
class Directory:
def __init__(self, directory):
self.directory = directory
def __getitem__(self, name):
path = os.path.join(self.directory, name)
assert not os.path.relpath(path, self.directory).startswith('..')
return File(path)
class File:
def __init__(self, path):
self.directory = os.path.dirname(path)
self.path = path
def fill(self, content):
if not os.path.exists(self.directory):
os.makedirs(self.directory)
with open(self.path, 'w') as f:
if isinstance(content, (list, tuple)):
content = '\n'.join(content)
f.write(content)
@pytest.yield_fixture
def tempdir():
""" Returns an object for easy use of a temporary directory which is
cleaned up afterwards.
Use tempdir[filepath] to access files.
Use .fill(lines) on the returned object to write content to the file.
"""
directory = tempfile.mkdtemp()
yield Directory(directory)
shutil.rmtree(directory)
@pytest.yield_fixture
def ployconf(tempdir):
""" Returns a Configfile object which manages ploy.conf.
"""
yield tempdir['etc/ploy.conf']
@pytest.yield_fixture
def os_execvp_mock():
with patch("os.execvp") as os_execvp_mock:
yield os_execvp_mock
|
from mock import patch
import pytest
import os
import shutil
import tempfile
class Directory:
def __init__(self, directory):
self.directory = directory
def __getitem__(self, name):
path = os.path.join(self.directory, name)
assert not os.path.relpath(path, self.directory).startswith('..')
return File(path)
class File:
def __init__(self, path):
self.directory = os.path.dirname(path)
self.path = path
def fill(self, content):
if not os.path.exists(self.directory):
os.makedirs(self.directory)
with open(self.path, 'w') as f:
if isinstance(content, (list, tuple)):
content = '\n'.join(content)
f.write(content)
def content(self):
with open(self.path) as f:
return f.read()
@pytest.yield_fixture
def tempdir():
""" Returns an object for easy use of a temporary directory which is
cleaned up afterwards.
Use tempdir[filepath] to access files.
Use .fill(lines) on the returned object to write content to the file.
"""
directory = tempfile.mkdtemp()
yield Directory(directory)
shutil.rmtree(directory)
@pytest.yield_fixture
def ployconf(tempdir):
""" Returns a Configfile object which manages ploy.conf.
"""
yield tempdir['etc/ploy.conf']
@pytest.yield_fixture
def os_execvp_mock():
with patch("os.execvp") as os_execvp_mock:
yield os_execvp_mock
|
Add convenience function to read tempdir files.
|
Add convenience function to read tempdir files.
|
Python
|
bsd-3-clause
|
fschulze/ploy,ployground/ploy
|
854709e1c2f5351c8e7af49e238fe54632f23ff5
|
takeyourmeds/settings/defaults/apps.py
|
takeyourmeds/settings/defaults/apps.py
|
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'allauth',
'allauth.account',
'djcelery',
'rest_framework',
'takeyourmeds.api',
'takeyourmeds.reminder',
'takeyourmeds.static',
'takeyourmeds.telephony',
'takeyourmeds.utils',
)
|
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.staticfiles',
'allauth',
'allauth.account',
'djcelery',
'rest_framework',
'takeyourmeds.api',
'takeyourmeds.reminder',
'takeyourmeds.static',
'takeyourmeds.telephony',
'takeyourmeds.utils',
)
|
Return .sites to INSTALLED_APPS until we drop allauth
|
Return .sites to INSTALLED_APPS until we drop allauth
|
Python
|
mit
|
takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web
|
bf142af653dec7eb781faf6a45385a411bdfeee2
|
scripts/lib/paths.py
|
scripts/lib/paths.py
|
details_source = './source/details/'
xml_source = './source/raw_xml/'
course_dest = './source/courses/'
info_path = './courses/info.json'
term_dest = './courses/terms/'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
return course_dest + find_details_subdir(clbid) + '.json'
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
def make_json_term_path(term):
return term_dest + str(term) + '.json'
|
details_source = './source/details/'
xml_source = './source/raw_xml/'
course_dest = './source/courses/'
info_path = './build/info.json'
term_dest = './build/terms/'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
return course_dest + find_details_subdir(clbid) + '.json'
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
def make_json_term_path(term):
return term_dest + str(term) + '.json'
|
Put the build stuff back in /build
|
Put the build stuff back in /build
|
Python
|
mit
|
StoDevX/course-data-tools,StoDevX/course-data-tools
|
a1cf7353917bbcee569cb8b6bb0d6277ee41face
|
dependency_injector/__init__.py
|
dependency_injector/__init__.py
|
"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import Injection
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .utils import is_provider
from .utils import ensure_is_provider
from .utils import is_injection
from .utils import ensure_is_injection
from .utils import is_kwarg_injection
from .utils import is_attribute_injection
from .utils import is_method_injection
from .errors import Error
__all__ = (
# Catalogs
'AbstractCatalog',
'override',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'KwArg',
'Attribute',
'Method',
'inject',
# Utils
'is_provider',
'ensure_is_provider',
'is_injection',
'ensure_is_injection',
'is_kwarg_injection',
'is_attribute_injection',
'is_method_injection',
# Errors
'Error',
)
|
"""Dependency injector."""
from .catalog import AbstractCatalog
from .catalog import override
from .providers import Provider
from .providers import Delegate
from .providers import Factory
from .providers import Singleton
from .providers import ExternalDependency
from .providers import Class
from .providers import Object
from .providers import Function
from .providers import Value
from .providers import Callable
from .providers import Config
from .injections import Injection
from .injections import KwArg
from .injections import Attribute
from .injections import Method
from .injections import inject
from .utils import is_provider
from .utils import ensure_is_provider
from .utils import is_injection
from .utils import ensure_is_injection
from .utils import is_kwarg_injection
from .utils import is_attribute_injection
from .utils import is_method_injection
from .errors import Error
__all__ = (
# Catalogs
'AbstractCatalog',
'override',
# Providers
'Provider',
'Delegate',
'Factory',
'Singleton',
'ExternalDependency',
'Class',
'Object',
'Function',
'Value',
'Callable',
'Config',
# Injections
'Injection',
'KwArg',
'Attribute',
'Method',
'inject',
# Utils
'is_provider',
'ensure_is_provider',
'is_injection',
'ensure_is_injection',
'is_kwarg_injection',
'is_attribute_injection',
'is_method_injection',
# Errors
'Error',
)
|
Add Injection into __all__ list of top level package
|
Add Injection into __all__ list of top level package
|
Python
|
bsd-3-clause
|
rmk135/dependency_injector,ets-labs/dependency_injector,rmk135/objects,ets-labs/python-dependency-injector
|
43ee2b8cfde4d0276cfd561063554705462001cf
|
openmm/run_test.py
|
openmm/run_test.py
|
#!/usr/bin/env python
from simtk import openmm
# Check major version number
assert openmm.Platform.getOpenMMVersion() == '7.1', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == '1e5b258c0df6ab8b4350fd2c3cbf6c6f7795847c', "openmm.version.git_revision = %s" % openmm.version.git_revision
|
#!/usr/bin/env python
from simtk import openmm
# Check major version number
assert openmm.Platform.getOpenMMVersion() == '7.1', "openmm.Platform.getOpenMMVersion() = %s" % openmm.Platform.getOpenMMVersion()
# Check git hash
assert openmm.version.git_revision == '9567ddb304c48d336e82927adf2761e8780e9270', "openmm.version.git_revision = %s" % openmm.version.git_revision
|
Update git hash in test
|
Update git hash in test
|
Python
|
mit
|
peastman/conda-recipes,cwehmeyer/conda-recipes,jchodera/conda-recipes,peastman/conda-recipes,swails/conda-recipes,cwehmeyer/conda-recipes,cwehmeyer/conda-recipes,swails/conda-recipes,swails/conda-recipes,jchodera/conda-recipes,omnia-md/conda-recipes,jchodera/conda-recipes,omnia-md/conda-recipes,cwehmeyer/conda-recipes,omnia-md/conda-recipes,peastman/conda-recipes,jchodera/conda-recipes,swails/conda-recipes
|
37f08dab37601b7621743467d6b78fb0306b5054
|
lcapy/config.py
|
lcapy/config.py
|
# SymPy symbols to exclude
exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q')
# Aliases for SymPy symbols
aliases = {'delta': 'DiracDelta', 'step': 'Heaviside', 'u': 'Heaviside',
'j': 'I'}
# String replacements when printing as LaTeX. For example, SymPy uses
# theta for Heaviside's step.
latex_string_map = {r'\theta\left': r'u\left'}
import sympy as sym
print_expr_map = {sym.I: 'j'}
# Hack to print i as j
from sympy.printing.pretty.pretty_symbology import atoms_table
atoms_table['ImaginaryUnit'] = '\u2149'
|
# SymPy symbols to exclude
exclude = ('C', 'O', 'S', 'N', 'E', 'E1', 'Q', 'beta', 'gamma', 'zeta')
# Aliases for SymPy symbols
aliases = {'delta': 'DiracDelta', 'step': 'Heaviside', 'u': 'Heaviside',
'j': 'I'}
# String replacements when printing as LaTeX. For example, SymPy uses
# theta for Heaviside's step.
latex_string_map = {r'\theta\left': r'u\left'}
import sympy as sym
print_expr_map = {sym.I: 'j'}
# Hack to pretty print i as j
from sympy.printing.pretty.pretty_symbology import atoms_table
atoms_table['ImaginaryUnit'] = '\u2149'
|
Exclude beta, gamma, zeta functions
|
Exclude beta, gamma, zeta functions
|
Python
|
lgpl-2.1
|
mph-/lcapy
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.