commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
b278cf74b6ac57daee8e4ead6044f43ffd89a1f1
|
importer/importer/__init__.py
|
importer/importer/__init__.py
|
import aiohttp
import os.path
from datetime import datetime
from aioes import Elasticsearch
from .importer import import_data
from .kudago import KudaGo
from .utils import read_json_file
ELASTIC_ENDPOINTS = ['localhost:9200']
ELASTIC_ALIAS = 'theatrics'
async def initialize():
elastic = Elasticsearch(ELASTIC_ENDPOINTS)
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
alias_name = ELASTIC_ALIAS
index_name = generate_index_name()
await elastic.indices.create(index_name, index_configuration)
await elastic.indices.put_alias(alias_name, index_name)
async def update(since):
async with aiohttp.ClientSession() as http_client:
elastic = Elasticsearch(ELASTIC_ENDPOINTS)
kudago = KudaGo(http_client)
await import_data(kudago, elastic, ELASTIC_ALIAS, since=since)
def generate_index_name():
return '{}-{}'.format(ELASTIC_ALIAS, int(datetime.now().timestamp()))
|
import aiohttp
import os.path
from datetime import datetime
from aioes import Elasticsearch
from .importer import import_data
from .kudago import KudaGo
from .utils import read_json_file
ELASTIC_ENDPOINTS = ['localhost:9200']
ELASTIC_ALIAS = 'theatrics'
# commands
async def initialize():
elastic = Elasticsearch(ELASTIC_ENDPOINTS)
index_name = await create_new_index(elastic)
await switch_alias_to_index(elastic, ELASTIC_ALIAS, index_name)
async def update(since):
async with aiohttp.ClientSession() as http_client:
elastic = Elasticsearch(ELASTIC_ENDPOINTS)
kudago = KudaGo(http_client)
await import_data(kudago, elastic, ELASTIC_ALIAS, since=since)
# index management
async def create_new_index(elastic):
module_path = os.path.dirname(__file__)
config_filename = os.path.join(module_path, 'configuration', 'index.json')
index_configuration = read_json_file(config_filename)
index_name = generate_index_name()
await elastic.indices.create(index_name, index_configuration)
return index_name
async def switch_alias_to_index(elastic, alias_name, index_name):
existing_aliases = await elastic.indices.get_aliases(name=alias_name)
actions = [{
'add': {
'index': index_name,
'alias': alias_name
}
}]
for existing_index_name in existing_aliases:
actions.append({
'remove': {
'index': existing_index_name,
'alias': alias_name,
}
})
await elastic.indices.update_aliases(actions)
def generate_index_name():
return '{}-{}'.format(ELASTIC_ALIAS, int(datetime.now().timestamp()))
|
Remove all previous aliases when initializing
|
Remove all previous aliases when initializing
|
Python
|
mit
|
despawnerer/theatrics,despawnerer/theatrics,despawnerer/theatrics
|
3dd205a9dad39abb12e7a05c178117545402c2e1
|
reinforcement-learning/train.py
|
reinforcement-learning/train.py
|
"""This is the agent which currently takes the action with proper q learning."""
import time
start = time.time()
from tqdm import tqdm
import env
import os
import rl
env.make("text")
episodes = 10000
import argparse
parser = argparse.ArgumentParser(description="Train agent on the falling game.")
parser.add_argument("--remove-file", help="Remove existing q table.", default=True)
parser.add_argument("--episodes", type=str, help="Number of episodes to train for.", default=10000)
args = parser.parse_args()
if args.remove_file == True:
os.remove("q-table.npy")
rl.load_q()
elif args.remove_file == "False":
rl.load_q()
else:
print("Invalid argument.")
quit()
episodes = int(args.episodes)
with tqdm(total=episodes) as pbar:
for episode in range(episodes):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
pbar.update(1)
break
action = rl.choose_action(rl.table[env.object[0]])
rl.q(env.player, action)
episode_reward += env.reward(action)
env.action(action)
env.update()
rl.save_q()
print("Q table:")
print(rl.table[env.object[0]])
|
"""This is the agent which currently takes the action with proper q learning."""
import time
start = time.time()
from tqdm import tqdm
import env
import os
import rl
env.make("text")
episodes = 10000
import argparse
parser = argparse.ArgumentParser(description="Train agent on the falling game.")
parser.add_argument("--remove-file", help="Remove existing q table.", default=True)
parser.add_argument("--episodes", type=str, help="Number of episodes to train for.", default=10000)
args = parser.parse_args()
if args.remove_file == True:
os.remove("q-table.npy")
rl.load_q()
elif args.remove_file == "False":
rl.load_q()
else:
print("Invalid argument.")
quit()
episodes = int(args.episodes)
with tqdm(total=episodes) as pbar:
for episode in range(episodes):
env.reset()
episode_reward = 0
for t in range(100):
episode_reward += env.actual_reward
if env.done:
pbar.update(1)
break
action = rl.choose_action(env.player, "train")
rl.q(env.player, action)
episode_reward += env.reward(action)
env.action(action)
env.update()
rl.save_q()
print("Q table:")
print(rl.table[env.object[0]])
|
Update to newest version of rl.py.
|
Update to newest version of rl.py.
|
Python
|
mit
|
danieloconell/Louis
|
f88c2135ddc197283bbfb8b481774deb613571cf
|
python/raindrops/raindrops.py
|
python/raindrops/raindrops.py
|
def raindrops(number):
if is_three_a_factor(number):
return "Pling"
return "{}".format(number)
def is_three_a_factor(number):
return number % 3 == 0
|
def raindrops(number):
if is_three_a_factor(number):
return "Pling"
if is_five_a_factor(number):
return "Plang"
return "{}".format(number)
def is_three_a_factor(number):
return number % 3 == 0
def is_five_a_factor(number):
return number % 5 == 0
|
Handle 5 as a factor
|
Handle 5 as a factor
|
Python
|
mit
|
rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism
|
2f10aa07422c8132218d8af336406629b336550c
|
docs/src/conf.py
|
docs/src/conf.py
|
# -*- coding: utf-8 -*-
import os
import stat
from os.path import join, abspath
from subprocess import call
def prepare(globs, locs):
# RTD defaults the current working directory to where conf.py resides.
# In our case, that means <root>/docs/src/.
cwd = os.getcwd()
root = abspath(join(cwd, '..', '..'))
os.chdir(root)
# Download the PHP binary & composer.phar if necessary
base = 'https://github.com/Erebot/Buildenv/releases/download/1.4.0'
for f in ('php', 'composer.phar'):
call(['curl', '-L', '-z', f, '-o', f, '%s/%s' % (base, f)])
# Make sure the PHP interpreter is executable
os.chmod('./php', stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
# Call composer to download/update dependencies as necessary
os.environ['COMPOSER_CACHE_DIR'] = './cache'
call(['./php', 'composer.phar', 'update', '-n', '--ignore-platform-reqs',
'--no-progress'], env=os.environ)
# Load the second-stage configuration file.
os.chdir(cwd)
conf = join(root, 'vendor', 'erebot', 'buildenv', 'sphinx', 'rtd.py')
print "Including the second configuration file (%s)..." % (conf, )
execfile(conf, globs, locs)
prepare(globals(), locals())
|
# -*- coding: utf-8 -*-
import os
import stat
from os.path import join, abspath
from subprocess import call
def prepare(globs, locs):
# RTD defaults the current working directory to where conf.py resides.
# In our case, that means <root>/docs/src/.
cwd = os.getcwd()
root = abspath(join(cwd, '..', '..'))
os.chdir(root)
# Download the PHP binary & composer.phar if necessary
base = 'https://github.com/Erebot/Buildenv/releases/download/1.4.0'
for f in ('php', 'composer.phar'):
call(['curl', '-L', '-z', f, '-o', f, '%s/%s' % (base, f)])
# Make sure the PHP interpreter is executable
os.chmod('./php', stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
# Call composer to download/update dependencies as necessary
os.environ['COMPOSER_CACHE_DIR'] = './cache'
call(['./php', 'composer.phar', 'update', '-n', '--ignore-platform-reqs',
'--no-progress'], env=os.environ)
# Load the second-stage configuration file.
os.chdir(cwd)
conf = join(root, 'vendor', 'erebot', 'buildenv', 'sphinx', 'rtd.py')
print "Including the second configuration file (%s)..." % (conf, )
exec(compile(open(conf).read(), conf, 'exec'), globs, locs)
prepare(globals(), locals())
|
Replace execfile with py3-compatible equivalent
|
Replace execfile with py3-compatible equivalent
|
Python
|
bsd-3-clause
|
fpoirotte/XRL
|
5e5a6a55d43bf66c7f71d054b92a66528bf2a571
|
driver/driver.py
|
driver/driver.py
|
from abc import ABCMeta, abstractmethod
class Driver(metaclass=ABCMeta):
@abstractmethod
def create(self):
pass
@abstractmethod
def resize(self, id, quota):
pass
@abstractmethod
def clone(self, id):
pass
@abstractmethod
def remove(self, id):
pass
@abstractmethod
def expose(self, id):
pass
|
from abc import ABCMeta, abstractmethod
class Driver(metaclass=ABCMeta):
@abstractmethod
def create(self, requirements):
pass
@abstractmethod
def _set_quota(self, id, quota):
pass
@abstractmethod
def resize(self, id, quota):
pass
@abstractmethod
def clone(self, id):
pass
@abstractmethod
def remove(self, id):
pass
@abstractmethod
def expose(self, id, host, permissions):
pass
|
Fix inconsistency in parameters with base class
|
Fix inconsistency in parameters with base class
|
Python
|
apache-2.0
|
PressLabs/cobalt,PressLabs/cobalt
|
1fe377ec1957d570a1dcc860c2bda415088bf6be
|
dwight_chroot/platform_utils.py
|
dwight_chroot/platform_utils.py
|
import os
import pwd
import subprocess
from .exceptions import CommandFailed
def get_user_shell():
return pwd.getpwuid(os.getuid()).pw_shell
def execute_command_assert_success(cmd, **kw):
returned = execute_command(cmd, **kw)
if returned.returncode != 0:
raise CommandFailed("Command {0!r} failed with exit code {1}".format(cmd, returned))
return returned
def execute_command(cmd, **kw):
returned = subprocess.Popen(cmd, shell=True, **kw)
returned.wait()
return returned
|
import os
import pwd
import subprocess
from .exceptions import CommandFailed
def get_user_shell():
return pwd.getpwuid(os.getuid()).pw_shell
def execute_command_assert_success(cmd, **kw):
returned = execute_command(cmd, **kw)
if returned.returncode != 0:
raise CommandFailed("Command {0!r} failed with exit code {1}".format(cmd, returned.returncode))
return returned
def execute_command(cmd, **kw):
returned = subprocess.Popen(cmd, shell=True, **kw)
returned.wait()
return returned
|
Fix execute_command_assert_success return code logging
|
Fix execute_command_assert_success return code logging
|
Python
|
bsd-3-clause
|
vmalloc/dwight,vmalloc/dwight,vmalloc/dwight
|
b1c5f75c266f5f5b9976ce2ca7c2b9065ef41bb1
|
groupmestats/generatestats.py
|
groupmestats/generatestats.py
|
import argparse
import webbrowser
from .groupserializer import GroupSerializer
from .statistic import all_statistics
from .statistics import *
def gstat_stats():
parser = argparse.ArgumentParser(description="Generates stats for a group")
parser.add_argument("-g", "--group", dest="group_name", required=True,
help="Group to generate stats for.")
parser.add_argument("-s", "--stat", dest="stats", default=[],
action="append",
help=("Name of stat to generate. This may be specified"
" more than once. Choices: %s "
% ", ".join(all_statistics.keys())))
parser.add_argument("--all-stats", action="store_true", default=False,
help="Generate all possible stats.")
parser.add_argument("--ignore-user", dest="ignore_users", default=[],
action="append",
help="User to ignore. May be specified more than once.")
args = parser.parse_args()
stats = [stat_class() for name, stat_class in all_statistics.items()
if args.all_stats or name in args.stats]
if not stats:
parser.print_help()
raise RuntimeError("Must specify a valid --stat or use --all-stats")
(group, messages) = GroupSerializer.load(args.group_name)
for stat in stats:
stat.calculate(group, messages, ignore_users=args.ignore_users)
for stat in stats:
output_html_filename = stat.show()
try:
# webbrowser.open_new_tab(output_html_filename)
pass
except:
pass
|
import argparse
import webbrowser
from .groupserializer import GroupSerializer
from .statistic import all_statistics
from .statistics import *
def gstat_stats():
parser = argparse.ArgumentParser(description="Generates stats for a group")
parser.add_argument("-g", "--group", dest="group_name", required=True,
help="Group to generate stats for.")
parser.add_argument("-s", "--stat", dest="stats", default=[],
action="append",
help=("Name of stat to generate. This may be specified"
" more than once. Choices: %s "
% ", ".join(all_statistics.keys())))
parser.add_argument("--all-stats", action="store_true", default=False,
help="Generate all possible stats.")
parser.add_argument("--ignore-user", dest="ignore_users", default=[],
action="append",
help="User to ignore. May be specified more than once.")
args = parser.parse_args()
stats = [stat_class() for name, stat_class in all_statistics.items()
if args.all_stats or name in args.stats]
print("args: %s" % str(args))
if not stats:
parser.print_help()
raise RuntimeError("Must specify a valid --stat or use --all-stats")
(group, messages) = GroupSerializer.load(args.group_name)
for stat in stats:
stat.calculate(group, messages, ignore_users=args.ignore_users)
for stat in stats:
output_html_filename = stat.show()
try:
# webbrowser.open_new_tab(output_html_filename)
pass
except:
pass
|
Print args when generating stats
|
Print args when generating stats
|
Python
|
mit
|
kjteske/groupmestats,kjteske/groupmestats
|
7b7f33439b16faeef67022374cf88ba9a275ce8a
|
flocker/filesystems/interfaces.py
|
flocker/filesystems/interfaces.py
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Interfaces that filesystem APIs need to expose.
"""
from __future__ import absolute_import
from zope.interface import Interface
class IFilesystemSnapshots(Interface):
"""
Support creating and listing snapshots of a specific filesystem.
"""
def create(name):
"""
Create a snapshot of the filesystem.
:param name: The name of the snapshot.
:type name: :py:class:`flocker.snapshots.SnapshotName`
:return: Deferred that fires on snapshot creation, or errbacks if
snapshotting failed. The Deferred should support cancellation
if at all possible.
"""
def list():
"""
Return all the filesystem's snapshots.
:return: Deferred that fires with a ``list`` of
:py:class:`flocker.snapshots.SnapshotName`. This will likely be
improved in later iterations.
"""
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Interfaces that filesystem APIs need to expose.
"""
from __future__ import absolute_import
from zope.interface import Interface
class IFilesystemSnapshots(Interface):
"""
Support creating and listing snapshots of a specific filesystem.
"""
def create(name):
"""
Create a snapshot of the filesystem.
:param name: The name of the snapshot.
:type name: :py:class:`flocker.snapshots.SnapshotName`
:return: Deferred that fires on snapshot creation, or errbacks if
snapshotting failed. The Deferred should support cancellation
if at all possible.
"""
def list():
"""
Return all the filesystem's snapshots.
:return: Deferred that fires with a ``list`` of
:py:class:`flocker.snapshots.SnapshotName`.
"""
|
Address review comment: Don't state the obvious.
|
Address review comment: Don't state the obvious.
|
Python
|
apache-2.0
|
LaynePeng/flocker,agonzalezro/flocker,AndyHuu/flocker,achanda/flocker,jml/flocker,AndyHuu/flocker,lukemarsden/flocker,mbrukman/flocker,runcom/flocker,Azulinho/flocker,jml/flocker,mbrukman/flocker,w4ngyi/flocker,hackday-profilers/flocker,achanda/flocker,lukemarsden/flocker,achanda/flocker,beni55/flocker,lukemarsden/flocker,beni55/flocker,Azulinho/flocker,mbrukman/flocker,hackday-profilers/flocker,moypray/flocker,AndyHuu/flocker,LaynePeng/flocker,moypray/flocker,adamtheturtle/flocker,w4ngyi/flocker,1d4Nf6/flocker,agonzalezro/flocker,Azulinho/flocker,wallnerryan/flocker-profiles,adamtheturtle/flocker,adamtheturtle/flocker,LaynePeng/flocker,beni55/flocker,moypray/flocker,runcom/flocker,jml/flocker,hackday-profilers/flocker,agonzalezro/flocker,runcom/flocker,wallnerryan/flocker-profiles,1d4Nf6/flocker,1d4Nf6/flocker,wallnerryan/flocker-profiles,w4ngyi/flocker
|
df51d042bf1958f48fc39f1f3870285c87491243
|
lemon/templatetags/main_menu.py
|
lemon/templatetags/main_menu.py
|
from django.template import Library, Variable
from django.template import TemplateSyntaxError, VariableDoesNotExist
from django.template.defaulttags import URLNode
from ..models import MenuItem
from ..settings import CONFIG
register = Library()
class MainMenuItemURLNode(URLNode):
def __init__(self, content_type):
self.content_type = Variable(content_type)
self.args = ()
self.kwargs = {}
self.asvar = False
self.legacy_view_name = True
def render(self, context):
try:
content_type = self.content_type.resolve(context)
opts = content_type.model_class()._meta
app_label = opts.app_label
module_name = opts.module_name
self.view_name = 'admin:%s_%s_changelist' % \
(app_label, module_name)
except VariableDoesNotExist:
return ''
return super(MainMenuItemURLNode, self).render(context)
@register.inclusion_tag('lemon/main_menu.html')
def main_menu():
queryset = MenuItem.objects.select_related('section', 'content_type')
queryset = queryset.order_by('section__position', 'position')
return {'menu_items': queryset, 'menu_links': CONFIG['MENU_LINKS']}
@register.tag
def main_menu_item_url(parser, token):
try:
tag_name, content_type = token.split_contents()
except ValueError:
raise TemplateSyntaxError(
'%r tag requiresa single argument' % token.contents.split()[0]
)
return MainMenuItemURLNode(content_type)
|
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template import Library, Variable, Node
from django.template import TemplateSyntaxError, VariableDoesNotExist
from django.template.defaulttags import URLNode
from ..models import MenuItem
from ..settings import CONFIG
register = Library()
class MainMenuItemURLNode(Node):
def __init__(self, content_type):
self.content_type = Variable(content_type)
def render(self, context):
try:
content_type = self.content_type.resolve(context)
except VariableDoesNotExist:
return ''
opts = content_type.model_class()._meta
app_label = opts.app_label
module_name = opts.module_name
view_name = 'admin:%s_%s_changelist' % \
(app_label, module_name)
try:
return reverse(view_name)
except NoReverseMatch:
return ''
@register.inclusion_tag('lemon/main_menu.html')
def main_menu():
queryset = MenuItem.objects.select_related('section', 'content_type')
queryset = queryset.order_by('section__position', 'position')
return {'menu_items': queryset, 'menu_links': CONFIG['MENU_LINKS']}
@register.tag
def main_menu_item_url(parser, token):
try:
tag_name, content_type = token.split_contents()
except ValueError:
raise TemplateSyntaxError(
'%r tag requires a single argument' % token.contents.split()[0]
)
return MainMenuItemURLNode(content_type)
|
Fix main menu url reversing in admin
|
Fix main menu url reversing in admin
|
Python
|
bsd-3-clause
|
trilan/lemon,trilan/lemon,trilan/lemon
|
e548713f5192d125b1313fa955240965a1136de8
|
plugin/__init__.py
|
plugin/__init__.py
|
########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
|
# -*- coding: utf-8 -*-
########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
|
UPDATE init; add utf-8 encoding
|
UPDATE init; add utf-8 encoding
|
Python
|
apache-2.0
|
fastconnect/cloudify-azure-plugin
|
28786f30be37bb43a175262f96b618fc440d5ace
|
send-email.py
|
send-email.py
|
#!/usr/bin/env python3
import datetime
import os
import sys
import smtplib
from email.mime.text import MIMEText
def timeString():
return str(datetime.datetime.now())
if not os.path.exists('email-list'):
print(timeString(), ':\tERROR: email-list not found.', sep='')
quit(1)
if not os.path.exists('credentials'):
print(timeString(), ':\tERROR: credentials not found.', sep='')
quit(1)
with open('credentials', 'r') as _file:
_lines = [str(e).strip('\n') for e in _file]
server = _lines[0]
port = _lines[1]
username = _lines[2]
password = _lines[3]
with open('new-products.html', 'r') as _file:
_message = _file.read()
with open('email-list', 'r') as _file:
recipients = [e.strip('\n') for e in _file]
session=smtplib.SMTP(server, port)
session.ehlo()
session.starttls()
session.login(username, password)
for message_to in recipients:
msg = MIMEText(_message, 'html')
msg['To'] = message_to
msg['From'] = username
msg['Subject'] = 'ALERT: New Cymbals detected on mycymbal.com'
msg = msg.as_string()
session.sendmail(username, message_to, msg)
print(timeString(), ':\tEmailed ', message_to, sep='')
session.quit()
|
#!/usr/bin/env python3
import datetime
import os
import sys
import smtplib
from email.mime.text import MIMEText
def timeString():
return str(datetime.datetime.now())
if not os.path.exists('email-list'):
print(timeString(), ':\tERROR: email-list not found.', sep='')
quit(1)
if not os.path.exists('credentials'):
print(timeString(), ':\tERROR: credentials not found.', sep='')
quit(1)
with open('credentials', 'r') as _file:
_lines = [str(e).strip('\n') for e in _file]
server = _lines[0]
port = _lines[1]
username = _lines[2]
password = _lines[3]
with open('new-products.html', 'r') as _file:
_message = _file.read()
with open('email-list', 'r') as _file:
recipients = [e.strip('\n') for e in _file]
session=smtplib.SMTP(server, port)
session.ehlo()
session.starttls()
session.login(username, password)
for message_to in recipients:
msg = MIMEText(_message, 'html')
msg['To'] = message_to
msg['From'] = username
msg['Subject'] = 'MyCymbal Digest'
msg = msg.as_string()
session.sendmail(username, message_to, msg)
print(timeString(), ':\tEmailed ', message_to, sep='')
session.quit()
|
Change email subject. Not much of an ALERT if it happens every day.
|
Change email subject. Not much of an ALERT if it happens every day.
|
Python
|
unlicense
|
nerflad/mds-new-products,nerflad/mds-new-products,nerflad/mds-new-products
|
400c506627deca5d85454928254b1968e09dc33e
|
scrape.py
|
scrape.py
|
import scholarly
import requests
_EXACT_SEARCH = '/scholar?q="{}"'
_START_YEAR = '&as_ylo={}'
_END_YEAR = '&as_yhi={}'
def search(query, exact=True, start_year=None, end_year=None):
"""Search by scholar query and return a generator of Publication objects"""
url = _EXACT_SEARCH.format(requests.utils.quote(query))
if start_year:
url += _START_YEAR.format(start_year)
if end_year:
url += _END_YEAR.format(end_year)
soup = scholarly._get_soup(url)
return scholarly._search_scholar_soup(soup)
if __name__ == '__main__':
s = search("Cure Alzheimer's Fund", start_year=2015, end_year=2015)
num = 0
for x in s:
x.fill()
stuff = ['title', 'author', 'journal', 'volume', 'issue']
for thing in stuff:
if thing in x.bib:
print("{}: {}".format(thing, x.bib[thing]))
num += 1
print("Number of results:", num)
|
import re
import requests
import scholarly
_EXACT_SEARCH = '/scholar?q="{}"'
_START_YEAR = '&as_ylo={}'
_END_YEAR = '&as_yhi={}'
class Papers(object):
"""Wrapper around scholarly._search_scholar_soup that allows one to get the
number of papers found in the search with len()"""
def __init__(self, query, start_year=None, end_year=None):
url = _EXACT_SEARCH.format(requests.utils.quote(query))
if start_year:
url += _START_YEAR.format(start_year)
if end_year:
url += _END_YEAR.format(end_year)
soup = scholarly._get_soup(url)
results = soup.find('div', id='gs_ab_md').text
self.num = int(re.search(r'\d+ results', results).group(0).split()[0])
self.papers = scholarly._search_scholar_soup(soup)
def __len__(self):
return self.num
def __iter__(self):
return (paper.fill().bib for paper in self.papers)
def get_published_papers():
""" Returns a generator that returns dicts with paper metadata."""
return Papers("Cure Alzheimer's Fund", start_year=2015, end_year=2015)
def main():
papers = get_published_papers()
print("Number of results:", len(papers))
for paper in papers:
stuff = ['title', 'author', 'journal', 'volume', 'issue']
for thing in stuff:
if thing in paper:
print("{}: {}".format(thing, paper[thing]))
if __name__ == '__main__':
main()
|
Allow getting number of results found with len()
|
Allow getting number of results found with len()
|
Python
|
mit
|
Spferical/cure-alzheimers-fund-tracker,Spferical/cure-alzheimers-fund-tracker,Spferical/cure-alzheimers-fund-tracker
|
ce2ea43a9ca49caa50e26bc7d7e11ba97edea929
|
src/zeit/content/article/edit/browser/tests/test_header.py
|
src/zeit/content/article/edit/browser/tests/test_header.py
|
import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
block = 'quiz'
# copy&paste from self.create_block()
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
|
import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.waitForVisible('css=.fieldname-header_layout')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
|
Fix test, needs to select proper header for header-module to be enabled (belongs to commit:eb1b6fa)
|
ZON-3167: Fix test, needs to select proper header for header-module to be enabled (belongs to commit:eb1b6fa)
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.content.article,ZeitOnline/zeit.content.article,ZeitOnline/zeit.content.article
|
2e9cb250d58474354bdfff1edb4fc9e71ee95d60
|
lightbus/utilities/importing.py
|
lightbus/utilities/importing.py
|
import importlib
import logging
from typing import Sequence, Tuple, Callable
import pkg_resources
logger = logging.getLogger(__name__)
def import_module_from_string(name):
return importlib.import_module(name)
def import_from_string(name):
components = name.split(".")
mod = __import__(components[0])
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
def load_entrypoint_classes(entrypoint_name) -> Sequence[Tuple[str, str, Callable]]:
"""Load classes specified in an entrypoint
Entrypoints are specified in setup.py, and Lightbus uses them to
discover plugins & transports.
"""
found_classes = []
for entrypoint in pkg_resources.iter_entry_points(entrypoint_name):
class_ = entrypoint.load()
found_classes.append((entrypoint.module_name, entrypoint.name, class_))
return found_classes
|
import importlib
import logging
import sys
from typing import Sequence, Tuple, Callable
import pkg_resources
logger = logging.getLogger(__name__)
def import_module_from_string(name):
if name in sys.modules:
return sys.modules[name]
else:
return importlib.import_module(name)
def import_from_string(name):
components = name.split(".")
mod = __import__(components[0])
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
def load_entrypoint_classes(entrypoint_name) -> Sequence[Tuple[str, str, Callable]]:
"""Load classes specified in an entrypoint
Entrypoints are specified in setup.py, and Lightbus uses them to
discover plugins & transports.
"""
found_classes = []
for entrypoint in pkg_resources.iter_entry_points(entrypoint_name):
class_ = entrypoint.load()
found_classes.append((entrypoint.module_name, entrypoint.name, class_))
return found_classes
|
Fix to import_module_from_string() to prevent multiple imports
|
Fix to import_module_from_string() to prevent multiple imports
|
Python
|
apache-2.0
|
adamcharnock/lightbus
|
0c18e83248a752a3191da1d9c8369fafc2b61674
|
purepython/urls.py
|
purepython/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'purepython.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from fb.views import index
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', index),
url(r'^admin/', include(admin.site.urls)),
)
|
Add url to access the index view.
|
Add url to access the index view.
|
Python
|
apache-2.0
|
pure-python/brainmate
|
b74c56b3999800917946378f20288407347710e6
|
social/backends/gae.py
|
social/backends/gae.py
|
"""
Google App Engine support using User API
"""
from __future__ import absolute_import
from google.appengine.api import users
from social.backends.base import BaseAuth
from social.exceptions import AuthException
class GoogleAppEngineAuth(BaseAuth):
"""GoogleAppengine authentication backend"""
name = 'google-appengine'
def get_user_id(self, details, response):
"""Return current user id."""
user = users.get_current_user()
if user:
return user.user_id()
def get_user_details(self, response):
"""Return user basic information (id and email only)."""
user = users.get_current_user()
return {'username': user.user_id(),
'email': user.email(),
'fullname': '',
'first_name': '',
'last_name': ''}
def auth_url(self):
"""Build and return complete URL."""
return users.create_login_url(self.redirect_uri)
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance."""
if not users.get_current_user():
raise AuthException('Authentication error')
kwargs.update({'response': '', 'backend': self})
return self.strategy.authenticate(*args, **kwargs)
BACKENDS = {
'gae': GoogleAppEngineAuth
}
|
"""
Google App Engine support using User API
"""
from __future__ import absolute_import
from google.appengine.api import users
from social.backends.base import BaseAuth
from social.exceptions import AuthException
class GoogleAppEngineAuth(BaseAuth):
"""GoogleAppengine authentication backend"""
name = 'google-appengine'
def get_user_id(self, details, response):
"""Return current user id."""
user = users.get_current_user()
if user:
return user.user_id()
def get_user_details(self, response):
"""Return user basic information (id and email only)."""
user = users.get_current_user()
return {'username': user.user_id(),
'email': user.email(),
'fullname': '',
'first_name': '',
'last_name': ''}
def auth_url(self):
"""Build and return complete URL."""
return users.create_login_url(self.redirect_uri)
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance."""
if not users.get_current_user():
raise AuthException('Authentication error')
kwargs.update({'response': '', 'backend': self})
return self.strategy.authenticate(*args, **kwargs)
BACKENDS = {
'google-appengine': GoogleAppEngineAuth
}
|
Rename to be consistent with backend name
|
Rename to be consistent with backend name
|
Python
|
bsd-3-clause
|
ononeor12/python-social-auth,barseghyanartur/python-social-auth,tutumcloud/python-social-auth,webjunkie/python-social-auth,cmichal/python-social-auth,henocdz/python-social-auth,mchdks/python-social-auth,falcon1kr/python-social-auth,cmichal/python-social-auth,contracode/python-social-auth,SeanHayes/python-social-auth,VishvajitP/python-social-auth,robbiet480/python-social-auth,duoduo369/python-social-auth,lamby/python-social-auth,mrwags/python-social-auth,tkajtoch/python-social-auth,nirmalvp/python-social-auth,mchdks/python-social-auth,JerzySpendel/python-social-auth,henocdz/python-social-auth,rsteca/python-social-auth,joelstanner/python-social-auth,mark-adams/python-social-auth,frankier/python-social-auth,mathspace/python-social-auth,ByteInternet/python-social-auth,drxos/python-social-auth,mathspace/python-social-auth,msampathkumar/python-social-auth,yprez/python-social-auth,frankier/python-social-auth,yprez/python-social-auth,VishvajitP/python-social-auth,SeanHayes/python-social-auth,python-social-auth/social-app-django,firstjob/python-social-auth,ononeor12/python-social-auth,falcon1kr/python-social-auth,michael-borisov/python-social-auth,tkajtoch/python-social-auth,JJediny/python-social-auth,ariestiyansyah/python-social-auth,jameslittle/python-social-auth,clef/python-social-auth,chandolia/python-social-auth,bjorand/python-social-auth,S01780/python-social-auth,mrwags/python-social-auth,muhammad-ammar/python-social-auth,jneves/python-social-auth,daniula/python-social-auth,cmichal/python-social-auth,wildtetris/python-social-auth,jneves/python-social-auth,bjorand/python-social-auth,contracode/python-social-auth,ByteInternet/python-social-auth,cjltsod/python-social-auth,S01780/python-social-auth,barseghyanartur/python-social-auth,daniula/python-social-auth,lneoe/python-social-auth,muhammad-ammar/python-social-auth,Andygmb/python-social-auth,webjunkie/python-social-auth,muhammad-ammar/python-social-auth,DhiaEddineSaidi/python-social-auth,garrett-schlesinger/python-social
-auth,python-social-auth/social-storage-sqlalchemy,JJediny/python-social-auth,jameslittle/python-social-auth,JerzySpendel/python-social-auth,fearlessspider/python-social-auth,tobias47n9e/social-core,ononeor12/python-social-auth,msampathkumar/python-social-auth,henocdz/python-social-auth,san-mate/python-social-auth,jneves/python-social-auth,daniula/python-social-auth,mark-adams/python-social-auth,garrett-schlesinger/python-social-auth,iruga090/python-social-auth,wildtetris/python-social-auth,MSOpenTech/python-social-auth,alrusdi/python-social-auth,merutak/python-social-auth,lamby/python-social-auth,ariestiyansyah/python-social-auth,lneoe/python-social-auth,VishvajitP/python-social-auth,degs098/python-social-auth,imsparsh/python-social-auth,nvbn/python-social-auth,rsteca/python-social-auth,degs098/python-social-auth,JJediny/python-social-auth,cjltsod/python-social-auth,rsalmaso/python-social-auth,michael-borisov/python-social-auth,tkajtoch/python-social-auth,barseghyanartur/python-social-auth,tutumcloud/python-social-auth,robbiet480/python-social-auth,noodle-learns-programming/python-social-auth,falcon1kr/python-social-auth,jameslittle/python-social-auth,jeyraof/python-social-auth,san-mate/python-social-auth,python-social-auth/social-app-django,rsteca/python-social-auth,lamby/python-social-auth,chandolia/python-social-auth,imsparsh/python-social-auth,jeyraof/python-social-auth,Andygmb/python-social-auth,iruga090/python-social-auth,iruga090/python-social-auth,noodle-learns-programming/python-social-auth,msampathkumar/python-social-auth,clef/python-social-auth,fearlessspider/python-social-auth,JerzySpendel/python-social-auth,hsr-ba-fs15-dat/python-social-auth,chandolia/python-social-auth,hsr-ba-fs15-dat/python-social-auth,DhiaEddineSaidi/python-social-auth,python-social-auth/social-core,rsalmaso/python-social-auth,robbiet480/python-social-auth,python-social-auth/social-docs,python-social-auth/social-app-cherrypy,drxos/python-social-auth,MSOpenTech/python-social-auth,fir
stjob/python-social-auth,contracode/python-social-auth,fearlessspider/python-social-auth,duoduo369/python-social-auth,alrusdi/python-social-auth,alrusdi/python-social-auth,mrwags/python-social-auth,mathspace/python-social-auth,hsr-ba-fs15-dat/python-social-auth,degs098/python-social-auth,python-social-auth/social-core,joelstanner/python-social-auth,webjunkie/python-social-auth,jeyraof/python-social-auth,san-mate/python-social-auth,clef/python-social-auth,Andygmb/python-social-auth,lawrence34/python-social-auth,drxos/python-social-auth,imsparsh/python-social-auth,michael-borisov/python-social-auth,noodle-learns-programming/python-social-auth,python-social-auth/social-app-django,lawrence34/python-social-auth,merutak/python-social-auth,DhiaEddineSaidi/python-social-auth,nirmalvp/python-social-auth,ariestiyansyah/python-social-auth,bjorand/python-social-auth,MSOpenTech/python-social-auth,yprez/python-social-auth,lawrence34/python-social-auth,ByteInternet/python-social-auth,nirmalvp/python-social-auth,nvbn/python-social-auth,lneoe/python-social-auth,S01780/python-social-auth,mark-adams/python-social-auth,firstjob/python-social-auth,joelstanner/python-social-auth,merutak/python-social-auth,mchdks/python-social-auth,wildtetris/python-social-auth
|
b78c457d52702beb5067eb7c3067cb69af5e935d
|
itunes/exceptions.py
|
itunes/exceptions.py
|
"""
exceptions.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file defines custom exceptions for the iTunes funcitonality.
"""
class ITunesError(Exception):
"""
Base exception class for iTunes interface.
"""
pass
class AppleScriptError(ITunesError):
"""
Represents an error received from AppleScript while running a script.
Parameters
----------
message : str
The message that the exception will hold.
script : str
The AppleScript that was running when this exception was raised (default
"").
Attributes
----------
script : str
The AppleScript that was running when this exception was raised, if one
was provided.
"""
def __init__(self, message, script=""):
super(AppleScriptError, self).__init__(message)
self.script = script
|
"""
exceptions.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file defines custom exceptions for the iTunes funcitonality.
"""
class ITunesError(Exception):
"""
Base exception class for iTunes interface.
"""
pass
class AppleScriptError(ITunesError):
"""
Represents an error received from AppleScript while running a script.
Parameters
----------
message : str
The message that the exception will hold.
script : str
The AppleScript that was running when this exception was raised (default
"").
Attributes
----------
script : str
The AppleScript that was running when this exception was raised, if one
was provided.
"""
def __init__(self, message, script=""):
super(AppleScriptError, self).__init__(message)
self.script = script
class TrackError(ITunesError):
"""
Represents an error in finding or playing a track.
Parameters
----------
message : str
The message that the exception will hold.
title : str
The title of the track that caused the error (default "").
Attributes
----------
title : str
The title of the track that caused the error.
"""
def __init__(self, message, title=""):
super(TrackError, self).__init__(message)
self.title = title
|
Add custom exception for track-related errors
|
Add custom exception for track-related errors
The new exception type (`TrackError`) will be used when a track cannot
be played or found.
|
Python
|
mit
|
adanoff/iTunesTUI
|
b339c25068e849dbbf769f22893125b15325eb66
|
figgypy/utils.py
|
figgypy/utils.py
|
import os
def env_or_default(var, default=None):
"""Get environment variable or provide default.
Args:
var (str): environment variable to search for
default (optional(str)): default to return
"""
if var in os.environ:
return os.environ[var]
return default
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from future.utils import bytes_to_native_str as n
from base64 import b64encode
import os
import boto3
def env_or_default(var, default=None):
"""Get environment variable or provide default.
Args:
var (str): environment variable to search for
default (optional(str)): default to return
"""
if var in os.environ:
return os.environ[var]
return default
def kms_encrypt(value, key, aws_config=None):
"""Encrypt and value with KMS key.
Args:
value (str): value to encrypt
key (str): key id or alias
aws_config (optional[dict]): aws credentials
dict of arguments passed into boto3 session
example:
aws_creds = {'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
'region_name': 'us-east-1'}
Returns:
str: encrypted cipher text
"""
aws_config = aws_config or {}
aws = boto3.session.Session(**aws_config)
client = aws.client('kms')
enc_res = client.encrypt(KeyId=key,
Plaintext=value)
return n(b64encode(enc_res['CiphertextBlob']))
|
Add new helper function to encrypt for KMS
|
Add new helper function to encrypt for KMS
|
Python
|
mit
|
theherk/figgypy
|
0f4ca12e524be7cbd82ac79e81a62015b47ca6ef
|
openfisca_core/tests/formula_helpers.py
|
openfisca_core/tests/formula_helpers.py
|
# -*- coding: utf-8 -*-
import numpy
from nose.tools import raises
from openfisca_core.formula_helpers import apply_threshold as apply_threshold
from openfisca_core.tools import assert_near
@raises(AssertionError)
def test_apply_threshold_with_too_many_thresholds():
input = numpy.array([10])
thresholds = [5]
outputs = [10]
return apply_threshold(input, thresholds, outputs)
@raises(AssertionError)
def test_apply_threshold_with_too_few_thresholds():
input = numpy.array([10])
thresholds = [5]
outputs = [10, 15, 20]
return apply_threshold(input, thresholds, outputs)
def test_apply_threshold():
input = numpy.array([4, 5, 6, 7, 8])
thresholds = [5, 7]
outputs = [10, 15, 20]
result = apply_threshold(input, thresholds, outputs)
assert_near(result, [10, 10, 15, 15, 20])
|
# -*- coding: utf-8 -*-
import numpy
from nose.tools import raises
from openfisca_core.formula_helpers import apply_threshold as apply_threshold
from openfisca_core.tools import assert_near
@raises(AssertionError)
def test_apply_threshold_with_too_many_thresholds():
input = numpy.array([10])
thresholds = [5]
outputs = [10]
return apply_threshold(input, thresholds, outputs)
@raises(AssertionError)
def test_apply_threshold_with_too_few_thresholds():
input = numpy.array([10])
thresholds = [5]
outputs = [10, 15, 20]
return apply_threshold(input, thresholds, outputs)
def test_apply_threshold():
input = numpy.array([4, 5, 6, 7, 8])
thresholds = [5, 7]
outputs = [10, 15, 20]
result = apply_threshold(input, thresholds, outputs)
assert_near(result, [10, 10, 15, 15, 20])
def test_apply_threshold_with_variable_threshold():
input = numpy.array([1000, 1000, 1000])
thresholds = [numpy.array([500, 1500, 1000])] # Only one thresold, but varies with the person
outputs = [True, False] # True if input <= threshold, false otherwise
result = apply_threshold(input, thresholds, outputs)
assert_near(result, [False, True, True])
|
Add more tricky case test for apply_threshold
|
Add more tricky case test for apply_threshold
|
Python
|
agpl-3.0
|
benjello/openfisca-core,openfisca/openfisca-core,benjello/openfisca-core,sgmap/openfisca-core,openfisca/openfisca-core
|
342e6134a63c5b575ae8e4348a54f61350bca2da
|
parser/crimeparser/pipelinesEnricher.py
|
parser/crimeparser/pipelinesEnricher.py
|
from geopy import Nominatim
from geopy.extra.rate_limiter import RateLimiter
class GeoCodePipeline(object):
def open_spider(self, spider):
geolocator = Nominatim(timeout=5)
self.__geocodeFunc = RateLimiter(geolocator.geocode, min_delay_seconds=2)
def process_item(self, item, spider):
for crime in item["crimes"]:
place = crime["place"]
latitude, longitude = self.__geocode_address(place)
crime["latitude"] = latitude
crime["longitude"] = longitude
return item
def __geocode_address(self, place):
if place is None:
return None, None
location = self.__geocodeFunc(place)
if location is not None:
return location.latitude, location.longitude
else:
return None, None
|
from geopy import Nominatim, Photon
from geopy.extra.rate_limiter import RateLimiter
class GeoCodePipeline(object):
def open_spider(self, spider):
geolocator = Photon(timeout=5)
self.__geocodeFunc = RateLimiter(geolocator.geocode, min_delay_seconds=2)
def process_item(self, item, spider):
for crime in item["crimes"]:
place = crime["place"]
latitude, longitude = self.__geocode_address(place)
crime["latitude"] = latitude
crime["longitude"] = longitude
return item
def __geocode_address(self, place):
if place is None:
return None, None
location = self.__geocodeFunc(place)
if location is not None:
return location.latitude, location.longitude
else:
return None, None
|
Use Phonon instead of Nominatim for geo coding
|
Use Phonon instead of Nominatim for geo coding
Phonon is more fault tolerant to spelling mistakes.
|
Python
|
mit
|
aberklotz/crimereport,aberklotz/crimereport,aberklotz/crimereport
|
5398a864449db0a1d6ec106ddb839fff3b6afcda
|
mopidy_frontpanel/frontend.py
|
mopidy_frontpanel/frontend.py
|
from __future__ import unicode_literals
import logging
from mopidy.core import CoreListener
import pykka
import .menu import BrowseMenu
import .painter import Painter
logger = logging.getLogger(__name__)
class FrontPanel(pykka.ThreadingActor, CoreListener):
def __init__(self, config, core):
super(FrontPanel, self).__init__()
self.core = core
self.painter = Painter(core, self)
self.menu = BrowseMenu(core)
def on_start(self):
self.painter.start()
def handleInput(self, input):
self.menu.handleInput(input)
self.painter.update()
def track_playback_started(self, tl_track):
self.painter.update()
def track_playback_ended(self, tl_track, time_position):
self.painter.update()
|
from __future__ import unicode_literals
import logging
from mopidy.core import CoreListener
import pykka
import .menu import BrowseMenu
import .painter import Painter
logger = logging.getLogger(__name__)
class FrontPanel(pykka.ThreadingActor, CoreListener):
def __init__(self, config, core):
super(FrontPanel, self).__init__()
self.core = core
self.painter = Painter(core, self)
self.menu = BrowseMenu(core)
def on_start(self):
self.painter.start()
def handleInput(self, input):
if (input == "play"):
pass
elif (input == "pause"):
pass
elif (input == "stop"):
pass
elif (input == "vol_up"):
pass
elif (input == "vol_down"):
pass
else:
self.menu.handleInput(input)
self.painter.update()
def track_playback_started(self, tl_track):
self.painter.update()
def track_playback_ended(self, tl_track, time_position):
self.painter.update()
|
Handle playback changes in FrontPanel
|
Handle playback changes in FrontPanel
|
Python
|
apache-2.0
|
nick-bulleid/mopidy-frontpanel
|
76b916c6f53d97b4658c16a85f10302e75794bcd
|
kitsune/upload/storage.py
|
kitsune/upload/storage.py
|
import hashlib
import itertools
import os
import time
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from storages.backends.s3boto3 import S3Boto3Storage
DjangoStorage = S3Boto3Storage if settings.AWS_ACCESS_KEY_ID else FileSystemStorage
class RenameFileStorage(DjangoStorage):
"""Subclass Django's file system storage to add our file naming
conventions."""
def get_available_name(self, name):
dir_name, file_name = os.path.split(name)
file_root, file_ext = os.path.splitext(file_name)
# Set file_root to something we like: clean and all ascii
md5_sub = hashlib.md5(file_root.encode('utf8')).hexdigest()[0:6]
file_root = time.strftime('%Y-%m-%d-%H-%M-%S-',
time.localtime()) + md5_sub
name = os.path.join(dir_name, file_root + file_ext)
# If the filename already exists, add an underscore and a number
# (before the file extension, if one exists) to the filename until
# the generated filename doesn't exist.
count = itertools.count(1)
while self.exists(name):
# file_ext includes the dot.
name = os.path.join(dir_name, "%s_%s%s" %
(file_root, count.next(), file_ext))
return name
|
import hashlib
import itertools
import os
import time
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from storages.backends.s3boto3 import S3Boto3Storage
DjangoStorage = S3Boto3Storage if settings.AWS_ACCESS_KEY_ID else FileSystemStorage
class RenameFileStorage(DjangoStorage):
"""Subclass Django's file system storage to add our file naming
conventions."""
def get_available_name(self, name, max_length=None):
dir_name, file_name = os.path.split(name)
file_root, file_ext = os.path.splitext(file_name)
# Set file_root to something we like: clean and all ascii
md5_sub = hashlib.md5(file_root.encode('utf8')).hexdigest()[0:6]
file_root = time.strftime('%Y-%m-%d-%H-%M-%S-',
time.localtime()) + md5_sub
name = os.path.join(dir_name, file_root + file_ext)
# If the filename already exists, add an underscore and a number
# (before the file extension, if one exists) to the filename until
# the generated filename doesn't exist.
count = itertools.count(1)
while self.exists(name):
# file_ext includes the dot.
name = os.path.join(dir_name, "%s_%s%s" %
(file_root, count.next(), file_ext))
return name
|
Update RenameFileStorage method to be 1.11 compatible
|
Update RenameFileStorage method to be 1.11 compatible
|
Python
|
bsd-3-clause
|
mozilla/kitsune,anushbmx/kitsune,mozilla/kitsune,anushbmx/kitsune,anushbmx/kitsune,mozilla/kitsune,mozilla/kitsune,anushbmx/kitsune
|
669325d6ca93f81c4635d7d3d57120d8e23e5251
|
organizations/backends/forms.py
|
organizations/backends/forms.py
|
from django import forms
from django.contrib.auth.models import User
class InvitationRegistrationForm(forms.ModelForm):
first_name = forms.CharField(max_length=30)
last_name = forms.CharField(max_length=30)
password = forms.CharField(max_length=30, widget=forms.PasswordInput)
password_confirm = forms.CharField(max_length=30,
widget=forms.PasswordInput)
class Meta:
model = User
|
from django import forms
from django.contrib.auth.models import User
class InvitationRegistrationForm(forms.ModelForm):
first_name = forms.CharField(max_length=30)
last_name = forms.CharField(max_length=30)
password = forms.CharField(max_length=30, widget=forms.PasswordInput)
password_confirm = forms.CharField(max_length=30,
widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super(InvitationRegistrationForm, self).__init__(*args, **kwargs)
self.initial['username'] = ''
class Meta:
model = User
exclude = ('is_staff', 'is_superuser', 'is_active', 'last_login',
'date_joined', 'groups', 'user_permissions')
|
Hide all unnecessary user info
|
Hide all unnecessary user info
Excludes all User fields save for useranme, first/last name, email, and
password. Also clears the username of its default data.
|
Python
|
bsd-2-clause
|
aptivate/django-organizations,arteria/django-ar-organizations,GauthamGoli/django-organizations,aptivate/django-organizations,DESHRAJ/django-organizations,DESHRAJ/django-organizations,GauthamGoli/django-organizations,st8st8/django-organizations,bennylope/django-organizations,arteria/django-ar-organizations,aptivate/django-organizations,st8st8/django-organizations,bennylope/django-organizations
|
16fdad8ce40a539d732c8def4898aae0f2d58cd0
|
foxybot/registrar.py
|
foxybot/registrar.py
|
class CommandRegistrar():
"""A singleton to manage the command table and command execution"""
_instance = None
def __init__(self):
self.command_table = {}
@staticmethod
def instance():
"""Get the singleton, create an instance if needed"""
if not CommandRegistrar._instance:
CommandRegistrar._instance = CommandRegistrar()
return CommandRegistrar._instance
@staticmethod
async def execute_command(shards, shard, msg):
# !roll 100 -> 'roll'
instance = CommandRegistrar.instance()
command = msg.content[1:].split(' ')[0].lower()
if command in instance.command_table.keys():
await instance.command_table[command].execute(shards, shard, msg)
@property
def loaded_commands(self):
return [command.name for command in set(self.command_table.values())]
@property
def loaded_aliases(self):
return list(self.command_table.keys())
|
class CommandRegistrar():
"""A singleton to manage the command table and command execution"""
_instance = None
def __init__(self):
self.command_table = {}
@staticmethod
def instance():
"""Get the singleton, create an instance if needed"""
if not CommandRegistrar._instance:
CommandRegistrar._instance = CommandRegistrar()
return CommandRegistrar._instance
@staticmethod
async def execute_command(shards, shard, msg):
# !roll 100 -> 'roll'
instance = CommandRegistrar.instance()
command = msg.content[1:].split(' ')[0].lower()
if command in instance.command_table.keys():
await instance.command_table[command].execute(shards, shard, msg)
@property
def commands(self):
return self.command_table
@property
def loaded_commands(self):
return [command.name for command in set(self.command_table.values())]
@property
def loaded_aliases(self):
return list(self.command_table.keys())
|
Add `commands` property to `CommandManager` to allow retrieving the command table
|
Add `commands` property to `CommandManager` to allow retrieving the command table
|
Python
|
bsd-2-clause
|
6180/foxybot
|
5bb0259a747651290f91c0384ca93492a423c82d
|
IPython/utils/docs.py
|
IPython/utils/docs.py
|
# encoding: utf-8
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
GENERATING_DOCUMENTATION = os.environ.get("IN_SPHINX_RUN", None) == "True"
|
import os
GENERATING_DOCUMENTATION = os.environ.get("IN_SPHINX_RUN", None) == "True"
|
Remove outdated header as suggested
|
Remove outdated header as suggested
Co-authored-by: Matthias Bussonnier <bc6ce7c050ee90e1f3b70c16cec57d4205e63b6c@gmail.com>
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
bc50a924c50fb22a0ac03b3b696d6fba4efcd120
|
src/main.py
|
src/main.py
|
#!/usr/bin/env python2
import sys
from direct.showbase.ShowBase import ShowBase
import panda3d.core as p3d
import ecs
from player import PlayerController
class NodePathComponent(ecs.Component):
__slots__ = [
"nodepath",
]
def __init__(self, modelpath=None):
if modelpath is not None:
self.nodepath = base.loader.loadModel(modelpath)
else:
self.nodepath = p3d.NodePath(p3d.PandaNode('node'))
class Sigurd(ShowBase):
def __init__(self):
ShowBase.__init__(self)
self.disableMouse()
self.ecsmanager = ecs.ECSManager()
def run_ecs(task):
self.ecsmanager.update(0)
task.cont
self.taskMgr.add(run_ecs, 'ECS')
level = ecs.Entity()
np_component = NodePathComponent('models/level')
np_component.nodepath.reparent_to(base.render)
self.ecsmanager.add_entity(level)
PlayerController(self.camera)
self.camLens.setFov(90)
self.accept('escape-up', sys.exit)
if __name__ == '__main__':
app = Sigurd()
app.run()
|
#!/usr/bin/env python2
import math
import sys
from direct.showbase.ShowBase import ShowBase
import panda3d.core as p3d
import ecs
from player import PlayerController
class NodePathComponent(ecs.Component):
__slots__ = [
"nodepath",
]
def __init__(self, modelpath=None):
if modelpath is not None:
self.nodepath = base.loader.loadModel(modelpath)
else:
self.nodepath = p3d.NodePath(p3d.PandaNode('node'))
class Sigurd(ShowBase):
def __init__(self):
ShowBase.__init__(self)
self.disableMouse()
self.ecsmanager = ecs.ECSManager()
def run_ecs(task):
self.ecsmanager.update(0)
task.cont
self.taskMgr.add(run_ecs, 'ECS')
level = ecs.Entity()
np_component = NodePathComponent('models/level')
np_component.nodepath.reparent_to(base.render)
self.ecsmanager.add_entity(level)
PlayerController(self.camera)
self.accept('escape-up', sys.exit)
self.accept('aspectRatioChanged', self.cb_resize)
def cb_resize(self):
vfov = 70
aspect = self.camLens.get_aspect_ratio()
hfov = math.degrees(2 * math.atan(math.tan(math.radians(vfov)/2.0) * aspect))
print(hfov)
self.camLens.setFov(hfov, vfov)
if __name__ == '__main__':
app = Sigurd()
app.run()
|
Change fov scaling to "Hor+".
|
Change fov scaling to "Hor+".
|
Python
|
apache-2.0
|
Moguri/sigurd
|
6d2d915d7bec4e4a8e733a073ec3dc79a1d06812
|
src/stop.py
|
src/stop.py
|
import os
import json
from flask import Flask
from flask import request
from flask import json
import services
app = Flask(__name__)
digitransitAPIService = services.DigitransitAPIService()
@app.route('/')
def hello_world():
return 'Hello World!'
@app.route('/test')
def digitransit_test():
return json.dumps(digitransitAPIService.get_stops(60.203978, 24.9633573))
@app.route('/stops', methods=['GET'])
def stops():
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
result = digitransitAPIService.get_stops(lat, lon)
print(result)
return json.dumps(result)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=os.getenv('PORT', '5000'))
|
import os
import json
from flask import Flask
from flask import make_response
from flask import request
from flask import json
import services
app = Flask(__name__)
digitransitAPIService = services.DigitransitAPIService()
@app.route('/')
def hello_world():
return 'Hello World!'
@app.route('/test')
def digitransit_test():
return json.dumps(digitransitAPIService.get_stops(60.203978, 24.9633573))
@app.route('/stops', methods=['GET'])
def stops():
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
result = digitransitAPIService.get_stops(lat, lon)
resp = make_response(json.dumps(result))
resp.mimetype = 'application/json'
return resp
if __name__ == '__main__':
app.run(host='0.0.0.0', port=os.getenv('PORT', '5000'))
|
Set response content type of a json response to application/json
|
Set response content type of a json response to application/json
|
Python
|
mit
|
STOP2/stop2.0-backend,STOP2/stop2.0-backend
|
7cfdf48bd04ba45a962901e1778ba05bab4699e6
|
readthedocs/core/migrations/0005_migrate-old-passwords.py
|
readthedocs/core/migrations/0005_migrate-old-passwords.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-11 17:28
from __future__ import unicode_literals
from django.db import migrations
def forwards_func(apps, schema_editor):
User = apps.get_model('auth', 'User')
old_password_patterns = (
'sha1$',
# RTD's production database doesn't have any of these
# but they are included for completeness
'md5$',
'crypt$',
)
for pattern in old_password_patterns:
users = User.objects.filter(password__startswith=pattern)
for user in users:
user.set_unusable_password()
user.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0004_ad-opt-out'),
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.RunPython(forwards_func),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-11 17:28
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.hashers import make_password
def forwards_func(apps, schema_editor):
User = apps.get_model('auth', 'User')
old_password_patterns = (
'sha1$',
# RTD's production database doesn't have any of these
# but they are included for completeness
'md5$',
'crypt$',
)
for pattern in old_password_patterns:
users = User.objects.filter(password__startswith=pattern)
for user in users:
user.password = make_password(None)
user.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0004_ad-opt-out'),
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.RunPython(forwards_func),
]
|
Migrate old passwords without "set_unusable_password"
|
Migrate old passwords without "set_unusable_password"
|
Python
|
mit
|
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
|
673f4ad22ccd14f9feb68cfc3afc1f34580c0a51
|
test/teeminus10_helpers_test.py
|
test/teeminus10_helpers_test.py
|
from teeminus10_helpers import *
import unittest
class TestInTimeOfDay(unittest.TestCase):
def setUp(self):
self.location = ephem.city('London')
self.location.date = datetime(2013, 03, 14, 9, 0, 0)
self.pass_day_time = datetime(2013, 03, 14, 12, 0, 0)
self.pass_night_time = datetime(2013, 03, 14, 0, 0, 0)
self.sun = ephem.Sun("2013/03/14")
def test_pass_in_whatever_time(self):
self.assertTrue(in_time_of_day(self.location, self.pass_day_time, "whatever"))
self.assertTrue(in_time_of_day(self.location, self.pass_night_time, "whatever"))
def test_pass_in_day_time(self):
self.assertTrue(in_time_of_day(self.location, self.pass_day_time, "day"))
self.assertFalse(in_time_of_day(self.location, self.pass_night_time, "day"))
def test_pass_in_night_time(self):
self.assertFalse(in_time_of_day(self.location, self.pass_day_time, "night"))
self.assertTrue(in_time_of_day(self.location, self.pass_night_time, "night"))
|
from teeminus10_helpers import *
import unittest
class TestInTimeOfDay(unittest.TestCase):
def setUp(self):
self.location = ephem.city('London')
self.location.date = datetime(2013, 03, 14, 9, 0, 0)
self.pass_day_time = datetime(2013, 03, 14, 12, 0, 0)
self.pass_night_time = datetime(2013, 03, 14, 0, 0, 0)
def test_pass_in_whatever_time(self):
self.assertTrue(in_time_of_day(self.location, self.pass_day_time, "whatever"))
self.assertTrue(in_time_of_day(self.location, self.pass_night_time, "whatever"))
def test_pass_in_day_time(self):
self.assertTrue(in_time_of_day(self.location, self.pass_day_time, "day"))
self.assertFalse(in_time_of_day(self.location, self.pass_night_time, "day"))
def test_pass_in_night_time(self):
self.assertFalse(in_time_of_day(self.location, self.pass_day_time, "night"))
self.assertTrue(in_time_of_day(self.location, self.pass_night_time, "night"))
|
Remove sun setup for now
|
Remove sun setup for now
|
Python
|
mit
|
jpgneves/t-10_server,jpgneves/t-10_server
|
848c3a8b754d7a359da94c211f58d16bdf34c804
|
fabfile.py
|
fabfile.py
|
# -*- coding: utf-8 -*-
u"""
.. module:: fabfile
Be aware, that becaus fabric doesn't support py3k You need to execute this
particular script using Python 2.
"""
import contextlib
from fabric.api import cd
from fabric.api import env
from fabric.api import prefix
from fabric.api import run
env.user = 'root'
env.hosts = ['wysadzulice.pl']
env.forward_agent = True
def update():
u"""Function defining all steps required to properly update application."""
with contextlib.nested(
cd('/var/www/wysadzulice_pl'),
prefix('workon wysadzulice_pl')
):
run('git pull')
run('git checkout master')
run('python manage.py migrate --traceback')
run('service apache2 restart')
|
# -*- coding: utf-8 -*-
u"""
.. module:: fabfile
Be aware, that becaus fabric doesn't support py3k You need to execute this
particular script using Python 2.
"""
import contextlib
from fabric.api import cd
from fabric.api import env
from fabric.api import prefix
from fabric.api import run
env.user = 'root'
env.hosts = ['wysadzulice.pl']
env.forward_agent = True
def update():
u"""Function defining all steps required to properly update application."""
with contextlib.nested(
cd('/var/www/wysadzulice_pl'),
prefix('workon wysadzulice_pl')
):
run('git pull')
run('git checkout master')
run('python manage.py migrate --traceback')
run('npm cache clear')
run('rm -rf ./node_modules')
run('npm install')
run('gulp build')
run('service apache2 restart')
|
Add js tasks to fabric update
|
Add js tasks to fabric update
|
Python
|
mit
|
komitywa/wysadzulice.pl,magul/wysadzulice.pl,magul/wysadzulice.pl,magul/wysadzulice.pl,komitywa/wysadzulice.pl,komitywa/wysadzulice.pl
|
db4ecaba64a4fbd9d432b461ca0df5b63dd11fb4
|
marathon_acme/cli.py
|
marathon_acme/cli.py
|
import argparse
import sys
def main(raw_args=sys.argv[1:]):
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Marathon and served by marathon-lb.
"""
parser = argparse.ArgumentParser(
description='Automatically manage ACME certificates for Marathon apps')
parser.add_argument('-a', '--acme',
help='The address for the ACME Directory Resource '
'(default: %(default)s)',
default=(
'https://acme-v01.api.letsencrypt.org/directory'))
parser.add_argument('-m', '--marathon', nargs='+',
help='The address for the Marathon HTTP API (default: '
'%(default)s)',
default='http://marathon.mesos:8080')
parser.add_argument('-l', '--lb', nargs='+',
help='The address for the marathon-lb HTTP API '
'(default: %(default)s)',
default='http://marathon-lb.marathon.mesos:9090')
parser.add_argument('storage-dir',
help='Path to directory for storing certificates')
args = parser.parse_args(raw_args) # noqa
if __name__ == '__main__':
main()
|
import argparse
import sys
def main(raw_args=sys.argv[1:]):
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Marathon and served by marathon-lb.
"""
parser = argparse.ArgumentParser(
description='Automatically manage ACME certificates for Marathon apps')
parser.add_argument('-a', '--acme',
help='The address for the ACME Directory Resource '
'(default: %(default)s)',
default=(
'https://acme-v01.api.letsencrypt.org/directory'))
parser.add_argument('-m', '--marathon', nargs='+',
help='The address for the Marathon HTTP API (default: '
'%(default)s)',
default='http://marathon.mesos:8080')
parser.add_argument('-l', '--lb', nargs='+',
help='The address for the marathon-lb HTTP API '
'(default: %(default)s)',
default='http://marathon-lb.marathon.mesos:9090')
parser.add_argument('-g', '--group',
help='The marathon-lb group to issue certificates for '
'(default: %(default)s)',
default='external')
parser.add_argument('storage-dir',
help='Path to directory for storing certificates')
args = parser.parse_args(raw_args) # noqa
if __name__ == '__main__':
main()
|
Add --group option to CLI
|
Add --group option to CLI
|
Python
|
mit
|
praekeltfoundation/certbot,praekeltfoundation/certbot
|
316533b3d0864c3cf3dba7ae7a3a83e30a02f33a
|
scrape-10k.py
|
scrape-10k.py
|
import csv
import time
import requests
import lxml.html
top10k = {}
for page_index in range(1, 201):
print('Requesting page {}'.format(page_index))
url = 'https://osu.ppy.sh/p/pp/'
payload = {
'm': 0, # osu! standard gamemode
'o': 1, # descending order
'page': page_index,
}
page = requests.get(url, params=payload)
tree = lxml.html.document_fromstring(page.text)
print('Processing page {}'.format(page_index))
rows = tree.cssselect('tr a')
for row in rows:
user_name = row.text
user_id = row.attrib['href'][3:]
top10k[user_id] = user_name
print(user_name, user_id)
time.sleep(1) # Be nice and slow down
with open('names.csv', 'a', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',')
for user_id, user_name in top10k.items():
writer.writerow([user_id, user_name])
|
import csv
import time
import collections
import requests
import lxml.html
top10k = collections.OrderedDict()
for page_index in range(1, 201):
print('Requesting page {}'.format(page_index))
url = 'https://osu.ppy.sh/p/pp/'
payload = {
'm': 0, # osu! standard gamemode
'o': 1, # descending order
'page': page_index,
}
page = requests.get(url, params=payload)
tree = lxml.html.document_fromstring(page.text)
print('Processing page {}'.format(page_index))
rows = tree.cssselect('tr a')
for row in rows:
user_name = row.text
user_id = row.attrib['href'][3:]
top10k[user_id] = user_name
print(user_name, user_id)
time.sleep(1) # Be nice and slow down
with open('names.csv', 'a', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',')
for user_id, user_name in top10k.items():
writer.writerow([user_id, user_name])
|
Maintain top 10k order when writing into file
|
Maintain top 10k order when writing into file
|
Python
|
mit
|
Cyanogenoid/osu-modspecific-rank
|
89b9bb45b17d457f6cf158330dfde6fe00e78cf4
|
core/storage/statistics/models.py
|
core/storage/statistics/models.py
|
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy model file to keep django happy."""
__author__ = 'Tarashish Mishra'
from core.storage.statistics import django_models
StateCounterModel = django_models.StateCounterModel
StateFeedbackFromReaderModel = django_models.StateFeedbackFromReaderModel
StateRuleAnswerLogModel = django_models.StateRuleAnswerLogModel
|
# coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy model file to keep django happy."""
__author__ = 'Tarashish Mishra'
from core.storage.statistics import django_models
StateCounterModel = django_models.StateCounterModel
StateRuleAnswerLogModel = django_models.StateRuleAnswerLogModel
FeedbackItemModel = django_models.FeedbackItemModel
|
Fix omission in previous commit.
|
Fix omission in previous commit.
|
Python
|
apache-2.0
|
leandrotoledo/oppia,rackstar17/oppia,zgchizi/oppia-uc,nagyistoce/oppia,kennho/oppia,MAKOSCAFEE/oppia,raju249/oppia,hazmatzo/oppia,Atlas-Sailed-Co/oppia,Atlas-Sailed-Co/oppia,mit0110/oppia,brylie/oppia,nagyistoce/oppia,BenHenning/oppia,dippatel1994/oppia,VictoriaRoux/oppia,CMDann/oppia,kennho/oppia,cleophasmashiri/oppia,aldeka/oppia,sunu/oh-missions-oppia-beta,sbhowmik89/oppia,sbhowmik89/oppia,michaelWagner/oppia,raju249/oppia,wangsai/oppia,prasanna08/oppia,sarahfo/oppia,sanyaade-teachings/oppia,anthkris/oppia,BenHenning/oppia,aldeka/oppia,google-code-export/oppia,oppia/oppia,rackstar17/oppia,whygee/oppia,sdulal/oppia,sdulal/oppia,hazmatzo/oppia,virajprabhu/oppia,MaximLich/oppia,kingctan/oppia,Cgruppo/oppia,kevinlee12/oppia,miyucy/oppia,AllanYangZhou/oppia,michaelWagner/oppia,CMDann/oppia,kingctan/oppia,wangsai/oppia,DewarM/oppia,sanyaade-teachings/oppia,terrameijar/oppia,asandyz/oppia,souravbadami/oppia,asandyz/oppia,oppia/oppia,miyucy/oppia,aldeka/oppia,shaz13/oppia,wangsai/oppia,openhatch/oh-missions-oppia-beta,souravbadami/oppia,virajprabhu/oppia,virajprabhu/oppia,Cgruppo/oppia,miyucy/oppia,dippatel1994/oppia,felipecocco/oppia,kaffeel/oppia,bjvoth/oppia,won0089/oppia,whygee/oppia,jestapinski/oppia,nagyistoce/oppia,kevinlee12/oppia,Atlas-Sailed-Co/oppia,sunu/oh-missions-oppia-beta,amgowano/oppia,BenHenning/oppia,dippatel1994/oppia,oulan/oppia,sanyaade-teachings/oppia,kaffeel/oppia,anggorodewanto/oppia,zgchizi/oppia-uc,rackstar17/oppia,brianrodri/oppia,sdulal/oppia,shaz13/oppia,oppia/oppia,whygee/oppia,gale320/oppia,dippatel1994/oppia,MAKOSCAFEE/oppia,Atlas-Sailed-Co/oppia,won0089/oppia,toooooper/oppia,CMDann/oppia,kingctan/oppia,danieljjh/oppia,danieljjh/oppia,VictoriaRoux/oppia,oulan/oppia,oulan/oppia,won0089/oppia,himanshu-dixit/oppia,mindpin/mindpin_oppia,himanshu-dixit/oppia,toooooper/oppia,cleophasmashiri/oppia,MaximLich/oppia,kevinlee12/oppia,won0089/oppia,fernandopinhati/oppia,brylie/oppia,MaximLich/oppia,mit0110/oppia,leandrotoledo/oppia,shaz13/oppia,MAKOS
CAFEE/oppia,asandyz/oppia,CMDann/oppia,hazmatzo/oppia,DewarM/oppia,toooooper/oppia,directorlive/oppia,kennho/oppia,Dev4X/oppia,mindpin/mindpin_oppia,google-code-export/oppia,brylie/oppia,miyucy/oppia,wangsai/oppia,sarahfo/oppia,mit0110/oppia,hazmatzo/oppia,danieljjh/oppia,anggorodewanto/oppia,mindpin/mindpin_oppia,kingctan/oppia,google-code-export/oppia,anggorodewanto/oppia,virajprabhu/oppia,hazmatzo/oppia,zgchizi/oppia-uc,cleophasmashiri/oppia,gale320/oppia,infinyte/oppia,won0089/oppia,Cgruppo/oppia,kennho/oppia,openhatch/oh-missions-oppia-beta,Cgruppo/oppia,himanshu-dixit/oppia,leandrotoledo/oppia,jestapinski/oppia,felipecocco/oppia,Dev4X/oppia,AllanYangZhou/oppia,sdulal/oppia,amitdeutsch/oppia,kevinlee12/oppia,aldeka/oppia,mit0110/oppia,Cgruppo/oppia,cleophasmashiri/oppia,MaximLich/oppia,Atlas-Sailed-Co/oppia,souravbadami/oppia,jestapinski/oppia,google-code-export/oppia,DewarM/oppia,edallison/oppia,fernandopinhati/oppia,infinyte/oppia,prasanna08/oppia,leandrotoledo/oppia,felipecocco/oppia,directorlive/oppia,sunu/oppia,sunu/oppia,raju249/oppia,michaelWagner/oppia,kingctan/oppia,amitdeutsch/oppia,prasanna08/oppia,sbhowmik89/oppia,VictoriaRoux/oppia,sarahfo/oppia,rackstar17/oppia,sanyaade-teachings/oppia,bjvoth/oppia,AllanYangZhou/oppia,sunu/oh-missions-oppia-beta,gale320/oppia,sunu/oppia,gale320/oppia,directorlive/oppia,himanshu-dixit/oppia,openhatch/oh-missions-oppia-beta,infinyte/oppia,google-code-export/oppia,dippatel1994/oppia,brylie/oppia,sunu/oppia,danieljjh/oppia,sarahfo/oppia,anthkris/oppia,directorlive/oppia,CMDann/oppia,souravbadami/oppia,amgowano/oppia,BenHenning/oppia,brianrodri/oppia,edallison/oppia,terrameijar/oppia,wangsai/oppia,bjvoth/oppia,amitdeutsch/oppia,prasanna08/oppia,asandyz/oppia,leandrotoledo/oppia,michaelWagner/oppia,VictoriaRoux/oppia,oppia/oppia,VictoriaRoux/oppia,kevinlee12/oppia,amitdeutsch/oppia,amitdeutsch/oppia,sarahfo/oppia,sdulal/oppia,michaelWagner/oppia,asandyz/oppia,zgchizi/oppia-uc,DewarM/oppia,toooooper/oppia,mit0110/oppia,w
hygee/oppia,Dev4X/oppia,kaffeel/oppia,anggorodewanto/oppia,fernandopinhati/oppia,bjvoth/oppia,sbhowmik89/oppia,danieljjh/oppia,oulan/oppia,nagyistoce/oppia,whygee/oppia,openhatch/oh-missions-oppia-beta,bjvoth/oppia,BenHenning/oppia,amgowano/oppia,directorlive/oppia,edallison/oppia,toooooper/oppia,anthkris/oppia,fernandopinhati/oppia,terrameijar/oppia,oppia/oppia,souravbadami/oppia,brylie/oppia,kennho/oppia,AllanYangZhou/oppia,DewarM/oppia,sbhowmik89/oppia,amgowano/oppia,virajprabhu/oppia,sunu/oh-missions-oppia-beta,sunu/oppia,infinyte/oppia,edallison/oppia,prasanna08/oppia,infinyte/oppia,shaz13/oppia,raju249/oppia,jestapinski/oppia,Dev4X/oppia,sanyaade-teachings/oppia,brianrodri/oppia,nagyistoce/oppia,cleophasmashiri/oppia,Dev4X/oppia,brianrodri/oppia,felipecocco/oppia,fernandopinhati/oppia,mindpin/mindpin_oppia,kaffeel/oppia,felipecocco/oppia,terrameijar/oppia,anthkris/oppia,kaffeel/oppia,MAKOSCAFEE/oppia,gale320/oppia,oulan/oppia,brianrodri/oppia
|
d68935dfb34f7c5fc463f94e49f0c060717b17b8
|
cmsplugin_contact_plus/checks.py
|
cmsplugin_contact_plus/checks.py
|
# -*- coding: utf-8 -*-
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
warn_1_3_changes,
]:
register(check)
|
# -*- coding: utf-8 -*-
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
# warn_1_3_changes, # Might be more annoying than useful
]:
register(check)
|
Comment out warning for renamed field
|
Comment out warning for renamed field
|
Python
|
bsd-3-clause
|
arteria/cmsplugin-contact-plus,arteria/cmsplugin-contact-plus,worthwhile/cmsplugin-remote-form,worthwhile/cmsplugin-remote-form
|
f0f66aa917d9ec85cfbe2a0460b2d4b4d5ffe0eb
|
middleware/hat_manager.py
|
middleware/hat_manager.py
|
class HatManager(object):
def __init__(self, sense):
self.sense = sense
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
def refresh_state(self):
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
@property
def get_humidity(self):
return self._humidity
@property
def get_temperature(self):
return self._temperature
@property
def get_pressure(self):
return self._pressure
|
class HatManager(object):
def __init__(self, sense):
self.sense = sense
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
def refresh_state(self):
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
@property
def get_humidity(self):
return self._humidity
@property
def get_temperature(self):
return self._temperature
@property
def get_pressure(self):
return self._pressure
def set_message(self, msg):
self.sense.show_message(msg, scroll_speed=0.05)
|
Add a method to print a message on the sense hat
|
Add a method to print a message on the sense hat
|
Python
|
mit
|
ylerjen/pir-hat,ylerjen/pir-hat,ylerjen/pir-hat
|
60173acbecf1239872411b2ca0dd9eb75b543843
|
tests/sentry/web/frontend/test_organization_stats.py
|
tests/sentry/web/frontend/test_organization_stats.py
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
def test_org_member_cannot_load(self):
self.assert_org_member_cannot_access(self.path)
def test_org_admin_can_load(self):
self.assert_org_admin_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
def test_org_member_can_load(self):
self.assert_org_member_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization
|
Correct permission tests for organization stats
|
Correct permission tests for organization stats
|
Python
|
bsd-3-clause
|
looker/sentry,alexm92/sentry,gg7/sentry,zenefits/sentry,vperron/sentry,ifduyue/sentry,imankulov/sentry,JamesMura/sentry,daevaorn/sentry,mitsuhiko/sentry,JackDanger/sentry,ewdurbin/sentry,BuildingLink/sentry,daevaorn/sentry,kevinlondon/sentry,songyi199111/sentry,TedaLIEz/sentry,kevinlondon/sentry,wujuguang/sentry,mitsuhiko/sentry,argonemyth/sentry,hongliang5623/sentry,ifduyue/sentry,kevinlondon/sentry,JamesMura/sentry,nicholasserra/sentry,boneyao/sentry,ewdurbin/sentry,TedaLIEz/sentry,vperron/sentry,looker/sentry,ifduyue/sentry,1tush/sentry,BuildingLink/sentry,Kryz/sentry,kevinastone/sentry,gencer/sentry,mvaled/sentry,looker/sentry,boneyao/sentry,jean/sentry,JTCunning/sentry,wong2/sentry,songyi199111/sentry,ngonzalvez/sentry,imankulov/sentry,felixbuenemann/sentry,TedaLIEz/sentry,jean/sentry,JackDanger/sentry,jean/sentry,jean/sentry,ngonzalvez/sentry,BuildingLink/sentry,zenefits/sentry,ewdurbin/sentry,daevaorn/sentry,fuziontech/sentry,JackDanger/sentry,argonemyth/sentry,hongliang5623/sentry,mvaled/sentry,JTCunning/sentry,nicholasserra/sentry,ifduyue/sentry,fotinakis/sentry,korealerts1/sentry,boneyao/sentry,kevinastone/sentry,Natim/sentry,beeftornado/sentry,drcapulet/sentry,gg7/sentry,gencer/sentry,llonchj/sentry,Kryz/sentry,drcapulet/sentry,llonchj/sentry,BayanGroup/sentry,korealerts1/sentry,fotinakis/sentry,vperron/sentry,BayanGroup/sentry,fuziontech/sentry,looker/sentry,drcapulet/sentry,felixbuenemann/sentry,fotinakis/sentry,wong2/sentry,zenefits/sentry,beeftornado/sentry,mvaled/sentry,Natim/sentry,beeftornado/sentry,Kryz/sentry,imankulov/sentry,pauloschilling/sentry,BuildingLink/sentry,gencer/sentry,mvaled/sentry,fuziontech/sentry,alexm92/sentry,Natim/sentry,1tush/sentry,kevinastone/sentry,korealerts1/sentry,JamesMura/sentry,BuildingLink/sentry,ngonzalvez/sentry,pauloschilling/sentry,songyi199111/sentry,wong2/sentry,JamesMura/sentry,zenefits/sentry,mvaled/sentry,wujuguang/sentry,fotinakis/sentry,gencer/sentry,hongliang5623/sentry,gencer/sentry,daevaorn/sentry,paulo
schilling/sentry,nicholasserra/sentry,BayanGroup/sentry,jean/sentry,wujuguang/sentry,1tush/sentry,mvaled/sentry,llonchj/sentry,alexm92/sentry,JamesMura/sentry,argonemyth/sentry,zenefits/sentry,gg7/sentry,ifduyue/sentry,felixbuenemann/sentry,looker/sentry,JTCunning/sentry
|
54e5ee0cb6df1f47a1a6edd114c65ad62fd0c517
|
node/floor_divide.py
|
node/floor_divide.py
|
#!/usr/bin/env python
from nodes import Node
class FloorDiv(Node):
char = "f"
args = 2
results = 1
@Node.test_func([3,2], [1])
@Node.test_func([6,-3], [-2])
def func(self, a:Node.number,b:Node.number):
"""a/b. Rounds down, returns an int."""
return a//b
@Node.test_func(["test", "e"], [["t", "e", "st"]])
def partition(self, string:str, sep:str):
"""Split the string at the first occurrence of sep,
return a 3-list containing the part before the separator,
the separator itself, and the part after the separator.
If the separator is not found,
return a 3-list containing the string itself,
followed by two empty strings."""
return [list(string.partition(sep))]
|
#!/usr/bin/env python
from nodes import Node
class FloorDiv(Node):
char = "f"
args = 2
results = 1
@Node.test_func([3,2], [1])
@Node.test_func([6,-3], [-2])
def func(self, a:Node.number,b:Node.number):
"""a/b. Rounds down, returns an int."""
return a//b
@Node.test_func(["test", "e"], [["t", "e", "st"]])
def partition(self, string:str, sep:str):
"""Split the string at the first occurrence of sep,
return a 3-list containing the part before the separator,
the separator itself, and the part after the separator.
If the separator is not found,
return a 3-list containing the string itself,
followed by two empty strings."""
return [list(string.partition(sep))]
@Node.test_func(["134", 1], [["134"]])
@Node.test_func(["1234", 2], [["12", "34"]])
@Node.test_func(["1234", 3], [["1", "2", "34"]])
@Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]])
def chunk(self, inp:Node.indexable, num:Node.number):
"""Return inp seperated into num groups"""
rtn = []
last = 0
size = len(inp)//num
for i in range(size, len(inp), size):
rtn.append(inp[last:i])
last = i
if len(rtn) != num:
rtn.append(inp[last:])
else:
rtn[-1] += inp[last:]
if len(rtn):
if isinstance(inp, str):
rtn[-1] = "".join(rtn[-1])
else:
rtn[-1] = type(inp)(rtn[-1])
return [rtn]
|
Add a group chunk, chunks a list into N groups
|
Add a group chunk, chunks a list into N groups
|
Python
|
mit
|
muddyfish/PYKE,muddyfish/PYKE
|
80737e5de2ca3f0f039c9d4fbbf3df4ac8b59193
|
run.py
|
run.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import twitter_rss
import time
import subprocess
import config
# Launch web server
p = subprocess.Popen(['/usr/bin/python2', config.INSTALL_DIR + 'server.py'])
# Update the feeds
try:
while 1:
print 'Updating ALL THE FEEDS!'
try:
with open(config.XML_DIR + 'user/user.txt', 'r') as usernames:
for user in usernames:
twitter_rss.UserTweetGetter(user)
usernames.close()
with open(config.XML_DIR + 'htag/htag.txt', 'r') as hashtags:
for htag in hashtags:
twitter_rss.HashtagTweetGetter(user)
hashtags.close()
except IOError:
print 'File could not be read'
time.sleep(config.TIMER)
except (KeyboardInterrupt, SystemExit):
p.kill() # kill the subprocess
print '\nKeyboardInterrupt catched -- Finishing program.'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import twitter_rss
import time
import subprocess
import config
import sys
# Launch web server
p = subprocess.Popen([sys.executable, config.INSTALL_DIR + 'server.py'])
# Update the feeds
try:
while 1:
print 'Updating ALL THE FEEDS!'
try:
with open(config.XML_DIR + 'user/user.txt', 'r') as usernames:
for user in usernames:
twitter_rss.UserTweetGetter(user)
usernames.close()
with open(config.XML_DIR + 'htag/htag.txt', 'r') as hashtags:
for htag in hashtags:
twitter_rss.HashtagTweetGetter(user)
hashtags.close()
except IOError:
print 'File could not be read'
time.sleep(config.TIMER)
except (KeyboardInterrupt, SystemExit):
p.kill() # kill the subprocess
print '\nKeyboardInterrupt catched -- Finishing program.'
|
Use sys.executable instead of harcoded python path
|
Use sys.executable instead of harcoded python path
Fixes issue when running in a virtualenv and in non-standard python
installations.
|
Python
|
mit
|
Astalaseven/twitter-rss,Astalaseven/twitter-rss
|
d7d1e2937c9f09189aad713db1f5ee5d2d6a64bd
|
run.py
|
run.py
|
# -*- coding: utf-8 -*-
"""
This script generates all the relevant figures from the experiment.
"""
from Modules.processing import *
from Modules.plotting import *
set_sns()
save = True
savetype = ".eps"
show = True
def main():
plot_perf_curves(save=save, savetype=savetype)
plot_perf_curves(subplots=False, save=save, savetype=savetype)
plot_perf_re_dep(save=save, savetype=savetype, errorbars=False,
dual_xaxes=True)
PerfCurve(1.0).plotcp(save=save, savetype=savetype, show=False)
wm = WakeMap()
wm.plot_meancontquiv(save=save, savetype=savetype)
wm.plot_k(save=save, savetype=savetype)
plot_no_blades_all(save=save, savetype=savetype)
plot_cp_covers(save=save, savetype=savetype, add_strut_torque=False)
plot_cp_covers(save=save, savetype=savetype, add_strut_torque=True)
if show:
plt.show()
if __name__ == "__main__":
if not os.path.isdir("Figures"):
os.mkdir("Figures")
main()
|
# -*- coding: utf-8 -*-
"""
This script generates all the relevant figures from the experiment.
"""
from Modules.processing import *
from Modules.plotting import *
set_sns()
save = True
savetype = ".eps"
show = True
def main():
plot_perf_curves(save=save, savetype=savetype)
plot_perf_curves(subplots=False, save=save, savetype=savetype)
plot_perf_re_dep(save=save, savetype=savetype, errorbars=False,
dual_xaxes=True)
PerfCurve(1.0).plotcp(save=save, savetype=savetype, show=False)
wm = WakeMap()
wm.plot_meancontquiv(save=save, savetype=savetype)
wm.plot_k(save=save, savetype=savetype)
wm.make_K_bar_graph(save=save, savetype=savetype)
plot_no_blades_all(save=save, savetype=savetype)
plot_cp_covers(save=save, savetype=savetype, add_strut_torque=False)
plot_cp_covers(save=save, savetype=savetype, add_strut_torque=True)
if show:
plt.show()
if __name__ == "__main__":
if not os.path.isdir("Figures"):
os.mkdir("Figures")
main()
|
Make K transport bar graph
|
Make K transport bar graph
|
Python
|
mit
|
UNH-CORE/RM2-tow-tank
|
5f14b7217f81b6d7653f94065d1a3305204cf83b
|
ddcz/templatetags/creations.py
|
ddcz/templatetags/creations.py
|
from django import template
from django.contrib.staticfiles.storage import staticfiles_storage
from ..creations import RATING_DESCRIPTIONS
register = template.Library()
@register.inclusion_tag('creations/rating.html')
def creation_rating(rating, skin):
return {
'rating_description': RATING_DESCRIPTIONS[round(rating)],
'rating': range(rating),
'skin': skin,
'skin_rating_star_url': staticfiles_storage.url("skins/%s/img/rating-star.gif" % skin),
}
|
from django import template
from django.contrib.staticfiles.storage import staticfiles_storage
from ..creations import RATING_DESCRIPTIONS
register = template.Library()
@register.inclusion_tag('creations/rating.html')
def creation_rating(rating, skin):
return {
'rating_description': "Hodnocení: %s" % RATING_DESCRIPTIONS[round(rating)],
'rating': range(rating),
'skin': skin,
'skin_rating_star_url': staticfiles_storage.url("skins/%s/img/rating-star.gif" % skin),
}
|
Add explicit rating word to rating alt
|
Add explicit rating word to rating alt
|
Python
|
mit
|
dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard
|
6b59d17aa06741f40bb99dde6c10950de3a142e6
|
utils/load.py
|
utils/load.py
|
#!/usr/local/bin/python
from website import carts
import urllib2
import json
def load():
carts.remove_all()
host = 'http://data.cityofnewyork.us/resource/xfyi-uyt5.json'
for i in range(0, 7000, 1000):
query = 'permit_type_description=MOBILE+FOOD+UNIT&$offset=%d' % i
request = host + '?' + query
data = urllib2.urlopen(request)
results = json.loads(data.read())
data.close()
required_keys = ['longitude_wgs84', 'latitude_wgs84', 'street', 'address', 'zip_code', 'borough', 'license_permit_holder']
for r in results:
for k in required_keys:
if not r.has_key(k):
r[k] = ''
carts.insert(lat=r['latitude_wgs84'], lng=r['longitude_wgs84'],
address=r['address'] + ' ' + r['street'],
zip_code=r['zip_code'], borough=r['borough'],
name=r['license_permit_holder'])
out = [c for c in carts.find()]
print len(out)
|
#!/usr/local/bin/python
from website import carts
import urllib2
import json
def load():
carts.remove_all()
request = 'http://data.cityofnewyork.us/resource/akqf-qv4n.json'
for i in range(0, 24000, 1000):
query = '?$offset=%d' % i
data = urllib2.urlopen(request + query)
results = json.loads(data.read())
data.close()
required_keys = ['license_permit_holder', 'license_permit_holder_name',
'license_permit_number', 'permit_issuance_date',
'permit_expiration_date', 'longitude_wgs84', 'latitude_wgs84',
'zip_code', 'borough']
for r in results:
for k in required_keys:
if not r.has_key(k):
r[k] = ''
carts.insert(name=r['license_permit_holder'],
owner=r['license_permit_holder_name'],
permit_number=r['license_permit_number'],
issuance=r['permit_issuance_date'],
expiration=r['permit_expiration_date'],
loc=[ float(r['longitude_wgs84']),
float(r['latitude_wgs84']) ],
zip_code=r['zip_code'], borough=r['borough'])
out = [c for c in carts.find()]
print len(out)
|
Change cart structure and url endpoint for getting cart data
|
Change cart structure and url endpoint for getting cart data
|
Python
|
bsd-3-clause
|
stuycs-softdev-fall-2013/proj3-7-cartwheels,stuycs-softdev-fall-2013/proj3-7-cartwheels
|
de1c2842d7f07025f23e9b12efc7dd52e4d0efbf
|
device_notifications/tests/model_tests.py
|
device_notifications/tests/model_tests.py
|
from mock import patch
from django.test.testcases import TestCase
from device_notifications import settings
from device_notifications.models import AbstractBaseDevice
from device_notifications.models import InvalidDeviceType
class ConcreteTestDevice(AbstractBaseDevice):
pass
@patch.object(settings, 'get_device_model', return_value=ConcreteTestDevice)
class AbstractBaseDeviceTests(TestCase):
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='android')
message = 'Hello World'
device.send_message(message)
gcm_send_message_task.apply_async.assert_called_with(
args=[device.pk, message])
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message_bad_device_type(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='windows_phone')
self.assertRaises(InvalidDeviceType, device.send_message, 'Hi')
|
from mock import patch
from django.test.testcases import TestCase
from device_notifications import settings
from device_notifications.models import AbstractBaseDevice
from device_notifications.models import InvalidDeviceType
class ConcreteTestDevice(AbstractBaseDevice):
pass
class AbstractBaseDeviceTests(TestCase):
def setUp(self):
self.get_device_model_patcher = patch.object(
settings,
'get_device_model',
return_value=ConcreteTestDevice)
self.get_device_model_patcher.start()
super(AbstractBaseDeviceTests, self).setUp()
def tearDown(self):
super(AbstractBaseDeviceTests, self).tearDown()
self.get_device_model_patcher.stop()
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='android')
message = 'Hello World'
device.send_message(message)
gcm_send_message_task.apply_async.assert_called_with(
args=[device.pk, message])
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message_bad_device_type(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='windows_phone')
self.assertRaises(InvalidDeviceType, device.send_message, 'Hi')
|
Patch get_device_model in the setUp and tearDown methods so that we don't send the mock object to each test method.
|
Patch get_device_model in the setUp and tearDown methods so that we don't send the mock object to each test method.
|
Python
|
bsd-3-clause
|
roverdotcom/django-device-notifications
|
b6dcb4029d3bf4b402a6874c942c9e4a105f2a62
|
tracker_project/tracker_project/urls.py
|
tracker_project/tracker_project/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout'),
url(r'^', 'tracker_project.views.home', name='home')
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.core.urlresolvers import reverse_lazy
urlpatterns = patterns(
'',
url(r'^$', 'tracker_project.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(
r'^logout/$',
'django.contrib.auth.views.logout',
{'next_page': reverse_lazy('home')},
name='logout'
),
url(r'^tracker/', include('tracker.urls', 'tracker')),
)
|
Fix login and logout URLs
|
Fix login and logout URLs
|
Python
|
mit
|
abarto/tracker_project,abarto/tracker_project,abarto/tracker_project,vivek8943/tracker_project,vivek8943/tracker_project,vivek8943/tracker_project
|
02140561a29a2b7fe50f7bf2402da566e60be641
|
bluebottle/organizations/serializers.py
|
bluebottle/organizations/serializers.py
|
from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True, allow_blank=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
|
from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
|
Make the name of an organization required
|
Make the name of an organization required
|
Python
|
bsd-3-clause
|
jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle
|
33f2075396ded90e3cf17033985f29d262965500
|
dariah_static_data/management/commands/import_tadirah_vcc.py
|
dariah_static_data/management/commands/import_tadirah_vcc.py
|
from dariah_static_data.models import VCC
from dariah_static_data.management.commands._private_helper import Command as SuperCommand
class Command(SuperCommand):
filename = 'tadirah_vcc.csv'
fieldnames = ['uri', 'name', 'description']
mapping = [('name', 'name', 1), ('uri', 'uri', 1), ('description', 'description', 1)] # [('model_fieldname', 'csv_fieldname', required?),...], omit fields that are not in the model
model = VCC
|
from dariah_static_data.models import TADIRAHVCC
from dariah_static_data.management.commands._private_helper import Command as SuperCommand
class Command(SuperCommand):
filename = 'tadirah_vcc.csv'
fieldnames = ['uri', 'name', 'description']
mapping = [('name', 'name', 1), ('uri', 'uri', 1), ('description', 'description', 1)] # [('model_fieldname', 'csv_fieldname', required?),...], omit fields that are not in the model
model = TADIRAHVCC
|
Fix incorrect import after refactor of dariah_static_data models.
|
Fix incorrect import after refactor of dariah_static_data models.
|
Python
|
apache-2.0
|
DANS-KNAW/dariah-contribute,DANS-KNAW/dariah-contribute
|
add508b780d16fd2da2fd0639304935b762c001f
|
tests/cupy_tests/binary_tests/test_packing.py
|
tests/cupy_tests/binary_tests/test_packing.py
|
import unittest
from cupy import testing
@testing.gpu
class TestPacking(unittest.TestCase):
_multiprocess_can_split_ = True
|
import numpy
import unittest
from cupy import testing
@testing.gpu
class TestPacking(unittest.TestCase):
_multiprocess_can_split_ = True
@testing.for_int_dtypes()
@testing.numpy_cupy_array_equal()
def check_packbits(self, data, xp, dtype):
a = xp.array(data, dtype=dtype)
return xp.packbits(a)
@testing.numpy_cupy_array_equal()
def check_unpackbits(self, data, xp):
a = xp.array(data, dtype=xp.uint8)
return xp.unpackbits(a)
def test_packbits(self):
self.check_packbits([])
self.check_packbits([0])
self.check_packbits([1])
self.check_packbits([0, 1])
self.check_packbits([1, 0, 1, 1, 0, 1, 1, 1])
self.check_packbits([1, 0, 1, 1, 0, 1, 1, 1, 1])
self.check_packbits(numpy.arange(24).reshape((2, 3, 4)) % 2)
def test_unpackbits(self):
self.check_unpackbits([])
self.check_unpackbits([0])
self.check_unpackbits([1])
self.check_unpackbits([255])
self.check_unpackbits([100, 200, 123, 213])
|
Add tests for packbits and unpackbits
|
Add tests for packbits and unpackbits
|
Python
|
mit
|
okuta/chainer,niboshi/chainer,ktnyt/chainer,chainer/chainer,ktnyt/chainer,jnishi/chainer,ysekky/chainer,pfnet/chainer,wkentaro/chainer,keisuke-umezawa/chainer,ktnyt/chainer,chainer/chainer,hvy/chainer,jnishi/chainer,anaruse/chainer,wkentaro/chainer,keisuke-umezawa/chainer,ronekko/chainer,niboshi/chainer,chainer/chainer,kashif/chainer,okuta/chainer,rezoo/chainer,keisuke-umezawa/chainer,cupy/cupy,chainer/chainer,hvy/chainer,jnishi/chainer,ktnyt/chainer,niboshi/chainer,cupy/cupy,keisuke-umezawa/chainer,okuta/chainer,okuta/chainer,jnishi/chainer,hvy/chainer,wkentaro/chainer,delta2323/chainer,cupy/cupy,kiyukuta/chainer,tkerola/chainer,cupy/cupy,niboshi/chainer,hvy/chainer,aonotas/chainer,wkentaro/chainer
|
09ef3ba394faf9edc941e30e5c3f86bffa96d645
|
plugins/eightball.py
|
plugins/eightball.py
|
# Copyright (c) 2013-2014 Molly White
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from plugins.util import command
from random import choice
@command("8ball", "8-ball")
def eightball(m):
"""Returns 8-ball advice."""
with open(m.bot.base_path + '/plugins/responses/8ball.txt', 'r') as replies:
lines = replies.read().splitlines()
m.bot.private_message(m.location, choice(lines))
|
# Copyright (c) 2013-2014 Molly White
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from plugins.util import command
from random import choice
@command("8ball", "8-ball")
def eightball(m):
"""Returns 8-ball advice."""
#- !8ball [question]
#-
#- ```irc
#- < GorillaWarfare> !8ball
#- < GorillaBot> Most likely.
#- ```
#-
#- Returns a magic 8 ball response.
with open(m.bot.base_path + '/plugins/responses/8ball.txt', 'r') as replies:
lines = replies.read().splitlines()
m.bot.private_message(m.location, choice(lines))
|
Add documentation for 8ball command
|
Add documentation for 8ball command
|
Python
|
mit
|
quanticle/GorillaBot,molly/GorillaBot,quanticle/GorillaBot,molly/GorillaBot
|
7c5061e4fbf0737ce07f13cb9102cdbbacf73115
|
pyethapp/tests/test_genesis.py
|
pyethapp/tests/test_genesis.py
|
import pytest
from ethereum import blocks
from ethereum.db import DB
from ethereum.config import Env
from pyethapp.utils import merge_dict
from pyethapp.utils import update_config_from_genesis_json
import pyethapp.config as konfig
from pyethapp.profiles import PROFILES
def check_genesis(profile):
config = dict(eth=dict())
# Set config values based on profile selection
merge_dict(config, PROFILES[profile])
# Load genesis config
update_config_from_genesis_json(config, config['eth']['genesis'])
konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}})
print config['eth'].keys()
bc = config['eth']['block']
print bc.keys()
env = Env(DB(), bc)
genesis = blocks.genesis(env)
print 'genesis.hash', genesis.hash.encode('hex')
print 'expected', config['eth']['genesis_hash']
assert genesis.hash == config['eth']['genesis_hash'].decode('hex')
@pytest.mark.xfail # FIXME
def test_olympic():
check_genesis('olympic')
def test_frontier():
check_genesis('frontier')
if __name__ == '__main__':
test_genesis()
|
from pprint import pprint
import pytest
from ethereum import blocks
from ethereum.db import DB
from ethereum.config import Env
from pyethapp.utils import merge_dict
from pyethapp.utils import update_config_from_genesis_json
import pyethapp.config as konfig
from pyethapp.profiles import PROFILES
@pytest.mark.parametrize('profile', PROFILES.keys())
def test_profile(profile):
config = dict(eth=dict())
konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}})
# Set config values based on profile selection
merge_dict(config, PROFILES[profile])
# Load genesis config
update_config_from_genesis_json(config, config['eth']['genesis'])
bc = config['eth']['block']
pprint(bc)
env = Env(DB(), bc)
genesis = blocks.genesis(env)
assert genesis.hash.encode('hex') == config['eth']['genesis_hash']
|
Fix & cleanup profile genesis tests
|
Fix & cleanup profile genesis tests
|
Python
|
mit
|
ethereum/pyethapp,gsalgado/pyethapp,gsalgado/pyethapp,changwu-tw/pyethapp,RomanZacharia/pyethapp,changwu-tw/pyethapp,RomanZacharia/pyethapp,ethereum/pyethapp
|
71d42d763bdb2d0c1bd8474a4da99695d5b77f91
|
alg_selection_sort.py
|
alg_selection_sort.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(nums):
"""Selection Sort algortihm.
Time complexity: O(n^2).
Space complexity: O(1).
"""
# Start from the last num, iteratively select next max num to swap them.
for i in reversed(range(len(nums))):
max_i = 0
for j in range(1, i + 1):
# Update max pos max_i to get max num in loop i.
if nums[j] > nums[max_i]:
max_i = j
nums[max_i], nums[i] = nums[i], nums[max_i]
def main():
nums = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('By selection sort: ')
selection_sort(nums)
print(nums)
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(nums):
"""Selection Sort algortihm.
Time complexity: O(n^2).
Space complexity: O(1).
"""
# Start from the last num, iteratively select max num to swap.
for i in reversed(range(len(nums))):
i_max = 0
for j in range(1, i + 1):
if nums[j] > nums[i_max]:
i_max = j
nums[i_max], nums[i] = nums[i], nums[i_max]
def main():
nums = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('By selection sort: ')
selection_sort(nums)
print(nums)
if __name__ == '__main__':
main()
|
Revise to i_max and enhance comments
|
Revise to i_max and enhance comments
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
a8d3790e4ef539c2a833fa493aeef4456b4a5dbb
|
unchecked_repos.py
|
unchecked_repos.py
|
#!/usr/bin/env python
"""List repos missing from repos.yaml."""
from __future__ import print_function
import yaml
from helpers import paginated_get
REPOS_URL = "https://api.github.com/orgs/{org}/repos"
# This is hacky; you need to have repo-tools-data cloned locally one dir up.
# To do this properly, you should use yamldata.py
with open("../repo-tools-data/repos.yaml") as repos_yaml:
tracked_repos = yaml.load(repos_yaml)
repos = list(paginated_get(REPOS_URL.format(org="edX")))
shown_any = False
for r in repos:
if not r['private'] and not r['fork']:
if r['full_name'] not in tracked_repos:
if not shown_any:
print("\n### Untracked repos:")
print("{r[full_name]}: {r[description]}".format(r=r))
shown_any = True
shown_any = False
actual_repos = set(r['full_name'] for r in repos)
for tracked in tracked_repos:
if tracked not in actual_repos:
if not shown_any:
print("\n### Disappeared repos:")
print(tracked)
shown_any = True
|
#!/usr/bin/env python
"""List repos missing from repos.yaml."""
from __future__ import print_function
import yaml
from helpers import paginated_get
REPOS_URL = "https://api.github.com/orgs/{org}/repos"
# This is hacky; you need to have repo-tools-data cloned locally one dir up.
# To do this properly, you should use yamldata.py
with open("../repo-tools-data/repos.yaml") as repos_yaml:
tracked_repos = yaml.load(repos_yaml)
ORGS = ["edX", "edx-solutions"]
repos = []
for org in ORGS:
repos.extend(paginated_get(REPOS_URL.format(org=org)))
shown_any = False
for r in repos:
if not r['private'] and not r['fork']:
if r['full_name'] not in tracked_repos:
if not shown_any:
print("\n### Untracked repos:")
print("{r[full_name]}: {r[description]}".format(r=r))
shown_any = True
shown_any = False
actual_repos = set(r['full_name'] for r in repos)
for tracked in tracked_repos:
if tracked not in actual_repos:
if not shown_any:
print("\n### Disappeared repos:")
print(tracked)
shown_any = True
|
Check for unchecked repos in more than just the edx org
|
Check for unchecked repos in more than just the edx org
|
Python
|
apache-2.0
|
edx/repo-tools,edx/repo-tools
|
4a9d1a373b5a460f1e793dd94d0c248e81b75f40
|
website/addons/box/settings/defaults.py
|
website/addons/box/settings/defaults.py
|
# OAuth app keys
BOX_KEY = None
BOX_SECRET = None
BOX_AUTH_CSRF_TOKEN = 'box-auth-csrf-token'
|
# OAuth app keys
BOX_KEY = None
BOX_SECRET = None
BOX_OAUTH_TOKEN_ENDPOINT = 'https://www.box.com/api/oauth2/token'
BOX_OAUTH_AUTH_ENDPOINT = 'https://www.box.com/api/oauth2/authorize'
|
Add oauth endpoints to settings
|
Add oauth endpoints to settings
|
Python
|
apache-2.0
|
caneruguz/osf.io,GageGaskins/osf.io,GageGaskins/osf.io,ticklemepierce/osf.io,ticklemepierce/osf.io,kwierman/osf.io,samchrisinger/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,DanielSBrown/osf.io,zkraime/osf.io,MerlinZhang/osf.io,mfraezz/osf.io,pattisdr/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,asanfilippo7/osf.io,fabianvf/osf.io,RomanZWang/osf.io,brandonPurvis/osf.io,barbour-em/osf.io,erinspace/osf.io,alexschiller/osf.io,abought/osf.io,baylee-d/osf.io,zamattiac/osf.io,petermalcolm/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,ZobairAlijan/osf.io,barbour-em/osf.io,billyhunt/osf.io,CenterForOpenScience/osf.io,GageGaskins/osf.io,caseyrygt/osf.io,cosenal/osf.io,jnayak1/osf.io,ZobairAlijan/osf.io,lyndsysimon/osf.io,jinluyuan/osf.io,monikagrabowska/osf.io,cldershem/osf.io,Ghalko/osf.io,TomHeatwole/osf.io,ckc6cz/osf.io,leb2dg/osf.io,GaryKriebel/osf.io,jolene-esposito/osf.io,cosenal/osf.io,alexschiller/osf.io,caseyrollins/osf.io,aaxelb/osf.io,Nesiehr/osf.io,binoculars/osf.io,cwisecarver/osf.io,KAsante95/osf.io,felliott/osf.io,barbour-em/osf.io,adlius/osf.io,himanshuo/osf.io,SSJohns/osf.io,doublebits/osf.io,himanshuo/osf.io,petermalcolm/osf.io,lamdnhan/osf.io,lamdnhan/osf.io,mluo613/osf.io,zachjanicki/osf.io,ckc6cz/osf.io,KAsante95/osf.io,kwierman/osf.io,emetsger/osf.io,ZobairAlijan/osf.io,sbt9uc/osf.io,felliott/osf.io,mluke93/osf.io,reinaH/osf.io,cslzchen/osf.io,samanehsan/osf.io,asanfilippo7/osf.io,jeffreyliu3230/osf.io,DanielSBrown/osf.io,samanehsan/osf.io,ticklemepierce/osf.io,cldershem/osf.io,ZobairAlijan/osf.io,icereval/osf.io,brandonPurvis/osf.io,danielneis/osf.io,baylee-d/osf.io,amyshi188/osf.io,icereval/osf.io,reinaH/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,kch8qx/osf.io,dplorimer/osf,cslzchen/osf.io,jnayak1/osf.io,samchrisinger/osf.io,zkraime/osf.io,TomHeatwole/osf.io,zachjanicki/osf.io,sbt9uc/osf.io,billyhunt/osf.io,crcresearch/osf.io,mluo613/osf.io,chrisseto/osf.io,doublebits/osf.io,jeffreyliu3230/osf.io,pattisdr/osf.io,MerlinZhang/osf.io,danie
lneis/osf.io,dplorimer/osf,danielneis/osf.io,chrisseto/osf.io,abought/osf.io,zamattiac/osf.io,hmoco/osf.io,kushG/osf.io,chrisseto/osf.io,jolene-esposito/osf.io,reinaH/osf.io,abought/osf.io,aaxelb/osf.io,kch8qx/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,SSJohns/osf.io,njantrania/osf.io,Johnetordoff/osf.io,haoyuchen1992/osf.io,acshi/osf.io,doublebits/osf.io,mluo613/osf.io,jeffreyliu3230/osf.io,Johnetordoff/osf.io,sbt9uc/osf.io,ckc6cz/osf.io,hmoco/osf.io,cslzchen/osf.io,chennan47/osf.io,amyshi188/osf.io,laurenrevere/osf.io,TomBaxter/osf.io,erinspace/osf.io,RomanZWang/osf.io,mfraezz/osf.io,bdyetton/prettychart,zamattiac/osf.io,caneruguz/osf.io,leb2dg/osf.io,KAsante95/osf.io,cwisecarver/osf.io,rdhyee/osf.io,barbour-em/osf.io,GaryKriebel/osf.io,MerlinZhang/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,rdhyee/osf.io,Nesiehr/osf.io,jnayak1/osf.io,chrisseto/osf.io,Ghalko/osf.io,crcresearch/osf.io,dplorimer/osf,HarryRybacki/osf.io,mattclark/osf.io,cosenal/osf.io,caseyrollins/osf.io,fabianvf/osf.io,wearpants/osf.io,HarryRybacki/osf.io,chennan47/osf.io,caneruguz/osf.io,Ghalko/osf.io,Nesiehr/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,emetsger/osf.io,SSJohns/osf.io,jeffreyliu3230/osf.io,HarryRybacki/osf.io,RomanZWang/osf.io,caseyrollins/osf.io,kushG/osf.io,GageGaskins/osf.io,TomBaxter/osf.io,MerlinZhang/osf.io,crcresearch/osf.io,haoyuchen1992/osf.io,DanielSBrown/osf.io,lyndsysimon/osf.io,wearpants/osf.io,sloria/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,icereval/osf.io,mluke93/osf.io,njantrania/osf.io,fabianvf/osf.io,revanthkolli/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,jinluyuan/osf.io,kwierman/osf.io,jmcarp/osf.io,zkraime/osf.io,kwierman/osf.io,aaxelb/osf.io,TomHeatwole/osf.io,TomHeatwole/osf.io,Johnetordoff/osf.io,billyhunt/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,acshi/osf.io,HarryRybacki/osf.io,lyndsysimon/osf.io,billyhunt/osf.io,mattclark/osf.io,mluke93/osf.io,Ghalko/osf.io,hmoco/osf.io,petermalcolm/osf.io,njantrania/osf.io,acshi/osf.io,haoyuchen1992/osf.io,c
slzchen/osf.io,mluo613/osf.io,amyshi188/osf.io,acshi/osf.io,cldershem/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,billyhunt/osf.io,jolene-esposito/osf.io,jmcarp/osf.io,jmcarp/osf.io,wearpants/osf.io,RomanZWang/osf.io,jinluyuan/osf.io,caseyrygt/osf.io,arpitar/osf.io,pattisdr/osf.io,danielneis/osf.io,aaxelb/osf.io,saradbowman/osf.io,bdyetton/prettychart,doublebits/osf.io,baylee-d/osf.io,cosenal/osf.io,njantrania/osf.io,kushG/osf.io,emetsger/osf.io,laurenrevere/osf.io,amyshi188/osf.io,sloria/osf.io,GaryKriebel/osf.io,samanehsan/osf.io,bdyetton/prettychart,revanthkolli/osf.io,GaryKriebel/osf.io,himanshuo/osf.io,hmoco/osf.io,monikagrabowska/osf.io,revanthkolli/osf.io,binoculars/osf.io,ckc6cz/osf.io,asanfilippo7/osf.io,rdhyee/osf.io,wearpants/osf.io,zachjanicki/osf.io,himanshuo/osf.io,mattclark/osf.io,caneruguz/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,abought/osf.io,jmcarp/osf.io,fabianvf/osf.io,kch8qx/osf.io,revanthkolli/osf.io,alexschiller/osf.io,CenterForOpenScience/osf.io,kushG/osf.io,jnayak1/osf.io,RomanZWang/osf.io,brandonPurvis/osf.io,arpitar/osf.io,chennan47/osf.io,dplorimer/osf,CenterForOpenScience/osf.io,arpitar/osf.io,bdyetton/prettychart,mfraezz/osf.io,rdhyee/osf.io,HalcyonChimera/osf.io,zkraime/osf.io,KAsante95/osf.io,leb2dg/osf.io,mluo613/osf.io,TomBaxter/osf.io,sloria/osf.io,adlius/osf.io,petermalcolm/osf.io,caseyrygt/osf.io,arpitar/osf.io,laurenrevere/osf.io,lyndsysimon/osf.io,samanehsan/osf.io,samchrisinger/osf.io,mfraezz/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,adlius/osf.io,jolene-esposito/osf.io,jinluyuan/osf.io,alexschiller/osf.io,lamdnhan/osf.io,DanielSBrown/osf.io,kch8qx/osf.io,brianjgeiger/osf.io,doublebits/osf.io,saradbowman/osf.io,KAsante95/osf.io,adlius/osf.io,erinspace/osf.io,felliott/osf.io,zamattiac/osf.io,sbt9uc/osf.io,GageGaskins/osf.io,reinaH/osf.io,monikagrabowska/osf.io,cldershem/osf.io,brandonPurvis/osf.io,acshi/osf.io,mluke93/osf.io,caseyrygt/osf.io,lamdnhan/osf.io
|
62d7c94968d70564839b32375fac6608720c2a67
|
backend/pycon/urls.py
|
backend/pycon/urls.py
|
from api.views import GraphQLView
from django.contrib import admin
from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
urlpatterns = [
path("admin/", admin.site.urls),
path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"),
path("user/", include("users.urls")),
path("", include("social_django.urls", namespace="social")),
path("", include("payments.urls")),
]
|
from api.views import GraphQLView
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
urlpatterns = [
path("admin/", admin.site.urls),
path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"),
path("user/", include("users.urls")),
path("", include("social_django.urls", namespace="social")),
path("", include("payments.urls")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Add media url when running in debug mode
|
Add media url when running in debug mode
|
Python
|
mit
|
patrick91/pycon,patrick91/pycon
|
7185c5ef58757949197081808bf237f0111e7a86
|
packages/mono.py
|
packages/mono.py
|
class MonoPackage (Package):
def __init__ (self):
Package.__init__ (self, 'mono', '2.10.6',
sources = [
'http://download.mono-project.com/sources/%{name}/%{name}-%{version}.tar.bz2',
'patches/mono-runtime-relocation.patch'
],
configure_flags = [
'--with-jit=yes',
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
'--enable-quiet-build'
]
)
# Mono (in libgc) likes to fail to build randomly
self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done'
# def prep (self):
# Package.prep (self)
# self.sh ('patch -p1 < "%{sources[1]}"')
def install (self):
Package.install (self)
if Package.profile.name == 'darwin':
self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')
MonoPackage ()
|
class MonoPackage (Package):
def __init__ (self):
Package.__init__ (self, 'mono', '2.10.6',
sources = [
'http://download.mono-project.com/sources/%{name}/%{name}-%{version}.tar.bz2',
'patches/mono-runtime-relocation.patch'
],
configure_flags = [
'--with-jit=yes',
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
'--enable-quiet-build',
]
)
if Package.profile.name == 'darwin':
self.configure_flags.extend ([
# fix build on lion, it uses 64-bit host even with -m32
'--build=i386-apple-darwin11.2.0',
])
# Mono (in libgc) likes to fail to build randomly
self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done'
# def prep (self):
# Package.prep (self)
# self.sh ('patch -p1 < "%{sources[1]}"')
def install (self):
Package.install (self)
if Package.profile.name == 'darwin':
self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')
MonoPackage ()
|
Fix building Mono 32-bit with Mac 10.7 SDK
|
Fix building Mono 32-bit with Mac 10.7 SDK
|
Python
|
mit
|
mono/bockbuild,BansheeMediaPlayer/bockbuild,bl8/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,bl8/bockbuild,bl8/bockbuild,mono/bockbuild
|
6246c26365b2df4cbb91142969aa857c7187e094
|
app/test_base.py
|
app/test_base.py
|
from flask.ext.testing import TestCase
import unittest
from app import app, db
class BaseTestCase(TestCase):
def create_app(self):
app.config.from_object('config.TestingConfiguration')
return app
def setUp(self):
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
def login(self, username, password):
return self.client.post('/login', data=dict(
username=username,
password=password
), follow_redirects=True)
def logout(self):
return self.client.get('/logout', follow_redirects=True)
if __name__ == '__main__':
unittest.main()
|
from flask.ext.testing import TestCase
import unittest
from app import create_app, db
class BaseTestCase(TestCase):
def create_app(self):
return create_app('config.TestingConfiguration')
def setUp(self):
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
def login(self, username, password):
return self.client.post('/login', data=dict(
username=username,
password=password
), follow_redirects=True)
def logout(self):
return self.client.get('/logout', follow_redirects=True)
if __name__ == '__main__':
unittest.main()
|
Update tests to leverage factory pattern
|
Update tests to leverage factory pattern
|
Python
|
mit
|
rtfoley/scorepy,rtfoley/scorepy,rtfoley/scorepy
|
fcd98cc714b5a790eaf2e946c492ab4e14700568
|
scripts/award_badge_to_user.py
|
scripts/award_badge_to_user.py
|
#!/usr/bin/env python
"""Award a badge to a user.
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import click
from byceps.services.user_badge import service as badge_service
from byceps.util.system import get_config_filename_from_env_or_exit
from bootstrap.validators import validate_user_screen_name
from bootstrap.util import app_context
@click.command()
@click.argument('badge_slug')
@click.argument('user', callback=validate_user_screen_name)
def execute(badge_slug, user):
badge = badge_service.find_badge_by_slug(badge_slug)
if badge is None:
raise click.BadParameter('Unknown badge slug "{}".'.format(badge_slug))
click.echo('Awarding badge "{}" to user "{}" ... '
.format(badge.label, user.screen_name), nl=False)
badge_service.award_badge_to_user(badge.id, user.id)
click.secho('done.', fg='green')
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
|
#!/usr/bin/env python
"""Award a badge to a user.
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import click
from byceps.database import db
from byceps.services.user_badge.models.badge import Badge, BadgeID
from byceps.services.user_badge import service as badge_service
from byceps.util.system import get_config_filename_from_env_or_exit
from bootstrap.validators import validate_user_screen_name
from bootstrap.util import app_context
@click.command()
@click.argument('badge_slug')
@click.argument('user', callback=validate_user_screen_name)
def execute(badge_slug, user):
badge_id = find_badge_id_for_badge_slug(badge_slug)
click.echo('Awarding badge "{}" to user "{}" ... '
.format(badge_slug, user.screen_name), nl=False)
badge_service.award_badge_to_user(badge_id, user.id)
click.secho('done.', fg='green')
def find_badge_id_for_badge_slug(slug: str) -> BadgeID:
"""Finde the badge with that slug and return its ID, or raise an
error if not found.
"""
badge_id = db.session \
.query(Badge.id) \
.filter_by(slug=slug) \
.scalar()
if badge_id is None:
raise click.BadParameter('Unknown badge slug "{}".'.format(slug))
return badge_id
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
|
Change script to avoid creation of badge URLs to make it work outside of a *party-specific* app context
|
Change script to avoid creation of badge URLs to make it work outside of a *party-specific* app context
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps
|
8332dc01c3c743543f4c3faff44da84436ae5da2
|
planner/forms.py
|
planner/forms.py
|
from django.contrib.auth.forms import AuthenticationForm
from django import forms
from django.core.validators import MinLengthValidator
from .models import PoolingUser
from users.forms import UserCreationForm
class LoginForm(AuthenticationForm):
username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
"""
Pay attention that id fields are meant to be hidden, since we suppose they come from
an autocomplete AJAX request via an another CharField.
"""
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
class PoolingUserForm(forms.ModelForm):
class Meta:
model = PoolingUser
# Exclude the one-to-one relation with User
fields = ['birth_date', 'driving_license', 'cellphone_number']
class UserForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
fields = ('email', 'first_name', 'last_name')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for field_name in self.Meta.fields:
self[field_name].field.required = True
self['password1'].field.validators = [MinLengthValidator(6)]
|
from django.contrib.auth.forms import AuthenticationForm
from django import forms
from django.core.validators import MinLengthValidator
from .models import PoolingUser, Trip, Step
from users.forms import UserCreationForm
class LoginForm(AuthenticationForm):
username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
"""
Pay attention that id fields are meant to be hidden, since we suppose they come from
an autocomplete AJAX request via an another CharField.
"""
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
class PoolingUserForm(forms.ModelForm):
class Meta:
model = PoolingUser
# Exclude the one-to-one relation with User
fields = ['birth_date', 'driving_license', 'cellphone_number']
class TripForm(forms.ModelForm):
class Meta:
model = Trip
fields = ['date_origin', 'max_num_passengers']
class StepForm(forms.ModelForm):
class Meta:
model = Step
fields = ['origin', 'destination', 'hour_origin', 'hour_destination', 'max_price']
class UserForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
fields = ('email', 'first_name', 'last_name')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for field_name in self.Meta.fields:
self[field_name].field.required = True
self['password1'].field.validators = [MinLengthValidator(6)]
|
Add Trip and Step ModelForms
|
Add Trip and Step ModelForms
|
Python
|
mit
|
livingsilver94/getaride,livingsilver94/getaride,livingsilver94/getaride
|
bd4e1c3f511ac1163e39d99fdc8e70f261023c44
|
setup/create_player_seasons.py
|
setup/create_player_seasons.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import concurrent.futures
from db.common import session_scope
from db.player import Player
from utils.player_data_retriever import PlayerDataRetriever
def create_player_seasons(simulation=False):
data_retriever = PlayerDataRetriever()
with session_scope() as session:
players = session.query(Player).all()[:25]
with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads:
future_tasks = {
threads.submit(
data_retriever.retrieve_player_seasons,
player.player_id, simulation
): player for player in players
}
for future in concurrent.futures.as_completed(future_tasks):
try:
plr_seasons = future.result()
print(len(plr_seasons))
except Exception as e:
print("Concurrent task generated an exception: %s" % e)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import concurrent.futures
from db.common import session_scope
from db.player import Player
from utils.player_data_retriever import PlayerDataRetriever
def create_player_seasons(simulation=False):
data_retriever = PlayerDataRetriever()
with session_scope() as session:
players = session.query(Player).all()[:]
with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads:
future_tasks = {
threads.submit(
data_retriever.retrieve_player_seasons,
player.player_id, simulation
): player for player in players
}
for future in concurrent.futures.as_completed(future_tasks):
try:
plr_seasons = future.result()
except Exception as e:
print("Concurrent task generated an exception: %s" % e)
|
Update player season retrieval function
|
Update player season retrieval function
|
Python
|
mit
|
leaffan/pynhldb
|
f2a88e4849876970c29b568b897dff88ffe09306
|
djrichtextfield/urls.py
|
djrichtextfield/urls.py
|
from django.conf.urls import url
from djrichtextfield.views import InitView
urlpatterns = [
url('^init.js$', InitView.as_view(), name='djrichtextfield_init')
]
|
from django.urls import path
from djrichtextfield.views import InitView
urlpatterns = [
path('init.js', InitView.as_view(), name='djrichtextfield_init')
]
|
Use path instead of soon to be deprecated url
|
Use path instead of soon to be deprecated url
|
Python
|
mit
|
jaap3/django-richtextfield,jaap3/django-richtextfield
|
c080865fdb36da2718774ddff436325d947be323
|
test/test_fit_allocator.py
|
test/test_fit_allocator.py
|
from support import lib,ffi
from qcgc_test import QCGCTest
class FitAllocatorTest(QCGCTest):
def test_macro_consistency(self):
self.assertEqual(2**lib.QCGC_LARGE_FREE_LIST_FIRST_EXP, lib.qcgc_small_free_lists + 1)
last_exp = lib.QCGC_LARGE_FREE_LIST_FIRST_EXP + lib.qcgc_large_free_lists - 1
self.assertLess(2**last_exp, 2**lib.QCGC_ARENA_SIZE_EXP)
self.assertEqual(2**(last_exp + 1), 2**lib.QCGC_ARENA_SIZE_EXP)
def test_small_free_list_index(self):
for i in range(1, lib.qcgc_small_free_lists + 1):
self.assertTrue(lib.is_small(i))
self.assertEqual(lib.small_index(i), i - 1);
def test_large_free_list_index(self):
index = -1;
for i in range(2**lib.QCGC_LARGE_FREE_LIST_FIRST_EXP, 2**lib.QCGC_ARENA_SIZE_EXP):
if (i & (i - 1) == 0):
# Check for power of two
index = index + 1
self.assertFalse(lib.is_small(i))
self.assertEqual(index, lib.large_index(i));
|
from support import lib,ffi
from qcgc_test import QCGCTest
class FitAllocatorTest(QCGCTest):
def test_macro_consistency(self):
self.assertEqual(2**lib.QCGC_LARGE_FREE_LIST_FIRST_EXP, lib.qcgc_small_free_lists + 1)
last_exp = lib.QCGC_LARGE_FREE_LIST_FIRST_EXP + lib.qcgc_large_free_lists - 1
self.assertLess(2**last_exp, 2**lib.QCGC_ARENA_SIZE_EXP)
self.assertEqual(2**(last_exp + 1), 2**lib.QCGC_ARENA_SIZE_EXP)
def test_small_free_list_index(self):
for i in range(1, lib.qcgc_small_free_lists + 1):
self.assertTrue(lib.is_small(i))
self.assertEqual(lib.small_index(i), i - 1);
self.assertTrue(lib.small_index_to_cells(i - 1), i);
def test_large_free_list_index(self):
index = -1;
for i in range(2**lib.QCGC_LARGE_FREE_LIST_FIRST_EXP, 2**lib.QCGC_ARENA_SIZE_EXP):
if (i & (i - 1) == 0):
# Check for power of two
index = index + 1
self.assertFalse(lib.is_small(i))
self.assertEqual(index, lib.large_index(i));
|
Add test for index to cells
|
Add test for index to cells
|
Python
|
mit
|
ntruessel/qcgc,ntruessel/qcgc,ntruessel/qcgc
|
b0a6652a11236409ec3e2606e04621f714a3ab63
|
test/test_jobs/__init__.py
|
test/test_jobs/__init__.py
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.jobs import create_dict_jobs
from default import Test, with_context
class TestJobs(Test):
@with_context
def test_create_dict_jobs(self):
"""Test JOB create_dict_jobs works."""
data = [{'id': 1, 'short_name': 'app'}]
jobs = create_dict_jobs(data, 'function')
assert len(jobs) == 1
assert jobs[0]['name'] == 'function'
|
Test generic creator of jobs.
|
Test generic creator of jobs.
|
Python
|
agpl-3.0
|
jean/pybossa,Scifabric/pybossa,geotagx/pybossa,OpenNewsLabs/pybossa,jean/pybossa,inteligencia-coletiva-lsd/pybossa,geotagx/pybossa,stefanhahmann/pybossa,stefanhahmann/pybossa,PyBossa/pybossa,Scifabric/pybossa,inteligencia-coletiva-lsd/pybossa,OpenNewsLabs/pybossa,PyBossa/pybossa
|
af31c71e49b7d63c24ab7d7c04a5e908451263e2
|
iati/core/tests/test_utilities.py
|
iati/core/tests/test_utilities.py
|
"""A module containing tests for the library implementation of accessing utilities."""
from lxml import etree
import iati.core.resources
import iati.core.utilities
class TestUtilities(object):
"""A container for tests relating to utilities"""
def test_convert_to_schema(self):
"""Check that an etree can be converted to a schema."""
path = iati.core.resources.path_schema('iati-activities-schema')
tree = iati.core.resources.load_as_tree(path)
if not tree:
assert False
schema = iati.core.utilities.convert_to_schema(tree)
assert isinstance(schema, etree.XMLSchema)
def test_log(self):
pass
def test_log_error(self):
pass
|
"""A module containing tests for the library implementation of accessing utilities."""
from lxml import etree
import iati.core.resources
import iati.core.utilities
class TestUtilities(object):
"""A container for tests relating to utilities"""
def test_convert_to_schema(self):
"""Check that an etree can be converted to a schema."""
path = iati.core.resources.path_schema('iati-activities-schema')
tree = iati.core.resources.load_as_tree(path)
if not tree:
assert False
schema = iati.core.utilities.convert_to_schema(tree)
assert isinstance(schema, etree.XMLSchema)
def test_log(self):
pass
def test_log_error(self):
pass
def test_log_exception(self):
pass
def test_log_warning(self):
pass
|
Add more logging test stubs
|
Add more logging test stubs
|
Python
|
mit
|
IATI/iati.core,IATI/iati.core
|
0deac2fe49d1240a1d5fee1b9c47313bde84d609
|
seleniumlogin/__init__.py
|
seleniumlogin/__init__.py
|
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.split(':')[-2].split('/')[-1]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
driver.refresh()
|
from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.rpartition('://')[2].split('/')[0].split(':')[0]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
driver.refresh()
|
Change how the base_url is turned into a domain
|
Change how the base_url is turned into a domain
|
Python
|
mit
|
feffe/django-selenium-login,feffe/django-selenium-login
|
56e764835e75035452a6a1ea06c386ec61dbe872
|
src/rinoh/stylesheets/__init__.py
|
src/rinoh/stylesheets/__init__.py
|
# This file is part of rinohtype, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
import inspect
import os
import sys
from .. import DATA_PATH
from ..style import StyleSheetFile
from .matcher import matcher
__all__ = ['matcher', 'sphinx', 'sphinx_base14']
STYLESHEETS_PATH = os.path.join(DATA_PATH, 'stylesheets')
def path(filename):
return os.path.join(STYLESHEETS_PATH, filename)
sphinx = StyleSheetFile(path('sphinx.rts'))
sphinx_article = StyleSheetFile(path('sphinx_article.rts'))
sphinx_base14 = StyleSheetFile(path('base14.rts'))
# generate docstrings for the StyleSheet instances
for name, stylesheet in inspect.getmembers(sys.modules[__name__]):
if not isinstance(stylesheet, StyleSheetFile):
continue
stylesheet.__doc__ = (':entry point name: ``{}``\n\n{}'
.format(stylesheet, stylesheet.description))
|
# This file is part of rinohtype, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
import inspect
import os
import sys
from .. import DATA_PATH
from ..style import StyleSheetFile
from .matcher import matcher
__all__ = ['matcher', 'sphinx', 'sphinx_base14']
STYLESHEETS_PATH = os.path.join(DATA_PATH, 'stylesheets')
def path(filename):
return os.path.join(STYLESHEETS_PATH, filename)
sphinx = StyleSheetFile(path('sphinx.rts'))
sphinx_article = StyleSheetFile(path('sphinx_article.rts'))
sphinx_base14 = StyleSheetFile(path('base14.rts'))
# generate docstrings for the StyleSheet instances
for name, stylesheet in inspect.getmembers(sys.modules[__name__]):
if not isinstance(stylesheet, StyleSheetFile):
continue
stylesheet.__doc__ = ('{}\n\nEntry point name: ``{}``'
.format(stylesheet.description, stylesheet))
|
Fix the auto-generated docstrings of style sheets
|
Fix the auto-generated docstrings of style sheets
|
Python
|
agpl-3.0
|
brechtm/rinohtype,brechtm/rinohtype,brechtm/rinohtype
|
b448d52e5a30346633dd20e52431af39eb6859ec
|
importer/importer/connections.py
|
importer/importer/connections.py
|
import aioes
from .utils import wait_for_all_services
from .settings import ELASTICSEARCH_ENDPOINTS
async def connect_to_elasticsearch():
print("Connecting to Elasticsearch...")
await wait_for_all_services(ELASTICSEARCH_ENDPOINTS, timeout=10)
elastic = aioes.Elasticsearch(ELASTICSEARCH_ENDPOINTS)
await elastic.cluster.health(wait_for_status='yellow', timeout='5s')
return elastic
|
import aioes
from .utils import wait_for_all_services
from .settings import ELASTICSEARCH_ENDPOINTS
async def connect_to_elasticsearch():
print("Connecting to Elasticsearch...")
await wait_for_all_services(ELASTICSEARCH_ENDPOINTS, timeout=10)
elastic = aioes.Elasticsearch(ELASTICSEARCH_ENDPOINTS)
return elastic
|
Remove the pointless cluster health check
|
Remove the pointless cluster health check
|
Python
|
mit
|
despawnerer/theatrics,despawnerer/theatrics,despawnerer/theatrics
|
70d435e1176a1132db6a04c34c04567df354d1d9
|
cla_backend/apps/reports/management/commands/mi_cb1_report.py
|
cla_backend/apps/reports/management/commands/mi_cb1_report.py
|
# coding=utf-8
import logging
from django.core.management.base import BaseCommand
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "This runs the MCCB1sSLA report"
def handle(self, *args, **options):
self.create_report()
def create_report():
print("stuff goes here")
# '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "08/05/2021", "date_to": "10/05/2021"}'
# report_data = json_stuff_goes_here
# ExportTask().delay(user_person.pk, filename_of_report, mi_cb1_extract_agilisys, report_data)
|
# coding=utf-8
import logging
from django.core.management.base import BaseCommand
from reports.tasks import ExportTask
from core.models import get_web_user
from django.views.decorators.csrf import csrf_exempt
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "This runs the MCCB1sSLA report"
def handle(self, *args, **options):
self.create_report()
@csrf_exempt
def create_report(self):
report_data = '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "2021-05-08", "date_to": "2021-05-10"}'
# report_data = json_stuff_goes_here
web_user = get_web_user()
filename_of_report = "WEEKLY-REPORT-TEST.csv"
ExportTask().delay(web_user.pk, filename_of_report, "MICB1Extract", report_data)
|
Send weekly report to aws
|
Send weekly report to aws
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
28803e4669f4c7b2b84e53e39e3a0a99ff57572d
|
skyfield/__main__.py
|
skyfield/__main__.py
|
# -*- coding: utf-8 -*-
import pkg_resources
import skyfield
from skyfield.api import load
from skyfield.functions import load_bundled_npy
def main():
print('Skyfield version: {0}'.format(skyfield.__version__))
print('jplephem version: {0}'.format(version_of('jplephem')))
print('sgp4 version: {0}'.format(version_of('sgp4')))
ts = load.timescale()
fmt = '%Y-%m-%d'
final_leap = (ts._leap_tai[-1] - 1) / (24 * 60 * 60)
print('Built-in leap seconds table ends with leap second at: {0}'
.format(ts.tai_jd(final_leap).utc_strftime()))
arrays = load_bundled_npy('iers.npz')
tt, delta_t = arrays['delta_t_recent']
start = ts.tt_jd(tt[0])
end = ts.tt_jd(tt[-1])
print('Built-in ∆T table from finals2000A.all covers: {0} to {1}'
.format(start.utc_strftime(fmt), end.utc_strftime(fmt)))
def version_of(distribution):
try:
d = pkg_resources.get_distribution(distribution)
except pkg_resources.DistributionNotFound:
return 'Unknown'
else:
return d.version
main()
|
# -*- coding: utf-8 -*-
import pkg_resources
import numpy as np
import skyfield
from skyfield.api import load
from skyfield.functions import load_bundled_npy
def main():
print('Skyfield version: {0}'.format(skyfield.__version__))
print('jplephem version: {0}'.format(version_of('jplephem')))
print('sgp4 version: {0}'.format(version_of('sgp4')))
ts = load.timescale()
fmt = '%Y-%m-%d'
final_leap = (ts._leap_tai[-1] - 1) / (24 * 60 * 60)
print('Built-in leap seconds table ends with leap second at: {0}'
.format(ts.tai_jd(final_leap).utc_strftime()))
arrays = load_bundled_npy('iers.npz')
daily_tt = arrays['tt_jd_minus_arange']
daily_tt += np.arange(len(daily_tt))
start = ts.tt_jd(daily_tt[0])
end = ts.tt_jd(daily_tt[-1])
print('Built-in ∆T table from finals2000A.all covers: {0} to {1}'
.format(start.utc_strftime(fmt), end.utc_strftime(fmt)))
def version_of(distribution):
try:
d = pkg_resources.get_distribution(distribution)
except pkg_resources.DistributionNotFound:
return 'Unknown'
else:
return d.version
main()
|
Fix “python -m skyfield” following ∆T array rename
|
Fix “python -m skyfield” following ∆T array rename
|
Python
|
mit
|
skyfielders/python-skyfield,skyfielders/python-skyfield
|
b812a8da81ec9943d11b8cb9f709e234c90a2282
|
stylo/utils.py
|
stylo/utils.py
|
from uuid import uuid4
class MessageBus:
"""A class that is used behind the scenes to coordinate events and timings of
animations.
"""
def __init__(self):
self.subs = {}
def new_id(self):
"""Use this to get a name to use for your events."""
return str(uuid4())
def register(self, event, obj):
"""Register to receive notifications of an event.
:param event: The name of the kind of event to receive
:param obj: The object to receive that kind of message.
"""
if event not in self.subs:
self.subs[event] = [obj]
return
self.subs[event].append(obj)
def send(self, event, **kwargs):
"""Send a message to whoever may be listening."""
if event not in self.subs:
return
for obj in self.subs[event]:
params = get_parameters(obj)
values = {k: v for k, v in kwargs.items() if k in params}
obj(**values)
_message_bus = MessageBus()
def get_message_bus():
"""A function that returns an instance of the message bus to ensure everyone uses
the same instance."""
return _message_bus
|
import inspect
from uuid import uuid4
def get_parameters(f):
return list(inspect.signature(f).parameters.keys())
class MessageBus:
"""A class that is used behind the scenes to coordinate events and timings of
animations.
"""
def __init__(self):
self.subs = {}
def new_id(self):
"""Use this to get a name to use for your events."""
return str(uuid4())
def register(self, event, obj):
"""Register to receive notifications of an event.
:param event: The name of the kind of event to receive
:param obj: The object to receive that kind of message.
"""
if event not in self.subs:
self.subs[event] = [obj]
return
self.subs[event].append(obj)
def send(self, event, **kwargs):
"""Send a message to whoever may be listening."""
if event not in self.subs:
return
for obj in self.subs[event]:
params = get_parameters(obj)
values = {k: v for k, v in kwargs.items() if k in params}
obj(**values)
_message_bus = MessageBus()
def get_message_bus():
"""A function that returns an instance of the message bus to ensure everyone uses
the same instance."""
return _message_bus
|
Add the function back for now
|
Add the function back for now
|
Python
|
mit
|
alcarney/stylo,alcarney/stylo
|
602184794c3f38bf6307cf68f4d61294b523c009
|
examples/LKE_example.py
|
examples/LKE_example.py
|
from pygraphc.misc.LKE import *
from pygraphc.evaluation.ExternalEvaluation import *
# set input and output path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014/dataset1/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'lke-result-' + ip_address + '.txt'
OutputPath = './results'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# run LKE method
myparser = LKE(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# perform evaluation
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
# print evaluation result
print homogeneity_completeness_vmeasure
print ('The running time of LKE is', time)
|
from pygraphc.misc.LKE import *
from pygraphc.evaluation.ExternalEvaluation import *
# set input and output path
dataset_path = '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014/dataset1_perday/'
groundtruth_file = dataset_path + 'Dec 1.log.labeled'
analyzed_file = 'Dec 1.log'
OutputPath = '/home/hudan/Git/pygraphc/result/misc/'
prediction_file = OutputPath + 'Dec 1.log.perline'
para = Para(path=dataset_path, logname=analyzed_file, save_path=OutputPath)
# run LKE method
myparser = LKE(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# perform evaluation
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
# get evaluation of clustering performance
ar = ExternalEvaluation.get_adjusted_rand(groundtruth_file, prediction_file)
ami = ExternalEvaluation.get_adjusted_mutual_info(groundtruth_file, prediction_file)
nmi = ExternalEvaluation.get_normalized_mutual_info(groundtruth_file, prediction_file)
h = ExternalEvaluation.get_homogeneity(groundtruth_file, prediction_file)
c = ExternalEvaluation.get_completeness(groundtruth_file, prediction_file)
v = ExternalEvaluation.get_vmeasure(groundtruth_file, prediction_file)
# print evaluation result
print ar, ami, nmi, h, c, v
print ('The running time of LKE is', time)
|
Edit path and external evaluation
|
Edit path and external evaluation
|
Python
|
mit
|
studiawan/pygraphc
|
2739999c6fa0628e7cfe7a918e3cde3b7d791d66
|
tests/astroplpython/data/test_Timeseries.py
|
tests/astroplpython/data/test_Timeseries.py
|
'''
Created on Jul 16, 2014
@author: thomas
'''
import unittest
class TestTimeseries (unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_strToXTArray (self):
import astroplpython.data.Timeseries as t
strarr = ['(1,1.)', '(2,2.)', '(2.1,3.)', '(2.018,4.)']
x_t_list = t.x_t.strToXTArray(strarr)
print (str(x_t_list))
self.assertEqual(4, len(x_t_list), "list has right number of elements")
x = [1, 2, 2.1, 2.018]
t = [1., 2., 3., 4.]
i = 0
while (i < 4):
self.assertEquals(x[i], x_t_list[i].value) #, "x value is correct")
self.assertEquals(t[i], x_t_list[i].time, "t value is correct")
i += 1
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
'''
Created on Jul 16, 2014
@author: thomas
'''
import unittest
class TestTimeseries (unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_strToXTArray (self):
import astroplpython.data.Timeseries as Timeseries
# test data strarr has combination of integer, floats.
#
strarr = ['(1,1)', '(2,2.)', '(2.1,3.)', '(2.018,4.)']
x_t_list = Timeseries.x_t.strToXTArray(strarr)
#print (str(x_t_list))
self.assertEqual(4, len(x_t_list), "list has right number of elements")
# Check class, return values. In checking values be sure
# to check that we cast back to float
x = [1., 2.0, 2.1, 2.018]
t = [1.000, 2.0, 3.0, 4.0]
i = 0
while (i < 4):
#print (str(x_t_list[i]))
#print (str(x_t_list[i].value))
self.assertIsInstance(x_t_list[i], Timeseries.x_t, "is class of Timeseries")
self.assertEquals(x[i], x_t_list[i].value) #, "x value is correct")
self.assertEquals(t[i], x_t_list[i].time, "t value is correct")
i += 1
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Modify test to match changes in class
|
Modify test to match changes in class
|
Python
|
mit
|
brianthomas/astroplpython,brianthomas/astroplpython
|
5b7789d519be7251c58b68879f013d5f3bf0c950
|
tests/thread/thread_stacksize1.py
|
tests/thread/thread_stacksize1.py
|
# test setting the thread stack size
#
# MIT license; Copyright (c) 2016 Damien P. George on behalf of Pycom Ltd
import sys
import _thread
# different implementations have different minimum sizes
if sys.implementation == 'micropython':
sz = 2 * 1024
else:
sz = 32 * 1024
def foo():
pass
def thread_entry():
foo()
with lock:
global n_finished
n_finished += 1
# test set/get of stack size
print(_thread.stack_size())
print(_thread.stack_size(sz))
print(_thread.stack_size() == sz)
print(_thread.stack_size())
lock = _thread.allocate_lock()
n_thread = 2
n_finished = 0
# set stack size and spawn a few threads
_thread.stack_size(sz)
for i in range(n_thread):
_thread.start_new_thread(thread_entry, ())
# busy wait for threads to finish
while n_finished < n_thread:
pass
print('done')
|
# test setting the thread stack size
#
# MIT license; Copyright (c) 2016 Damien P. George on behalf of Pycom Ltd
import sys
import _thread
# different implementations have different minimum sizes
if sys.implementation.name == 'micropython':
sz = 2 * 1024
else:
sz = 32 * 1024
def foo():
pass
def thread_entry():
foo()
with lock:
global n_finished
n_finished += 1
# reset stack size to default
_thread.stack_size()
# test set/get of stack size
print(_thread.stack_size())
print(_thread.stack_size(sz))
print(_thread.stack_size() == sz)
print(_thread.stack_size())
lock = _thread.allocate_lock()
n_thread = 2
n_finished = 0
# set stack size and spawn a few threads
_thread.stack_size(sz)
for i in range(n_thread):
_thread.start_new_thread(thread_entry, ())
# busy wait for threads to finish
while n_finished < n_thread:
pass
print('done')
|
Make stack-size test run correctly and reliable on uPy.
|
tests/thread: Make stack-size test run correctly and reliable on uPy.
|
Python
|
mit
|
mhoffma/micropython,HenrikSolver/micropython,ryannathans/micropython,dmazzella/micropython,mhoffma/micropython,PappaPeppar/micropython,MrSurly/micropython,tuc-osg/micropython,oopy/micropython,oopy/micropython,SHA2017-badge/micropython-esp32,AriZuu/micropython,toolmacher/micropython,redbear/micropython,mhoffma/micropython,tobbad/micropython,selste/micropython,adafruit/circuitpython,toolmacher/micropython,alex-robbins/micropython,emfcamp/micropython,Timmenem/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,dmazzella/micropython,mhoffma/micropython,dxxb/micropython,blazewicz/micropython,PappaPeppar/micropython,adafruit/circuitpython,pramasoul/micropython,kerneltask/micropython,lowRISC/micropython,pfalcon/micropython,puuu/micropython,dxxb/micropython,tuc-osg/micropython,kerneltask/micropython,tobbad/micropython,MrSurly/micropython,SHA2017-badge/micropython-esp32,AriZuu/micropython,selste/micropython,ganshun666/micropython,hiway/micropython,redbear/micropython,lowRISC/micropython,turbinenreiter/micropython,tralamazza/micropython,pramasoul/micropython,oopy/micropython,SHA2017-badge/micropython-esp32,Peetz0r/micropython-esp32,hosaka/micropython,chrisdearman/micropython,MrSurly/micropython,bvernoux/micropython,TDAbboud/micropython,tuc-osg/micropython,Peetz0r/micropython-esp32,Timmenem/micropython,MrSurly/micropython-esp32,deshipu/micropython,ryannathans/micropython,pozetroninc/micropython,infinnovation/micropython,chrisdearman/micropython,adafruit/circuitpython,adafruit/circuitpython,chrisdearman/micropython,infinnovation/micropython,matthewelse/micropython,swegener/micropython,hiway/micropython,MrSurly/micropython-esp32,adafruit/micropython,tralamazza/micropython,alex-march/micropython,swegener/micropython,toolmacher/micropython,PappaPeppar/micropython,puuu/micropython,adafruit/circuitpython,cwyark/micropython,hiway/micropython,deshipu/micropython,deshipu/micropython,cwyark/micropython,blazewicz/micropython,pozetroninc/micropython,SHA2017-badge/micropyth
on-esp32,tuc-osg/micropython,micropython/micropython-esp32,alex-march/micropython,adafruit/micropython,alex-march/micropython,pozetroninc/micropython,henriknelson/micropython,jmarcelino/pycom-micropython,henriknelson/micropython,turbinenreiter/micropython,MrSurly/micropython,kerneltask/micropython,emfcamp/micropython,swegener/micropython,ganshun666/micropython,pozetroninc/micropython,kerneltask/micropython,PappaPeppar/micropython,Peetz0r/micropython-esp32,tuc-osg/micropython,Timmenem/micropython,AriZuu/micropython,dxxb/micropython,matthewelse/micropython,toolmacher/micropython,chrisdearman/micropython,SHA2017-badge/micropython-esp32,dmazzella/micropython,HenrikSolver/micropython,lowRISC/micropython,HenrikSolver/micropython,swegener/micropython,MrSurly/micropython-esp32,bvernoux/micropython,MrSurly/micropython-esp32,pfalcon/micropython,torwag/micropython,henriknelson/micropython,blazewicz/micropython,TDAbboud/micropython,bvernoux/micropython,hosaka/micropython,oopy/micropython,alex-robbins/micropython,jmarcelino/pycom-micropython,deshipu/micropython,Peetz0r/micropython-esp32,TDAbboud/micropython,Peetz0r/micropython-esp32,turbinenreiter/micropython,redbear/micropython,bvernoux/micropython,mhoffma/micropython,alex-march/micropython,adafruit/micropython,dxxb/micropython,blazewicz/micropython,micropython/micropython-esp32,emfcamp/micropython,tobbad/micropython,pramasoul/micropython,puuu/micropython,torwag/micropython,pozetroninc/micropython,toolmacher/micropython,hosaka/micropython,adafruit/micropython,henriknelson/micropython,bvernoux/micropython,infinnovation/micropython,turbinenreiter/micropython,HenrikSolver/micropython,oopy/micropython,adafruit/circuitpython,alex-robbins/micropython,matthewelse/micropython,PappaPeppar/micropython,micropython/micropython-esp32,ryannathans/micropython,ganshun666/micropython,selste/micropython,deshipu/micropython,torwag/micropython,ryannathans/micropython,MrSurly/micropython,hiway/micropython,redbear/micropython,tralamazza/micropython,
AriZuu/micropython,alex-march/micropython,tralamazza/micropython,ryannathans/micropython,trezor/micropython,henriknelson/micropython,alex-robbins/micropython,trezor/micropython,tobbad/micropython,trezor/micropython,puuu/micropython,TDAbboud/micropython,pfalcon/micropython,turbinenreiter/micropython,alex-robbins/micropython,emfcamp/micropython,TDAbboud/micropython,hosaka/micropython,kerneltask/micropython,redbear/micropython,HenrikSolver/micropython,matthewelse/micropython,pfalcon/micropython,jmarcelino/pycom-micropython,emfcamp/micropython,adafruit/micropython,tobbad/micropython,AriZuu/micropython,ganshun666/micropython,dmazzella/micropython,torwag/micropython,chrisdearman/micropython,pfalcon/micropython,selste/micropython,selste/micropython,matthewelse/micropython,Timmenem/micropython,trezor/micropython,pramasoul/micropython,swegener/micropython,matthewelse/micropython,jmarcelino/pycom-micropython,lowRISC/micropython,pramasoul/micropython,cwyark/micropython,infinnovation/micropython,puuu/micropython,ganshun666/micropython,cwyark/micropython,dxxb/micropython,hiway/micropython,blazewicz/micropython,lowRISC/micropython,jmarcelino/pycom-micropython,Timmenem/micropython,torwag/micropython,infinnovation/micropython,hosaka/micropython,trezor/micropython,cwyark/micropython,micropython/micropython-esp32
|
ec9b1f0ebda55e3e02e597e10ac28d62286b922f
|
SimPEG/EM/NSEM/Utils/__init__.py
|
SimPEG/EM/NSEM/Utils/__init__.py
|
""" module SimPEG.EM.NSEM.Utils
Collection of utilities that are usefull for the NSEM problem
NOTE: These utilities are not well test, use with care
"""
from __future__ import absolute_import
from .MT1Dsolutions import get1DEfields # Add the names of the functions
from .MT1Danalytic import getEHfields, getImpedance
from .dataUtils import (getAppRes, appResPhs, rec_to_ndarr, rotateData,
skindepth, makeAnalyticSolution, plotMT1DModelData,
plotImpAppRes, printTime, convert3Dto1Dobject,
resampleNSEMdataAtFreq, extract_data_info)
from .ediFilesUtils import (EDIimporter, _findLatLong, _findLine, _findEDIcomp)
from .testUtils import (getAppResPhs, setup1DSurvey, setupSimpegNSEM_ePrimSec,
random, halfSpace, blockInhalfSpace, twoLayer)
|
""" module SimPEG.EM.NSEM.Utils
Collection of utilities that are usefull for the NSEM problem
NOTE: These utilities are not well test, use with care
"""
from __future__ import absolute_import
from .MT1Dsolutions import get1DEfields # Add the names of the functions
from .MT1Danalytic import getEHfields, getImpedance
from .dataUtils import (getAppRes, appResPhs, rec_to_ndarr, rotateData,
skindepth, makeAnalyticSolution, plotMT1DModelData,
plotImpAppRes, printTime, convert3Dto1Dobject,
resample_data, extract_data_info)
from .ediFilesUtils import (EDIimporter, _findLatLong, _findLine, _findEDIcomp)
from .testUtils import (getAppResPhs, setup1DSurvey, setupSimpegNSEM_ePrimSec,
random, halfSpace, blockInhalfSpace, twoLayer)
|
Fix import issue due to name changes
|
Fix import issue due to name changes
|
Python
|
mit
|
simpeg/simpeg
|
3a470c02a1a171f876200258897d6e277a1aab91
|
tournamentcontrol/competition/signals/__init__.py
|
tournamentcontrol/competition/signals/__init__.py
|
from django.db import models
from tournamentcontrol.competition.signals.custom import match_forfeit # noqa
from tournamentcontrol.competition.signals.ladders import ( # noqa
changed_points_formula,
scale_ladder_entry,
team_ladder_entry_aggregation,
)
from tournamentcontrol.competition.signals.matches import ( # noqa
match_saved_handler,
notify_match_forfeit_email,
)
from tournamentcontrol.competition.signals.places import ( # noqa
set_ground_latlng,
set_ground_timezone,
)
from tournamentcontrol.competition.signals.teams import delete_team # noqa
def delete_related(sender, instance, *args, **kwargs):
for ro, __ in instance._meta.get_all_related_objects_with_model():
name = ro.get_accessor_name()
if isinstance(ro.field, models.ManyToManyField):
continue
if isinstance(instance, ro.model):
continue
manager = getattr(instance, name)
for obj in manager.all():
obj.delete()
|
from django.db import models
from tournamentcontrol.competition.signals.custom import match_forfeit # noqa
from tournamentcontrol.competition.signals.ladders import ( # noqa
changed_points_formula,
scale_ladder_entry,
team_ladder_entry_aggregation,
)
from tournamentcontrol.competition.signals.matches import ( # noqa
match_saved_handler,
notify_match_forfeit_email,
)
from tournamentcontrol.competition.signals.places import ( # noqa
set_ground_latlng,
set_ground_timezone,
)
from tournamentcontrol.competition.signals.teams import delete_team # noqa
def delete_related(sender, instance, *args, **kwargs):
for ro, __ in [
(f, f.model)
for f in instance._meta.get_fields()
if (f.one_to_many or f.one_to_one)
and f.auto_created and not f.concrete]:
name = ro.get_accessor_name()
if isinstance(ro.field, models.ManyToManyField):
continue
if isinstance(instance, ro.model):
continue
manager = getattr(instance, name)
for obj in manager.all():
obj.delete()
|
Stop using the undocumented get_all_related_objects_with_model API
|
Stop using the undocumented get_all_related_objects_with_model API
|
Python
|
bsd-3-clause
|
goodtune/vitriolic,goodtune/vitriolic,goodtune/vitriolic,goodtune/vitriolic
|
cbb90d03b83a495b1c46514a583538f2cfc0d29c
|
test/functional/test_manager.py
|
test/functional/test_manager.py
|
from osmviz.manager import PILImageManager, OSMManager
import PIL.Image as Image
def test_pil():
imgr = PILImageManager("RGB")
osm = OSMManager(image_manager=imgr)
image, bnds = osm.createOSMImage((30, 35, -117, -112), 9)
wh_ratio = float(image.size[0]) / image.size[1]
image2 = image.resize((int(800 * wh_ratio), 800), Image.ANTIALIAS)
del image
image2.show()
if __name__ == "__main__":
test_pil()
# End of file
|
from osmviz.manager import PILImageManager, OSMManager
import PIL.Image as Image
def test_pil():
image_manager = PILImageManager("RGB")
osm = OSMManager(image_manager=image_manager)
image, bounds = osm.createOSMImage((30, 31, -117, -116), 9)
wh_ratio = float(image.size[0]) / image.size[1]
image2 = image.resize((int(800 * wh_ratio), 800), Image.ANTIALIAS)
del image
image2.show()
if __name__ == "__main__":
test_pil()
# End of file
|
Reduce number of tiles downloaded
|
Reduce number of tiles downloaded
|
Python
|
mit
|
hugovk/osmviz,hugovk/osmviz
|
257686cfa72318c0b476d4623731080f848c4942
|
app.py
|
app.py
|
import requests
from flask import Flask, render_template
app = Flask(__name__)
BBC_id= "bbc-news"
API_KEY = "c4002216fa5446d582b5f31d73959d36"
@app.route("/")
def index():
r = requests.get(
f"https://newsapi.org/v1/articles?source={BBC_id}&sortBy=top&apiKey={API_KEY}"
)
return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run()
|
import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
BBC_id= "bbc-news"
@app.route("/")
def index():
r = requests.get(
f"https://newsapi.org/v1/articles?source={BBC_id}&sortBy=top&apiKey={app.config['API_KEY']}"
)
return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run()
|
Use instance folder to load configuration file.
|
Use instance folder to load configuration file.
|
Python
|
mit
|
alchermd/headlines,alchermd/headlines
|
d1ccd3e93043d11a22e873e7ccdb76d749746151
|
api/app/app.py
|
api/app/app.py
|
import os
import logging
from flask import Flask
from model.base import db
from route.base import blueprint
# Register models and routes
import model
import route
logging.basicConfig(level=logging.INFO)
app = Flask(__name__)
# app.config['PROPAGATE_EXCEPTIONS'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://' +\
os.environ['USER'] + ':' +\
os.environ['PASSWORD'] + '@' +\
'db/' + os.environ['SCHEMA']
db.init_app(app)
with app.test_request_context():
db.create_all()
db.session.commit()
app.register_blueprint(blueprint)
|
import os
import logging
from uwsgidecorators import postfork
from flask import Flask
from model.base import db
from route.base import blueprint
# Register models and routes
import model
import route
logging.basicConfig(level=logging.INFO)
app = Flask(__name__)
# app.config['PROPAGATE_EXCEPTIONS'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://' +\
os.environ['USER'] + ':' +\
os.environ['PASSWORD'] + '@' +\
'db/' + os.environ['SCHEMA']
db.init_app(app)
with app.test_request_context():
db.create_all()
db.session.commit()
app.register_blueprint(blueprint)
@postfork
def refresh_db():
db.session.remove()
db.init_app(app)
|
Refresh db connections on uwsgi fork
|
Refresh db connections on uwsgi fork
|
Python
|
mit
|
hexa4313/velov-companion-server,hexa4313/velov-companion-server
|
08fbfa49129a42821b128913e4aa9fbacf966f20
|
grizzly-jersey/setup.py
|
grizzly-jersey/setup.py
|
import subprocess
import sys
import setup_util
import os
def start(args):
try:
subprocess.check_call("mvn clean package shade:shade", shell=True, cwd="grizzly-jersey")
subprocess.Popen("java -jar target/grizzly-jersey-example-0.1.jar".rsplit(" "), cwd="grizzly-jersey")
return 0
except subprocess.CalledProcessError:
return 1
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'grizzly-jersey' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
return 0
|
import subprocess
import sys
import setup_util
import os
def start(args):
try:
subprocess.check_call("mvn clean package", shell=True, cwd="grizzly-jersey")
subprocess.Popen("java -jar target/grizzly-jersey-example-0.1.jar".rsplit(" "), cwd="grizzly-jersey")
return 0
except subprocess.CalledProcessError:
return 1
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'grizzly-jersey' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
return 0
|
Fix the build so it no longer double-shades. This removes all the warnings it printed.
|
Fix the build so it no longer double-shades. This removes all the warnings it printed.
|
Python
|
bsd-3-clause
|
yunspace/FrameworkBenchmarks,grob/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,grob/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,testn/FrameworkBenchmarks,dmacd/FB-try1,k-r-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sgml/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,joshk/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,methane/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,grob/FrameworkBenchmarks,herloct/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,doom369/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jamming/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sgml/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,O
cramius/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,valyala/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,actframework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,methane/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,testn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,dmacd/FB-try1,sgml/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zloster/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,grob/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Verber/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rydgel/FrameworkB
enchmarks,kostya-sh/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,actframework/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sxend/FrameworkBenchmarks,testn/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,methane/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,torhve/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,dmacd/FB-try1,sxend/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jamming/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,testn/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,joshk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,torhve/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,khellang/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,MTDdk/Fram
eworkBenchmarks,julienschmidt/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,joshk/FrameworkBenchmarks,valyala/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zloster/FrameworkBenchmarks,leafo/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jamming/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,denkab/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,grob/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,testn/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,methane/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,herloct/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,torhve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,doom369/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jamming/FrameworkBenchmark
s,greenlaw110/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,dmacd/FB-try1,Jesterovskiy/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Verber/FrameworkBenchmarks,testn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,denkab/FrameworkBenchmarks,testn/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,doom369/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sgml/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,leafo/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sgml/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sxend/FrameworkBenchmarks,testn/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,methane/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Verber/FrameworkBenchmarks,joshk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,denkab/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jamming/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,doom369/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,herloct/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,valyala/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmar
ks,ashawnbandy-te-tfb/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,dmacd/FB-try1,kbrock/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,doom369/FrameworkBenchmarks,leafo/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,khellang/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,khellang/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sgml/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,denkab/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sxend/FrameworkBenchmarks,valyala/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,denkab/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,ashawnbandy-te-tfb/Framewor
kBenchmarks,zloster/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,torhve/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,dmacd/FB-try1,knewmanTE/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,actframework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,valyala/FrameworkBenchmarks,grob/FrameworkBenchmarks,leafo/FrameworkBenchmarks,jamming/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,torhve/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,steveklabnik/Framewo
rkBenchmarks,xitrum-framework/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zapov/FrameworkBenchmarks,valyala/FrameworkBenchmarks,grob/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,actframework/FrameworkBenchmarks,actframework/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sxend/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,doom369/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,actframework/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,valyala/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,torhve/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,khellang/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,methane/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,testn/FrameworkBenchmarks,ashawnbandy-te-tfb/Frame
workBenchmarks,F3Community/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,sxend/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,testn/FrameworkBenchmarks,methane/FrameworkBenchmarks,methane/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Verber/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zloster/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,actframework/FrameworkBenchmarks,grob/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,valyala/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,testn/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,leafo/FrameworkBenchmarks,methane/FrameworkBenchmarks,denkab/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sgml/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,khellang/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zapov/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,thousandsofthem/Framew
orkBenchmarks,psfblair/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zloster/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Verber/FrameworkBenchmarks,dmacd/FB-try1,Rayne/FrameworkBenchmarks,sxend/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,torhve/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,leafo/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Verber/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zloster/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zloster/FrameworkBenchmarks,methane/FrameworkBenchmarks,doom369/Framewo
rkBenchmarks,Synchro/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,grob/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sxend/FrameworkBenchmarks,testn/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,methane/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,khellang/FrameworkBenchmarks,methane/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,doom369/FrameworkBenchmarks,denkab/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zloster/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Eye
pea/FrameworkBenchmarks,sxend/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,herloct/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,dmacd/FB-try1,leafo/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,methane/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,leafo/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sgml/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,methane/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,dmacd/FB-try1,circlespainter/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,doom369/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,marko-asplund/FrameworkBenchmar
ks,zapov/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sxend/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,valyala/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zapov/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sxend/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,denkab/FrameworkBenchmarks,grob/FrameworkBenchmarks,dmacd/FB-try1,youprofit/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,actframework/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,valyala/FrameworkBenchmarks,doom369/Framewo
rkBenchmarks,grob/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jamming/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,khellang/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,joshk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Verber/FrameworkBenchmarks,leafo/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sxend/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,joshk/FrameworkBenchmarks,torhve/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Verber/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,denkab/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,dmacd/FB-try1,zane-techempower/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Eyepea/FrameworkBenc
hmarks,xitrum-framework/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Verber/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,denkab/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,herloct/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sgml/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,leafo/FrameworkBenchmarks,actframework/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,torhve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,testn/FrameworkBenchmarks,zdanek/FrameworkBe
nchmarks,doom369/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,grob/FrameworkBenchmarks,grob/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,doom369/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zapov/FrameworkBenchmarks,valyala/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zapov/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zapov/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,khellang/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jamming/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,s
-ludwig/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,leafo/FrameworkBenchmarks,herloct/FrameworkBenchmarks,leafo/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zapov/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sxend/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,denkab/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,sxend/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zloster/FrameworkBenchmarks,dmacd/FB-try1,hamiltont/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zapov/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,grob/FrameworkBenchmarks,herloct/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,testn/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,denkab/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks
|
8ae763c69bbba11a264f8404b8189a53c63d4f40
|
marathon_itests/environment.py
|
marathon_itests/environment.py
|
import time
from itest_utils import wait_for_marathon
from itest_utils import print_container_logs
def before_all(context):
wait_for_marathon()
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
if scenario.status != 'passed':
print "Zookeeper container logs:"
print_container_logs('zookeeper')
print "Marathon container logs:"
print_container_logs('marathon')
if context.client:
while True:
apps = context.client.list_apps()
if not apps:
break
for app in apps:
context.client.delete_app(app.id, force=True)
time.sleep(0.5)
while context.client.list_deployments():
time.sleep(0.5)
|
import time
from itest_utils import wait_for_marathon
from itest_utils import print_container_logs
def before_all(context):
wait_for_marathon()
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
if context.client:
while True:
apps = context.client.list_apps()
if not apps:
break
for app in apps:
context.client.delete_app(app.id, force=True)
time.sleep(0.5)
while context.client.list_deployments():
time.sleep(0.5)
def after_step(context, step):
if step.status == "failed":
print "Zookeeper container logs:"
print_container_logs('zookeeper')
print "Marathon container logs:"
print_container_logs('marathon')
|
Move log print to after_step
|
Move log print to after_step
|
Python
|
apache-2.0
|
Yelp/paasta,gstarnberger/paasta,gstarnberger/paasta,Yelp/paasta,somic/paasta,somic/paasta
|
605443886582d13c2b45b19fad86854bf4e8ddbd
|
backend/catalogue/serializers.py
|
backend/catalogue/serializers.py
|
from rest_framework import serializers

from .models import Release, Track, Comment


class CommentSerializer(serializers.ModelSerializer):
    """Serializes a Comment as its id and text only."""

    class Meta:
        model = Comment
        fields = ('id', 'comment')


class TrackSerializer(serializers.ModelSerializer):
    # Render the related CD as its string representation instead of a PK.
    cdid = serializers.StringRelatedField(
        read_only=True
    )

    class Meta:
        model = Track
        fields = ('trackid', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'cdid')


class ReleaseSerializer(serializers.ModelSerializer):
    # Hyperlinks to the nested track/comment list endpoints for this release.
    tracks = serializers.HyperlinkedIdentityField(view_name='release-tracks')
    comments = serializers.HyperlinkedIdentityField(view_name='release-comments')

    class Meta:
        model = Release
        fields = ('id', 'arrivaldate', 'artist', 'title', 'year', 'local', 'cpa', 'compilation', 'female', 'tracks', 'comments')
|
from rest_framework import serializers

from .models import Release, Track, Comment


class CommentSerializer(serializers.ModelSerializer):
    """Serializes a Comment as its id and text only."""

    class Meta:
        model = Comment
        fields = ('id', 'comment')


class TrackSerializer(serializers.ModelSerializer):
    # 'release' is the FK back to the owning Release.
    class Meta:
        model = Track
        fields = ('trackid', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'release')


class ReleaseSerializer(serializers.ModelSerializer):
    # Hyperlinks to the nested track/comment list endpoints for this release.
    tracks = serializers.HyperlinkedIdentityField(view_name='release-tracks')
    comments = serializers.HyperlinkedIdentityField(view_name='release-comments')

    class Meta:
        model = Release
        fields = ('id', 'arrivaldate', 'artist', 'title', 'year','company','genre','format', 'local', 'cpa', 'compilation', 'female', 'tracks', 'comments')
|
Add more fields to Release serializer.
|
Add more fields to Release serializer.
|
Python
|
mit
|
ThreeDRadio/playlists,ThreeDRadio/playlists,ThreeDRadio/playlists
|
1118541b1cdea7f6079bb63d000ba54f69dfa119
|
books/views.py
|
books/views.py
|
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render

from books import models
from books import forms


@login_required
def receipt_list(request, user_id):
    """Render the list of receipts belonging to *user_id*, newest first."""
    user = User.objects.get(id=user_id)
    ctx = {}
    ctx['user'] = user
    ctx['receipts'] = models.Receipt.objects.filter(user=user).order_by('-id')
    return render(request, 'receipt_list.html', context=ctx)


@login_required
def receipt_create(request, user_id):
    """Create a receipt from POSTed form data, then redirect to the list."""
    if request.method == "POST":
        form = forms.ReceiptForm(request.POST)
        if form.is_valid():
            data = form.cleaned_data
            # NOTE(review): the receipt is attached to request.user, not to
            # the user_id from the URL -- confirm this asymmetry is intended.
            models.Receipt.objects.create(title=data.get("title"),
                                          price=data.get("price"),
                                          user=request.user)
            return HttpResponseRedirect(reverse('receipt_list',
                                                args=[request.user.id]))
    else:
        form = forms.ReceiptForm()
    # GET, or invalid POST: re-render the form (with errors if bound).
    return render(request, 'receipt_create.html', {'form': form})
|
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.shortcuts import render

from books import models
from books import forms


@login_required
def receipt_list(request, user_id):
    """Render the list of receipts belonging to *user_id*, newest first."""
    user = User.objects.get(id=user_id)
    ctx = {}
    ctx['user'] = user
    ctx['receipts'] = models.Receipt.objects.filter(user=user).order_by('-id')
    return render(request, 'receipt_list.html', context=ctx)


@login_required
def receipt_create(request, user_id):
    """Create a receipt from POSTed form data, then redirect to the list."""
    if request.method == "POST":
        form = forms.ReceiptForm(request.POST)
        if form.is_valid():
            # Attach the new receipt to the logged-in user before saving.
            form.instance.user = request.user
            form.save()
            return redirect(reverse('receipt_list', args=[request.user.id]))
    else:
        form = forms.ReceiptForm()
    # GET, or invalid POST: re-render the form (with errors if bound).
    return render(request, 'receipt_create.html', {'form': form})
|
Use form.save for receipt creation
|
Use form.save for receipt creation
|
Python
|
mit
|
trimailov/finance,trimailov/finance,trimailov/finance
|
1e01e66f23f7a2ca541a29d29658749f95352c41
|
generate-key.py
|
generate-key.py
|
#!/usr/bin/python
import os
import sqlite3
import sys
import time
if len(sys.argv) < 3:
raise ValueError('Usage: %s "Firstnam Lastname" email@example.com' % sys.argv[0])
db = sqlite3.connect('/var/lib/zon-api/data.db')
api_key = str(os.urandom(26).encode('hex'))
tier = 'free'
name = sys.argv[1]
email = sys.argv[2]
requests = 0
reset = int(time.time())
query = 'INSERT INTO client VALUES (?, ?, ?, ?, ?, ?)'
db.execute(query, (api_key, tier, name, email, requests, reset))
db.commit()
db.close()
print api_key
|
#!/usr/bin/python
import os
import sqlite3
import sys
import time
db = sqlite3.connect('/var/lib/zon-api/data.db')
if len(sys.argv) < 3:
print('Usage: %s "Firstname Lastname" email@example.com' % sys.argv[0])
print('\nLast keys:')
query = 'SELECT * FROM client ORDER by reset DESC limit 10'
for client in db.execute(query):
print('{0}: "{2}" {3}'.format(*client))
sys.exit(1)
api_key = str(os.urandom(26).encode('hex'))
tier = 'free'
name = sys.argv[1]
email = sys.argv[2]
requests = 0
reset = int(time.time())
query = 'INSERT INTO client VALUES (?, ?, ?, ?, ?, ?)'
db.execute(query, (api_key, tier, name, email, requests, reset))
db.commit()
db.close()
print api_key
|
Print last 10 generated keys when no arguments were given.
|
Print last 10 generated keys when no arguments were given.
|
Python
|
bsd-3-clause
|
ZeitOnline/content-api,ZeitOnline/content-api
|
2af5eff46cbae0927aeee135c22304e108519659
|
server/python_django/file_uploader/__init__.py
|
server/python_django/file_uploader/__init__.py
|
"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
from django.utils import simplejson as json
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
#read the file content, if it is not read when the request is multi part then the client get an error
fileContent = uploaded(fileSize)
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(fileContent)
file.close()
return json.dumps({"success": True})
else:
return json.dumps({"error": "File is too large."})
else:
return json.dumps({"error": "File has an invalid extension."})
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
|
"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
from django.conf import settings
from django.utils import simplejson as json
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=None):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit or settings.FILE_UPLOAD_MAX_MEMORY_SIZE
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
#read the file content, if it is not read when the request is multi part then the client get an error
fileContent = uploaded(fileSize)
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(fileContent)
file.close()
return json.dumps({"success": True})
else:
return json.dumps({"error": "File is too large."})
else:
return json.dumps({"error": "File has an invalid extension."})
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
|
Use the default file upload max memory size
|
Use the default file upload max memory size
|
Python
|
mit
|
SimonWaldherr/uploader,SimonWaldherr/uploader,FineUploader/fine-uploader,SimonWaldherr/uploader,FineUploader/fine-uploader,SimonWaldherr/uploader,SimonWaldherr/uploader,SimonWaldherr/uploader,SimonWaldherr/uploader,FineUploader/fine-uploader
|
5c874677cc978e1cdd563a563d62bae162d3b7ac
|
mycroft/skills/audioservice.py
|
mycroft/skills/audioservice.py
|
import time

from mycroft.messagebus.message import Message


class AudioService():
    """Thin client for the Mycroft audio service message bus API."""

    def __init__(self, emitter):
        self.emitter = emitter
        self.emitter.on('MycroftAudioServiceTrackInfoReply', self._track_info)
        self.info = None  # filled in asynchronously by _track_info

    def _track_info(self, message=None):
        # Bus callback: stash the reply payload for track_info() to return.
        self.info = message.data

    def play(self, tracks=None, utterance=''):
        """Ask the audio service to play *tracks* (a list of URIs)."""
        # Fix: None instead of a mutable [] default -- a shared default list
        # would persist between calls and could accumulate state.
        if tracks is None:
            tracks = []
        self.emitter.emit(Message('MycroftAudioServicePlay',
                                  data={'tracks': tracks,
                                        'utterance': utterance}))

    def track_info(self):
        """Request current track info and block until the reply arrives."""
        self.info = None
        self.emitter.emit(Message('MycroftAudioServiceTrackInfo'))
        # Busy-wait with no timeout; this can hang if the audio service
        # never answers -- TODO consider adding a timeout.
        while self.info is None:
            time.sleep(0.1)
        return self.info
|
import time

from mycroft.messagebus.message import Message


class AudioService():
    """Thin client for the Mycroft audio service message bus API."""

    def __init__(self, emitter):
        self.emitter = emitter
        self.emitter.on('MycroftAudioServiceTrackInfoReply', self._track_info)
        self.info = None  # filled in asynchronously by _track_info

    def _track_info(self, message=None):
        # Bus callback: stash the reply payload for track_info() to return.
        self.info = message.data

    def play(self, tracks=None, utterance=''):
        """Ask the audio service to play *tracks* (a URI string or a list).

        Raises ValueError if *tracks* is neither a string nor a list.
        """
        # Fix: None instead of a mutable [] default -- a shared default list
        # would persist between calls and could accumulate state.
        if tracks is None:
            tracks = []
        elif isinstance(tracks, basestring):
            # Accept a single track URI for convenience.
            tracks = [tracks]
        elif not isinstance(tracks, list):
            raise ValueError('tracks must be a string or a list of tracks')
        self.emitter.emit(Message('MycroftAudioServicePlay',
                                  data={'tracks': tracks,
                                        'utterance': utterance}))

    def track_info(self):
        """Request current track info and block until the reply arrives."""
        self.info = None
        self.emitter.emit(Message('MycroftAudioServiceTrackInfo'))
        # Busy-wait with no timeout; this can hang if the audio service
        # never answers -- TODO consider adding a timeout.
        while self.info is None:
            time.sleep(0.1)
        return self.info
|
Add check for valid type of tracks
|
Add check for valid type of tracks
|
Python
|
apache-2.0
|
aatchison/mycroft-core,MycroftAI/mycroft-core,MycroftAI/mycroft-core,aatchison/mycroft-core,linuxipho/mycroft-core,forslund/mycroft-core,linuxipho/mycroft-core,Dark5ide/mycroft-core,Dark5ide/mycroft-core,forslund/mycroft-core
|
77ef9f4a7ccd51d7b070da31ff4c30768653bb7b
|
tools/build_modref_templates.py
|
tools/build_modref_templates.py
|
#!/usr/bin/env python
"""Script to auto-generate our API docs.
"""
# stdlib imports
import os
# local imports
from apigen import ApiDocWriter
#*****************************************************************************
if __name__ == '__main__':
package = 'nipype'
outdir = os.path.join('api','generated')
docwriter = ApiDocWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='api')
print '%d files written' % len(docwriter.written_modules)
|
#!/usr/bin/env python
"""Script to auto-generate our API docs.
"""
# stdlib imports
import os
# local imports
from apigen import ApiDocWriter
#*****************************************************************************
if __name__ == '__main__':
package = 'nipype'
outdir = os.path.join('api','generated')
docwriter = ApiDocWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='api')
print '%d files written' % len(docwriter.written_modules)
|
Remove alloy and s3 from generated docs, just for 0.1 release.
|
Remove alloy and s3 from generated docs, just for 0.1 release.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@496 ead46cd0-7350-4e37-8683-fc4c6f79bf00
|
Python
|
bsd-3-clause
|
blakedewey/nipype,gerddie/nipype,FCP-INDI/nipype,rameshvs/nipype,blakedewey/nipype,satra/NiPypeold,FCP-INDI/nipype,FredLoney/nipype,arokem/nipype,dmordom/nipype,pearsonlab/nipype,pearsonlab/nipype,Leoniela/nipype,glatard/nipype,mick-d/nipype_source,mick-d/nipype,dmordom/nipype,arokem/nipype,carlohamalainen/nipype,mick-d/nipype_source,sgiavasis/nipype,FredLoney/nipype,arokem/nipype,mick-d/nipype,FredLoney/nipype,carolFrohlich/nipype,JohnGriffiths/nipype,JohnGriffiths/nipype,glatard/nipype,gerddie/nipype,mick-d/nipype,sgiavasis/nipype,blakedewey/nipype,fprados/nipype,glatard/nipype,carlohamalainen/nipype,carolFrohlich/nipype,pearsonlab/nipype,grlee77/nipype,blakedewey/nipype,wanderine/nipype,mick-d/nipype,carolFrohlich/nipype,dmordom/nipype,rameshvs/nipype,JohnGriffiths/nipype,sgiavasis/nipype,fprados/nipype,FCP-INDI/nipype,iglpdc/nipype,grlee77/nipype,iglpdc/nipype,glatard/nipype,grlee77/nipype,wanderine/nipype,wanderine/nipype,dgellis90/nipype,JohnGriffiths/nipype,FCP-INDI/nipype,rameshvs/nipype,rameshvs/nipype,satra/NiPypeold,wanderine/nipype,christianbrodbeck/nipype,gerddie/nipype,gerddie/nipype,dgellis90/nipype,iglpdc/nipype,fprados/nipype,dgellis90/nipype,pearsonlab/nipype,sgiavasis/nipype,arokem/nipype,carolFrohlich/nipype,carlohamalainen/nipype,iglpdc/nipype,grlee77/nipype,dgellis90/nipype,Leoniela/nipype,mick-d/nipype_source,Leoniela/nipype,christianbrodbeck/nipype
|
0f3c33de86d38cf47f84df97a79e838d37264b7c
|
sugar/session/LogWriter.py
|
sugar/session/LogWriter.py
|
import os
import sys

import dbus


class LogWriter:
    """Redirects stdout/stderr to the Sugar logger service over D-Bus."""

    def __init__(self, application):
        # application: the name under which forwarded log lines are recorded.
        self._application = application
        bus = dbus.SessionBus()
        proxy_obj = bus.get_object('com.redhat.Sugar.Logger', '/com/redhat/Sugar/Logger')
        self._logger = dbus.Interface(proxy_obj, 'com.redhat.Sugar.Logger')

    def start(self):
        # Only capture the streams when the user opted in via the
        # SUGAR_USE_CONSOLE environment variable.
        if os.environ.has_key('SUGAR_USE_CONSOLE'):
            sys.stdout = self
            sys.stderr = self

    def write(self, s):
        # File-like hook: forward each written chunk to the D-Bus logger.
        self._logger.log(self._application, s)
|
import os
import sys

import dbus
import gobject


class LogWriter:
    """Redirects stdout/stderr to the Sugar logger service over D-Bus."""

    def __init__(self, application):
        # application: the name under which forwarded log lines are recorded.
        self._application = application
        bus = dbus.SessionBus()
        proxy_obj = bus.get_object('com.redhat.Sugar.Logger', '/com/redhat/Sugar/Logger')
        self._logger = dbus.Interface(proxy_obj, 'com.redhat.Sugar.Logger')

    def start(self):
        # Only capture the streams when the user opted in via the
        # SUGAR_USE_CONSOLE environment variable.
        if os.environ.has_key('SUGAR_USE_CONSOLE'):
            sys.stdout = self
            sys.stderr = self

    def write(self, s):
        # Defer the D-Bus call to the main loop's idle phase so that
        # writing never blocks the caller.
        gobject.idle_add(self._write, s)

    def _write(self, s):
        self._logger.log(self._application, s)
        # Return False so gobject runs this idle callback only once.
        return False
|
Add messages on idle so that we don't break
|
Add messages on idle so that we don't break
|
Python
|
lgpl-2.1
|
sugarlabs/sugar-toolkit,ceibal-tatu/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,manuq/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,gusDuarte/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,puneetgkaur/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,puneetgkaur/backup_sugar_sugartoolkit,ceibal-tatu/sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,puneetgkaur/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,samdroid-apps/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,i5o/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,manuq/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,tchx84/debian-pkg-sugar-toolkit
|
2022357fd0f81be6f3ca91718a6c8c1d1d46ac1b
|
examples/olfaction/config_files/gen_olf_stimuli.py
|
examples/olfaction/config_files/gen_olf_stimuli.py
|
"""
Create odorant stimuli in hd5 format
"""
"""
Create the gexf configuration based on E. Hallem's cell paper on 2006
"""
import numpy as np
import h5py
osn_num = 1375;
f = h5py.File("al.hdf5","w")
dt = 1e-4 # time step
Ot = 2000 # number of data point during reset period
Rt = 1000 # number of data point during odor delivery period
Nt = 4*Ot + 3*Rt # number of data point
t = np.arange(0,dt*Nt,dt)
I = -1.*0.0195 # amplitude of the onset odorant concentration
u_on = I*np.ones( Ot, dtype=np.float64)
u_off = np.zeros( Ot, dtype=np.float64)
u_reset = np.zeros( Rt, dtype=np.float64)
u = np.concatenate((u_off,u_reset,u_on,u_reset,u_off,u_reset,u_on))
u_all = np.transpose( np.kron( np.ones((osn_num,1)), u))
# create the dataset
dset = f.create_dataset("acetone_on_off.hdf5",(Nt, osn_num), dtype=np.float64,\
data = u_all)
f.close()
|
"""
Create odorant stimuli in hd5 format
"""
"""
Create the gexf configuration based on E. Hallem's cell paper on 2006
"""
import numpy as np
import h5py
osn_num = 1375;
f = h5py.File("olfactory_stimulus.h5","w")
dt = 1e-4 # time step
Ot = 2000 # number of data point during reset period
Rt = 1000 # number of data point during odor delivery period
Nt = 4*Ot + 3*Rt # number of data point
t = np.arange(0,dt*Nt,dt)
I = -1.*0.0195 # amplitude of the onset odorant concentration
u_on = I*np.ones( Ot, dtype=np.float64)
u_off = np.zeros( Ot, dtype=np.float64)
u_reset = np.zeros( Rt, dtype=np.float64)
u = np.concatenate((u_off,u_reset,u_on,u_reset,u_off,u_reset,u_on))
u_all = np.transpose( np.kron( np.ones((osn_num,1)), u))
# create the dataset
dset = f.create_dataset("real",(Nt, osn_num), dtype=np.float64,\
data = u_all)
f.close()
|
Rename olfactory stimulus file and internal array.
|
Rename olfactory stimulus file and internal array.
--HG--
branch : LPU
|
Python
|
bsd-3-clause
|
cerrno/neurokernel
|
58d131e8aceb1adbbcdce2e1d4a86f5fb4615196
|
Lib/xml/__init__.py
|
Lib/xml/__init__.py
|
"""Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
if __name__ == "xml":
try:
import _xmlplus
except ImportError:
pass
else:
import sys
sys.modules[__name__] = _xmlplus
|
"""Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
try:
import _xmlplus
except ImportError:
pass
else:
import sys
sys.modules[__name__] = _xmlplus
|
Remove the outer test for __name__; not necessary.
|
Remove the outer test for __name__; not necessary.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
2b58318ad7134a8c894b70918520a89b51a2d6dd
|
cla_backend/apps/reports/tests/test_utils.py
|
cla_backend/apps/reports/tests/test_utils.py
|
import mock
import os

from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings

from reports.utils import get_s3_connection


class UtilsTestCase(TestCase):
    """Tests for reports.utils helpers."""

    @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002")
    def test_get_s3_connection(self):
        # Patch the env vars the helper reads so no real AWS access occurs.
        envs = {"AWS_S3_HOST": "s3.eu-west-2.amazonaws.com", "S3_USE_SIGV4": "True"}
        with mock.patch.dict(os.environ, envs):
            conn = get_s3_connection()
            self.assertIsInstance(conn, S3Connection)
|
import mock
import os

from boto.s3.connection import S3Connection
from django.test import TestCase, override_settings

from reports.utils import get_s3_connection


class UtilsTestCase(TestCase):
    """Tests for reports.utils helpers."""

    # AWS_S3_HOST is supplied as a Django setting here (not an env var)
    # because its value is read at settings-load time.
    @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002", AWS_S3_HOST="s3.eu-west-2.amazonaws.com")
    def test_get_s3_connection(self):
        # Patch the remaining env var the helper reads at call time.
        envs = {"S3_USE_SIGV4": "True"}
        with mock.patch.dict(os.environ, envs):
            conn = get_s3_connection()
            self.assertIsInstance(conn, S3Connection)
|
Modify s3 connection test for new AWS_S3_HOST setting
|
Modify s3 connection test for new AWS_S3_HOST setting
The value is now calculated from the env var at load time, so mocking
the env var value is not effective
(cherry picked from commit 044219df7123e3a03a38cc06c9e8e8e9e80b0cbe)
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
9477478f81315edcc0e5859b2325ea70694ea2be
|
lemon/sitemaps/views.py
|
lemon/sitemaps/views.py
|
from django.shortcuts import render
from django.utils.translation import get_language

from lemon.sitemaps.models import Item


def sitemap_xml(request):
    """Serve sitemap.xml with enabled items for the current site and language."""
    qs = Item.objects.filter(sites=request.site, enabled=True, language=get_language())
    return render(request, 'sitemaps/sitemap.xml',
                  {'object_list': qs}, content_type='application/xml')
|
from django.shortcuts import render

from lemon.sitemaps.models import Item


def sitemap_xml(request):
    """Serve sitemap.xml with all enabled items for the current site."""
    qs = Item.objects.filter(sites=request.site, enabled=True)
    return render(request, 'sitemaps/sitemap.xml',
                  {'object_list': qs}, content_type='application/xml')
|
Remove language filtration in sitemap.xml
|
Remove language filtration in sitemap.xml
|
Python
|
bsd-3-clause
|
trilan/lemon,trilan/lemon,trilan/lemon
|
6bf26f15855ee6e13e11a2b026ee90b9302a68a7
|
PyFVCOM/__init__.py
|
PyFVCOM/__init__.py
|
"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.1'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
|
"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.1'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
|
Add a better name for the coordinate functions. Eventually, ll2utm will be deprecated.
|
Add a better name for the coordinate functions. Eventually, ll2utm will be deprecated.
|
Python
|
mit
|
pwcazenave/PyFVCOM
|
751c38ebe052a689b7962491ffd5f54b593da397
|
harvesting/datahub.io/fix-urls.py
|
harvesting/datahub.io/fix-urls.py
|
import sys
fix_url = sys.argv[1]
for line in sys.stdin:
e = line.strip().split(" ")
if e[0].startswith("_:"):
e[0] = "<%s>" % e[0].replace("_:",fix_url)
if e[2].startswith("_:"):
e[2] = "<%s>" % e[2].replace("_:",fix_url)
print(" ".join(e))
|
import sys
fix_url = sys.argv[1]
dct = "<http://purl.org/dc/terms/"
dcelems = ["contributor", "coverage>", "creator>", "date>", "description>",
"format>", "identifier>", "language>", "publisher>", "relation>",
"rights>", "source>", "subject>", "title>", "type>"]
for line in sys.stdin:
e = line.strip().split(" ")
if e[0].startswith("_:"):
e[0] = "<%s>" % e[0].replace("_:",fix_url)
if e[1].startswith(dct) and e[1][len(dct):] in dcelems:
e[1] = "<http://purl.org/dc/elements/1.1/" + e[1][len(dct):]
if e[2].startswith("_:"):
e[2] = "<%s>" % e[2].replace("_:",fix_url)
print(" ".join(e))
|
Fix datathub DCT uris to DC
|
Fix datathub DCT uris to DC
|
Python
|
apache-2.0
|
liderproject/linghub,liderproject/linghub,liderproject/linghub,liderproject/linghub
|
e8d321c35d6e0a8294e0766c3836efe192ae2df0
|
print_items_needing_requeue.py
|
print_items_needing_requeue.py
|
"""
Walks through your greader-logs directory (or directory containing them)
and prints every item_name that has been finished but has no valid .warc.gz
(as determined by greader-warc-checker's .verification logs)
"""
import os
import sys
try:
import simplejson as json
except ImportError:
import json
basename = os.path.basename
def main():
basedirs = sys.argv[1:]
valids = set()
invalids = set()
for basedir in basedirs:
for directory, dirnames, filenames in os.walk(basedir):
if basename(directory).startswith("."):
print "Skipping dotdir %r" % (directory,)
continue
for f in filenames:
if f.startswith("."):
print "Skipping dotfile %r" % (f,)
continue
fname = os.path.join(directory, f)
if fname.endswith(".verification"):
with open(fname, "rb") as fh:
for line in fh:
data = json.loads(line)
if data["valid"]:
valids.add(data["item_name"])
else:
invalids.add(data["item_name"])
needs_requeue = sorted(invalids - valids)
for item_name in needs_requeue:
print item_name
if __name__ == '__main__':
main()
|
"""
Walks through your greader-logs directory (or directory containing them)
and prints every item_name that has been finished but has no valid .warc.gz
(as determined by greader-warc-checker's .verification logs)
"""
import os
import sys
try:
import simplejson as json
except ImportError:
import json
basename = os.path.basename
def main():
greader_items = sys.argv[1]
basedirs = sys.argv[2:]
assert basedirs, "Give me some basedirs containing .verification files"
valids = set()
invalids = set()
largest = 0
for basedir in basedirs:
for directory, dirnames, filenames in os.walk(basedir):
if basename(directory).startswith("."):
continue
for f in filenames:
if f.startswith("."):
continue
fname = os.path.join(directory, f)
if fname.endswith(".verification"):
with open(fname, "rb") as fh:
for line in fh:
data = json.loads(line)
if data["valid"]:
valids.add(data["item_name"])
else:
invalids.add(data["item_name"])
largest = max(largest, int(data["item_name"], 10))
for n in xrange(largest):
item_name = str(n).zfill(10)
if not item_name in valids and os.path.exists(greader_items + '/' + item_name[:6] + '/' + item_name + '.gz'):
print item_name
if __name__ == '__main__':
main()
|
Print items that are bad *or* missing
|
Print items that are bad *or* missing
|
Python
|
mit
|
ludios/greader-warc-checker
|
400c8de8a3a714da21c0e2b175c6e4adad3677b9
|
syft/__init__.py
|
syft/__init__.py
|
import importlib
import pkgutil
ignore_packages = set(['test'])
def import_submodules(package, recursive=True):
""" Import all submodules of a module, recursively, including subpackages
:param package: package (name or actual module)
:type package: str | module
:rtype: dict[str, types.ModuleType]
"""
if isinstance(package, str):
package = importlib.import_module(package)
results = {}
for loader, name, is_pkg in pkgutil.walk_packages(package.__path__):
if(name not in ignore_packages):
full_name = package.__name__ + '.' + name
results[full_name] = importlib.import_module(full_name)
if recursive and is_pkg:
results.update(import_submodules(full_name))
return results
# import submodules recursively
import_submodules(__name__)
|
import importlib
import pkgutil
ignore_packages = set(['test'])
def import_submodules(package, recursive=True):
""" Import all submodules of a module, recursively, including subpackages
:param package: package (name or actual module)
:type package: str | module
:rtype: dict[str, types.ModuleType]
"""
if isinstance(package, str):
package = importlib.import_module(package)
results = {}
for loader, name, is_pkg in pkgutil.walk_packages(package.__path__):
# test submodule names are 'syft.test.*', so this matches the 'ignore_packages' above
if name.split('.')[1] not in ignore_packages:
full_name = package.__name__ + '.' + name
results[full_name] = importlib.import_module(full_name)
if recursive and is_pkg:
results.update(import_submodules(full_name))
return results
# import submodules recursively
import_submodules(__name__)
|
Check for the name of the submodule we'd like to ignore in a more general way.
|
Check for the name of the submodule we'd like to ignore in a more general way.
|
Python
|
apache-2.0
|
aradhyamathur/PySyft,sajalsubodh22/PySyft,OpenMined/PySyft,dipanshunagar/PySyft,sajalsubodh22/PySyft,dipanshunagar/PySyft,joewie/PySyft,cypherai/PySyft,cypherai/PySyft,joewie/PySyft,aradhyamathur/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
849b9eb93220af324343facb5f83d112de952fa0
|
mpltools/util.py
|
mpltools/util.py
|
import matplotlib.pyplot as plt
__all__ = ['figure', 'figsize']
def figure(aspect_ratio=1.3, scale=1, width=None, *args, **kwargs):
"""Return matplotlib figure window.
Parameters
----------
aspect_ratio : float
Aspect ratio, width / height, of figure.
scale : float
Scale default size of the figure.
width : float
Figure width in inches. If None, default to rc parameters.
See Also
--------
figsize
"""
assert 'figsize' not in kwargs
size = figsize(aspect_ratio=aspect_ratio, scale=scale, width=width)
return plt.figure(figsize=size, *args, **kwargs)
def figsize(aspect_ratio=1.3, scale=1, width=None):
"""Return figure size (width, height) in inches.
Parameters
----------
aspect_ratio : float
Aspect ratio, width / height, of figure.
scale : float
Scale default size of the figure.
width : float
Figure width in inches. If None, default to rc parameters.
"""
if width is None:
width, h = plt.rcParams['figure.figsize']
height = width / aspect_ratio
size = (width * scale, height * scale)
return size
|
import matplotlib.pyplot as plt
__all__ = ['figure', 'figsize']
def figure(aspect_ratio=1.3, scale=1, width=None, *args, **kwargs):
"""Return matplotlib figure window.
Calculate figure height using `aspect_ratio` and *default* figure width.
Parameters
----------
aspect_ratio : float
Aspect ratio, width / height, of figure.
scale : float
Scale default size of the figure.
width : float
Figure width in inches. If None, default to rc parameters.
See Also
--------
figsize
"""
assert 'figsize' not in kwargs
size = figsize(aspect_ratio=aspect_ratio, scale=scale, width=width)
return plt.figure(figsize=size, *args, **kwargs)
def figsize(aspect_ratio=1.3, scale=1, width=None):
"""Return figure size (width, height) in inches.
Calculate figure height using `aspect_ratio` and *default* figure width.
Parameters
----------
aspect_ratio : float
Aspect ratio, width / height, of figure.
scale : float
Scale default size of the figure.
width : float
Figure width in inches. If None, default to rc parameters.
"""
if width is None:
width, h = plt.rcParams['figure.figsize']
height = width / aspect_ratio
size = (width * scale, height * scale)
return size
|
Add note to docstring of `figure` and `figsize`.
|
ENH: Add note to docstring of `figure` and `figsize`.
|
Python
|
bsd-3-clause
|
tonysyu/mpltools,matteoicardi/mpltools
|
fef12d2a5cce5c1db488a4bb11b9c21b83a66cab
|
avocado/export/_json.py
|
avocado/export/_json.py
|
import json
import inspect
from _base import BaseExporter
class JSONGeneratorEncoder(json.JSONEncoder):
"Handle generator objects and expressions."
def default(self, obj):
if inspect.isgenerator(obj):
return list(obj)
return super(JSONGeneratorEncoder, self).default(obj)
class JSONExporter(BaseExporter):
file_extension = 'json'
content_type = 'application/json'
preferred_formats = ('number', 'string')
def write(self, iterable, buff=None):
buff = self.get_file_obj(buff)
encoder = JSONGeneratorEncoder()
for chunk in encoder.iterencode(self.read(iterable)):
buff.write(chunk)
return buff
|
import inspect
from django.core.serializers.json import DjangoJSONEncoder
from _base import BaseExporter
class JSONGeneratorEncoder(DjangoJSONEncoder):
"Handle generator objects and expressions."
def default(self, obj):
if inspect.isgenerator(obj):
return list(obj)
return super(JSONGeneratorEncoder, self).default(obj)
class JSONExporter(BaseExporter):
file_extension = 'json'
content_type = 'application/json'
preferred_formats = ('number', 'string')
def write(self, iterable, buff=None):
buff = self.get_file_obj(buff)
encoder = JSONGeneratorEncoder()
for chunk in encoder.iterencode(self.read(iterable)):
buff.write(chunk)
return buff
|
Update JSONGeneratorEncoder to subclass DjangoJSONEncoder This handles Decimals and datetimes
|
Update JSONGeneratorEncoder to subclass DjangoJSONEncoder
This handles Decimals and datetimes
|
Python
|
bsd-2-clause
|
murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado
|
f7fac123bf72af01272bc27a1dfabb788f611908
|
bandit/backends/smtp.py
|
bandit/backends/smtp.py
|
from __future__ import unicode_literals
from django.core.mail.backends.smtp import EmailBackend as SMTPBackend
from bandit.backends.base import HijackBackendMixin, LogOnlyBackendMixin
class HijackSMTPBackend(HijackBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages drops them to a single email
address.
"""
pass
class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages and logs them, allowing
only messages destined for ADMINS to be sent via SMTP.
"""
pass
|
from __future__ import unicode_literals
from django.core.mail.backends.smtp import EmailBackend as SMTPBackend
from bandit.backends.base import HijackBackendMixin, LogOnlyBackendMixin
class HijackSMTPBackend(HijackBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages drops them to a single email
address.
"""
pass
class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend):
"""
This backend intercepts outgoing messages and logs them, allowing
only messages destined for ADMINS, BANDIT_EMAIL, SERVER_EMAIL, or
BANDIT_WHITELIST to be sent via SMTP.
"""
pass
|
Update LogOnlySMTPBackend docstring. Not only admin emails are allowed, all approved emails are still sent.
|
Update LogOnlySMTPBackend docstring.
Not only admin emails are allowed, all approved emails are still sent.
|
Python
|
bsd-3-clause
|
caktus/django-email-bandit,caktus/django-email-bandit
|
527593c5f183054e330894e6b7161e24cca265a5
|
lily/notes/factories.py
|
lily/notes/factories.py
|
import random
import factory
from factory.declarations import SubFactory, SelfAttribute, LazyAttribute
from factory.django import DjangoModelFactory
from faker.factory import Factory
from lily.accounts.factories import AccountFactory
from lily.contacts.factories import ContactFactory
from lily.users.factories import LilyUserFactory
from .models import Note
faker = Factory.create('nl_NL')
class NoteFactory(DjangoModelFactory):
content = LazyAttribute(lambda o: faker.text())
author = SubFactory(LilyUserFactory, tenant=SelfAttribute('..tenant'))
@factory.lazy_attribute
def subject(self):
SubjectFactory = random.choice([AccountFactory, ContactFactory])
return SubjectFactory(tenant=self.tenant)
class Meta:
model = Note
|
import random
from datetime import datetime
import pytz
import factory
from factory.declarations import SubFactory, SelfAttribute, LazyAttribute
from factory.django import DjangoModelFactory
from faker.factory import Factory
from lily.accounts.factories import AccountFactory
from lily.contacts.factories import ContactFactory
from lily.users.factories import LilyUserFactory
from .models import Note
faker = Factory.create('nl_NL')
class NoteFactory(DjangoModelFactory):
content = LazyAttribute(lambda o: faker.text())
author = SubFactory(LilyUserFactory, tenant=SelfAttribute('..tenant'))
sort_by_date = LazyAttribute(lambda o: datetime.now(tz=pytz.utc))
@factory.lazy_attribute
def subject(self):
SubjectFactory = random.choice([AccountFactory, ContactFactory])
return SubjectFactory(tenant=self.tenant)
class Meta:
model = Note
|
Fix so testdata can be loaded when setting up local environment
|
Fix so testdata can be loaded when setting up local environment
|
Python
|
agpl-3.0
|
HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily
|
245dd2ef403cd88aebf5dd8923585a9e0489dd97
|
mongoalchemy/util.py
|
mongoalchemy/util.py
|
# The MIT License
#
# Copyright (c) 2010 Jeffrey Jenkins
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
def classproperty(fun):
class Descriptor(property):
def __get__(self, instance, owner):
return fun(owner)
return Descriptor()
class UNSET(object):
def __repr__(self):
return 'UNSET'
def __eq__(self, other):
return other.__class__ == self.__class__
UNSET = UNSET()
|
# The MIT License
#
# Copyright (c) 2010 Jeffrey Jenkins
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
def classproperty(fun):
class Descriptor(property):
def __get__(self, instance, owner):
return fun(owner)
return Descriptor()
class UNSET(object):
def __repr__(self):
return 'UNSET'
def __eq__(self, other):
return other.__class__ == self.__class__
def __nonzero__(self):
return False
UNSET = UNSET()
|
Change UNSET to so bool(UNSET) is False.
|
Change UNSET to so bool(UNSET) is False.
|
Python
|
mit
|
shakefu/MongoAlchemy,shakefu/MongoAlchemy,shakefu/MongoAlchemy
|
ba98874be9370ec49c2c04e89d456f723b5d083c
|
monitoring/test/test_data/exceptions.py
|
monitoring/test/test_data/exceptions.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monascaclient.openstack.common.apiclient import exceptions as monascacli
from openstack_dashboard.test.test_data import exceptions
def data(TEST):
TEST.exceptions = exceptions.data
monitoring_exception = monascacli.ClientException
TEST.exceptions.monitoring = exceptions.create_stubbed_exception(
monitoring_exception)
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(dmllr): Remove me when we require monascaclient >= 1.3.0
try:
from monascaclient.apiclient import exceptions as monascacli
except ImportError:
from monascaclient.openstack.common.apiclient import exceptions as monascacli
from openstack_dashboard.test.test_data import exceptions
def data(TEST):
TEST.exceptions = exceptions.data
monitoring_exception = monascacli.ClientException
TEST.exceptions.monitoring = exceptions.create_stubbed_exception(
monitoring_exception)
|
Adjust tests for python-monascaclient >= 1.3.0
|
Adjust tests for python-monascaclient >= 1.3.0
the exceptions module was moved out of the openstack.common namespace,
so try to import the new location first and fall back to the old
one if it doesn't exist.
Change-Id: I3305775baaab15dca8d5e7e5cfc0932f94d4d153
|
Python
|
apache-2.0
|
openstack/monasca-ui,openstack/monasca-ui,openstack/monasca-ui,stackforge/monasca-ui,stackforge/monasca-ui,stackforge/monasca-ui,stackforge/monasca-ui,openstack/monasca-ui
|
8ef41f9ac8ec8a7b7fc9e63b2ff6453782c41d62
|
demo/__init__.py
|
demo/__init__.py
|
"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
|
"""Package for PythonTemplateDemo."""
__project__ = 'PythonTemplateDemo'
__version__ = '0.0.0'
VERSION = __project__ + '-' + __version__
PYTHON_VERSION = 3, 4
import sys
if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test)
exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
|
Deploy Travis CI build 381 to GitHub
|
Deploy Travis CI build 381 to GitHub
|
Python
|
mit
|
jacebrowning/template-python-demo
|
edf151feea948ebf4a9f00a0248ab1f363cacfac
|
scaffolder/commands/install.py
|
scaffolder/commands/install.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from optparse import make_option
from optparse import OptionParser
from scaffolder import get_minion_path
from scaffolder.core.template import TemplateManager
from scaffolder.core.commands import BaseCommand
class InstallCommand(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"-t",
"--target",
dest="target_dir",
default=get_minion_path('weaver'),
help='Project Templates directory.',
metavar="TEMPLATES_DIR"
),
)
def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
help = 'install: Installs a Project Template.'
parser = OptionParser(
version=self.get_version(),
option_list=self.get_option_list(),
usage='\n %prog {0} ACTION [OPTIONS]'.format(name)
)
aliases = ('tmp',)
BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
def run(self, *args, **options):
src = args[0]
tgt = options.get('target_dir')
manager = TemplateManager()
manager.install(src=src, dest=tgt)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from optparse import make_option
from optparse import OptionParser
from scaffolder import get_minion_path
from scaffolder.core.template import TemplateManager
from scaffolder.core.commands import BaseCommand
class InstallCommand(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"-t",
"--target",
dest="target_dir",
default=get_minion_path('weaver'),
help='Project Templates directory.',
metavar="TEMPLATES_DIR"
),
)
help = 'Installs a Project Template.'
def run(self, *args, **options):
src = args[0]
tgt = options.get('target_dir')
manager = TemplateManager()
manager.install(src=src, dest=tgt)
|
Remove __init__ method, not needed.
|
InstallCommand: Remove __init__ method, not needed.
|
Python
|
mit
|
goliatone/minions
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.