commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
7f8cc08405639f32cc4a92ee478281f14fb365a9 | Add taobao.com authentication, oauth 2.0 | ImaginationForPeople/velruse,ImaginationForPeople/velruse,bbangert/velruse,miedzinski/velruse,bbangert/velruse,miedzinski/velruse | velruse/providers/taobao.py | velruse/providers/taobao.py | """Taobao Authentication Views"""
from hashlib import md5
from json import loads
import time
import requests
from pyramid.httpexceptions import HTTPFound
from velruse.api import AuthenticationComplete
from velruse.exceptions import AuthenticationDenied
from velruse.exceptions import ThirdPartyFailure
from velruse.utils import flat_url
class TaobaoAuthenticationComplete(AuthenticationComplete):
    """Marker result type returned after a successful Taobao OAuth 2.0 login."""
def includeme(config):
    """Wire the Taobao login/processing routes into the Pyramid app."""
    config.add_route("taobao_login", "/taobao/login")
    # The process route uses the view callable itself as the route factory,
    # matching the convention used by the other velruse providers.
    config.add_route("taobao_process", "/taobao/process",
                     use_global_views=True,
                     factory=taobao_process)
    config.add_view(taobao_login, route_name="taobao_login")
def taobao_login(request):
    """Kick off the Taobao OAuth 2.0 flow by redirecting to the authorize URL."""
    settings = request.registry.settings
    auth_params = dict(
        client_id=settings['velruse.taobao.app_id'],
        response_type='code',
        redirect_uri=request.route_url('taobao_process'),
    )
    authorize_url = flat_url('https://oauth.taobao.com/authorize', **auth_params)
    return HTTPFound(location=authorize_url)
def taobao_process(request):
    """Process the taobao redirect.

    Exchanges the ``code`` query parameter for an access token, fetches the
    user's profile via the Taobao REST gateway, and returns an
    AuthenticationComplete (or AuthenticationDenied on user refusal).
    """
    config = request.registry.settings
    code = request.GET.get('code')
    if not code:
        reason = request.GET.get('error', 'No reason provided.')
        return AuthenticationDenied(reason)

    # Now retrieve the access token with the code
    r = requests.post('https://oauth.taobao.com/token',
                      dict(grant_type='authorization_code',
                           client_id=config['velruse.taobao.app_id'],
                           client_secret=config['velruse.taobao.app_secret'],
                           redirect_uri=request.route_url('taobao_process'),
                           code=code))
    if r.status_code != 200:
        raise ThirdPartyFailure("Status %s: %s" % (r.status_code, r.content))
    data = loads(r.content)
    access_token = data['access_token']

    # Retrieve profile data
    params = {
        'method': 'taobao.user.get',
        'timestamp': time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()),
        'format': 'json',
        'app_key': config['velruse.taobao.app_id'],
        'v': '2.0',
        'sign_method': 'md5',
        'fields': 'user_id,nick',
        'session': access_token
    }
    # Taobao TOP request signature: MD5 over secret + sorted key/value pairs
    # + secret, uppercased hex digest.
    # NOTE(review): md5() is fed a str here — Python 2 only; under Python 3
    # this would need src.encode(). Confirm target interpreter.
    src = config['velruse.taobao.app_secret']\
        + ''.join(["%s%s" % (k, v) for k, v in sorted(params.items())])\
        + config['velruse.taobao.app_secret']
    params['sign'] = md5(src).hexdigest().upper()
    get_user_info_url = flat_url('http://gw.api.taobao.com/router/rest',
                                 **params)
    r = requests.get(get_user_info_url)
    if r.status_code != 200:
        raise ThirdPartyFailure("Status %s: %s" % (r.status_code, r.content))
    data = loads(r.content)
    # Normalize into the portable-contacts style profile velruse exposes.
    profile = {}
    username = data['user_get_response']['user']['nick']
    profile['accounts'] = [{
        'domain':'taobao.com',
        'username':username,
        'userid':data['user_get_response']['user']['user_id'],
    }]
    profile['displayName'] = username
    profile['preferredUsername'] = username
    cred = {'oauthAccessToken': access_token}
    return TaobaoAuthenticationComplete(profile=profile, credentials=cred)
| mit | Python | |
8d22023a065992f2f218920229df0d8de8feb463 | improve naming/organization | BlackPole/bp-openembedded,scottellis/overo-oe,dellysunnymtech/sakoman-oe,thebohemian/openembedded,sledz/oe,BlackPole/bp-openembedded,sentient-energy/emsw-oe-mirror,sutajiokousagi/openembedded,sentient-energy/emsw-oe-mirror,JamesAng/goe,SIFTeam/openembedded,scottellis/overo-oe,nx111/openembeded_openpli2.1_nx111,openembedded/openembedded,thebohemian/openembedded,sledz/oe,openembedded/openembedded,JamesAng/goe,sutajiokousagi/openembedded,hulifox008/openembedded,scottellis/overo-oe,mrchapp/arago-oe-dev,buglabs/oe-buglabs,sutajiokousagi/openembedded,BlackPole/bp-openembedded,rascalmicro/openembedded-rascal,sledz/oe,xifengchuo/openembedded,JamesAng/oe,thebohemian/openembedded,sentient-energy/emsw-oe-mirror,giobauermeister/openembedded,buglabs/oe-buglabs,yyli/overo-oe,sampov2/audio-openembedded,buglabs/oe-buglabs,dellysunnymtech/sakoman-oe,openembedded/openembedded,scottellis/overo-oe,JamesAng/oe,sutajiokousagi/openembedded,yyli/overo-oe,thebohemian/openembedded,dellysunnymtech/sakoman-oe,giobauermeister/openembedded,nx111/openembeded_openpli2.1_nx111,BlackPole/bp-openembedded,mrchapp/arago-oe-dev,openembedded/openembedded,sledz/oe,yyli/overo-oe,sentient-energy/emsw-oe-mirror,thebohemian/openembedded,nx111/openembeded_openpli2.1_nx111,JamesAng/goe,nx111/openembeded_openpli2.1_nx111,scottellis/overo-oe,giobauermeister/openembedded,xifengchuo/openembedded,JamesAng/goe,buglabs/oe-buglabs,hulifox008/openembedded,openembedded/openembedded,openembedded/openembedded,openembedded/openembedded,dellysunnymtech/sakoman-oe,sledz/oe,mrchapp/arago-oe-dev,hulifox008/openembedded,sledz/oe,yyli/overo-oe,giobauermeister/openembedded,thebohemian/openembedded,dellysunnymtech/sakoman-oe,nx111/openembeded_openpli2.1_nx111,dellysunnymtech/sakoman-oe,xifengchuo/openembedded,JamesAng/oe,buglabs/oe-buglabs,rascalmicro/openembedded-rascal,yyli/overo-oe,hulifox008/openembedded,xifengchuo/openembedded,SIFTeam/openembedded,scottel
lis/overo-oe,dave-billin/overo-ui-moos-auv,dave-billin/overo-ui-moos-auv,hulifox008/openembedded,SIFTeam/openembedded,JamesAng/goe,sutajiokousagi/openembedded,xifengchuo/openembedded,thebohemian/openembedded,giobauermeister/openembedded,dave-billin/overo-ui-moos-auv,sutajiokousagi/openembedded,mrchapp/arago-oe-dev,SIFTeam/openembedded,nx111/openembeded_openpli2.1_nx111,sutajiokousagi/openembedded,xifengchuo/openembedded,xifengchuo/openembedded,yyli/overo-oe,openembedded/openembedded,sentient-energy/emsw-oe-mirror,dellysunnymtech/sakoman-oe,JamesAng/goe,dellysunnymtech/sakoman-oe,dave-billin/overo-ui-moos-auv,giobauermeister/openembedded,BlackPole/bp-openembedded,xifengchuo/openembedded,dave-billin/overo-ui-moos-auv,openembedded/openembedded,rascalmicro/openembedded-rascal,nx111/openembeded_openpli2.1_nx111,sentient-energy/emsw-oe-mirror,nx111/openembeded_openpli2.1_nx111,JamesAng/oe,sentient-energy/emsw-oe-mirror,dellysunnymtech/sakoman-oe,rascalmicro/openembedded-rascal,xifengchuo/openembedded,sampov2/audio-openembedded,rascalmicro/openembedded-rascal,sampov2/audio-openembedded,hulifox008/openembedded,giobauermeister/openembedded,mrchapp/arago-oe-dev,dave-billin/overo-ui-moos-auv,sampov2/audio-openembedded,rascalmicro/openembedded-rascal,sledz/oe,yyli/overo-oe,BlackPole/bp-openembedded,SIFTeam/openembedded,SIFTeam/openembedded,sampov2/audio-openembedded,hulifox008/openembedded,sampov2/audio-openembedded,dave-billin/overo-ui-moos-auv,openembedded/openembedded,yyli/overo-oe,mrchapp/arago-oe-dev,scottellis/overo-oe,sampov2/audio-openembedded,buglabs/oe-buglabs,buglabs/oe-buglabs,giobauermeister/openembedded,giobauermeister/openembedded,BlackPole/bp-openembedded,JamesAng/oe,mrchapp/arago-oe-dev,SIFTeam/openembedded,rascalmicro/openembedded-rascal,JamesAng/oe,buglabs/oe-buglabs,rascalmicro/openembedded-rascal,openembedded/openembedded,JamesAng/goe,JamesAng/oe | lib/oe/test_types.py | lib/oe/test_types.py | import unittest
from oe.maketype import create, factory
class TestTypes(unittest.TestCase):
    """Shared assertion helpers for the oe.maketype test cases."""

    def assertIsInstance(self, obj, cls):
        """Assert that *obj* is an instance of *cls*."""
        return self.assertTrue(isinstance(obj, cls))

    def assertIsNot(self, obj, other):
        """Assert that *obj* and *other* are distinct objects."""
        return self.assertFalse(obj is other)

    def assertFactoryCreated(self, value, type, **flags):
        """Assert a factory is registered for *type* and create() uses it."""
        made = factory(type)
        self.assertIsNot(made, None)
        self.assertIsInstance(create(value, type, **flags), made)
class TestBooleanType(TestTypes):
    """Tests for the 'boolean' maketype conversions."""

    def test_invalid(self):
        """Unrecognised strings and non-string inputs are rejected."""
        for bad in ('', 'foo'):
            self.assertRaises(ValueError, create, bad, 'boolean')
        self.assertRaises(TypeError, create, object(), 'boolean')

    def test_true(self):
        """Every accepted truthy spelling converts to a true value."""
        for text in ('y', 'yes', '1', 't', 'true', 'TRUE', 'truE'):
            self.assertTrue(create(text, 'boolean'))

    def test_false(self):
        """Every accepted falsy spelling converts to a false value."""
        for text in ('n', 'no', '0', 'f', 'false', 'FALSE', 'faLse'):
            self.assertFalse(create(text, 'boolean'))

    def test_bool_equality(self):
        """Converted booleans compare equal to the builtin True/False."""
        self.assertEqual(create('n', 'boolean'), False)
        self.assertNotEqual(create('n', 'boolean'), True)
        self.assertEqual(create('y', 'boolean'), True)
        self.assertNotEqual(create('y', 'boolean'), False)
class TestList(TestTypes):
    """Tests for the 'list' maketype conversions."""

    def assertListEqual(self, value, valid, sep=None):
        """Check *value* parses to *valid*; verify round-trip via str()."""
        parsed = create(value, 'list', separator=sep)
        self.assertEqual(parsed, valid)
        if sep is not None:
            self.assertEqual(parsed.separator, sep)
        self.assertEqual(str(parsed), parsed.separator.join(parsed))

    def test_list_nosep(self):
        expected = ['alpha', 'beta', 'theta']
        self.assertListEqual('alpha beta theta', expected)
        self.assertListEqual('alpha beta\ttheta', expected)
        self.assertListEqual('alpha', ['alpha'])

    def test_list_usersep(self):
        self.assertListEqual('foo:bar', ['foo', 'bar'], ':')
        self.assertListEqual('foo:bar:baz', ['foo', 'bar', 'baz'], ':')
| import unittest
from oe.maketype import create, factory
class TestTypes(unittest.TestCase):
    """Base class with helper assertions for oe.maketype tests."""

    def assertIsInstance(self, obj, cls):
        # Helper (predates the stdlib version): obj must be an instance of cls.
        return self.assertTrue(isinstance(obj, cls))

    def assertIsNot(self, obj, other):
        # Helper: the two references must be different objects.
        return self.assertFalse(obj is other)

    def assertFactoryCreated(self, value, type, **flags):
        # A factory must be registered for *type*, and create() must return
        # an instance of that factory class.
        obj = factory(type)
        self.assertIsNot(obj, None)
        self.assertIsInstance(create(value, type, **flags), obj)
class TestBooleanType(TestTypes):
    """Tests for the 'boolean' maketype conversions."""

    def test_boolean(self):
        """Invalid strings and non-string inputs are rejected."""
        for invalid in ('', 'foo'):
            self.assertRaises(ValueError, create, invalid, 'boolean')
        self.assertRaises(TypeError, create, object(), 'boolean')

    def test_boolean_true(self):
        """Every accepted truthy spelling equals True."""
        for text in ('y', 'yes', '1', 't', 'true', 'TRUE', 'truE'):
            self.assertEqual(create(text, 'boolean'), True)

    def test_boolean_false(self):
        """Every accepted falsy spelling equals False."""
        for text in ('n', 'no', '0', 'f', 'false', 'FALSE', 'faLse'):
            self.assertEqual(create(text, 'boolean'), False)
class TestList(TestTypes):
    """Tests for the 'list' maketype conversions."""

    def assertListEqual(self, value, valid, sep=None):
        # Parse *value* as a list and compare to *valid*; with an explicit
        # separator, str() must round-trip using that separator.
        obj = create(value, 'list', separator=sep)
        self.assertEqual(obj, valid)
        if sep is not None:
            self.assertEqual(obj.separator, sep)
        self.assertEqual(str(obj), obj.separator.join(obj))

    def test_list_nosep(self):
        # Default separator: any run of whitespace.
        testlist = ['alpha', 'beta', 'theta']
        self.assertListEqual('alpha beta theta', testlist)
        self.assertListEqual('alpha beta\ttheta', testlist)
        self.assertListEqual('alpha', ['alpha'])

    def test_list_usersep(self):
        self.assertListEqual('foo:bar', ['foo', 'bar'], ':')
        self.assertListEqual('foo:bar:baz', ['foo', 'bar', 'baz'], ':')
| mit | Python |
d683fee61eeec1c0ea9d0fbd2519b6c9b35616c2 | rename newton_test to smooth_test, add steepest_descent test | aringh/odl,kohr-h/odl,odlgroup/odl,kohr-h/odl,odlgroup/odl,aringh/odl | odl/test/solvers/smooth/smooth_test.py | odl/test/solvers/smooth/smooth_test.py | # Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Test for the Chambolle-Pock solver."""
# Imports for common Python 2/3 codebase
from __future__ import print_function, division, absolute_import
from future import standard_library
standard_library.install_aliases()
# External
import pytest
# Internal
import odl
from odl.operator import OpNotImplementedError
from odl.util.testutils import noise_element
@pytest.fixture(scope="module", params=['l2_squared', 'l2_squared_scaled'])
def functional(request):
    """Return a functional whose optimal value is at 0."""
    name = request.param

    # TODO: Add rosenbrock (#600) and quadratic (#606) functionals
    if name == 'l2_squared':
        space = odl.rn(3)
        return odl.solvers.L2NormSquared(space)
    elif name == 'l2_squared_scaled':
        space = odl.uniform_discr(0, 1, 5)
        scaling = odl.MultiplyOperator(space.element([1, 2, 3, 5, 5]),
                                       domain=space)
        return odl.solvers.L2NormSquared(space) * scaling
    else:
        # `assert False` is stripped when running under `python -O`; raise so
        # an unknown fixture parameter always fails loudly.
        raise ValueError('unknown functional {!r}'.format(name))
@pytest.fixture(scope="module", params=['constant', 'backtracking'])
def functional_and_linesearch(request, functional):
    """Return a functional whose optimal value is at 0."""
    if request.param == 'constant':
        line_search = 1.0
    else:
        line_search = odl.solvers.BacktrackingLineSearch(functional)
    return functional, line_search
@pytest.fixture(scope="module", params=['first', 'second'])
def broyden_impl(request):
    """Broyden update variant to test: the 'first' or 'second' method."""
    return request.param
def test_newton_solver(functional_and_linesearch):
    """Test the newton solver."""
    functional, line_search = functional_and_linesearch

    try:
        # Test if derivative exists; Newton's method needs second-order
        # information, so skip functionals without one.
        functional.gradient.derivative(functional.domain.zero())
    except OpNotImplementedError:
        return

    # Solving the problem
    x = functional.domain.zero()
    odl.solvers.newtons_method(functional, x, maxiter=50, tol=1e-4,
                               line_search=line_search)

    # The fixture functionals have optimal value 0, attained at x = 0.
    assert functional(x) < 1e-3
def test_bfgs_solver(functional_and_linesearch):
    """Test a quasi-newton solver."""
    func, search = functional_and_linesearch

    # Start from a random point and minimize; the optimal value is 0.
    start = noise_element(func.domain)
    odl.solvers.bfgs_method(func, start, maxiter=50, tol=1e-4,
                            line_search=search)
    assert func(start) < 1e-3
def test_broydens_method(functional_and_linesearch, broyden_impl):
    """Test a quasi-newton solver."""
    functional, line_search = functional_and_linesearch

    # Random starting point; the update formula ('first'/'second') is
    # parametrized by the broyden_impl fixture.
    x = noise_element(functional.domain)
    odl.solvers.broydens_method(functional, x, maxiter=50, tol=1e-4,
                                line_search=line_search, impl=broyden_impl)

    # The fixture functionals have optimal value 0.
    assert functional(x) < 1e-3
def test_steepest_descent(functional):
    """Test the steepest descent solver."""
    line_search = odl.solvers.BacktrackingLineSearch(functional)

    # Random starting point; print iterates via the callback for debugging.
    x = noise_element(functional.domain)
    odl.solvers.steepest_descent(functional, x, maxiter=50, tol=1e-4,
                                 line_search=line_search, callback=print)

    # The fixture functionals have optimal value 0.
    assert functional(x) < 1e-3
if __name__ == '__main__':
pytest.main(str(__file__.replace('\\', '/') + ' -v'))
| mpl-2.0 | Python | |
961d8b3a27ed88f09f6bc59b71003f60efd9e601 | Create bloomberg_spider.py | laichunpongben/web_crawler | bloomberg_spider.py | bloomberg_spider.py | import scrapy
class BloombergSpider(scrapy.Spider):
    """Scrape itemprop metadata from Bloomberg stock-quote pages."""

    name = 'bloomberg'
    start_urls = [
        'http://www.bloomberg.com/quote/AAPL:US',
        'http://www.bloomberg.com/quote/GOOGL:US',
        'http://www.bloomberg.com/quote/AMZN:US',
    ]

    def parse(self, response):
        """Yield one record per <meta> tag that carries an itemprop."""
        for meta in response.css('meta'):
            prop = meta.css('::attr(itemprop)').extract()
            if not prop:
                continue
            yield {
                'itemprop': prop,
                'content': meta.css('::attr(content)').extract(),
            }
| apache-2.0 | Python | |
aa050504a47f92738831153eee3d04a217ade9b7 | Add rapmap script | GastonLab/ddb-scripts,dgaston/ddb-ngsflow-scripts,dgaston/ddb-scripts | workflow-KSHV_RNA-Seq_RapMap.py | workflow-KSHV_RNA-Seq_RapMap.py | #!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import pipeline
from ddb_ngsflow.rna import rapmap
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
# Alignment and Refinement Stages
flags = list()
align_job = Job.wrapJobFn(rapmap.rapmap_quasi_unpaired, config, sample, samples, flags,
cores=int(config['rapmap']['num_cores']),
memory="{}G".format(config['rapmap']['max_mem']))
# Create workflow from created jobs
root_job.addChild(align_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
| mit | Python | |
11a69bd2fe6e7eb9e2372dc8a21dd3c41b0ab2ef | Add iow_util.py | cybem/graphite-web-iow,cybem/graphite-web-iow,EinsamHauer/graphite-web-iow,EinsamHauer/graphite-web-iow,cybem/graphite-web-iow,cybem/graphite-web-iow,cybem/graphite-web-iow,EinsamHauer/graphite-web-iow,cybem/graphite-web-iow,EinsamHauer/graphite-web-iow,EinsamHauer/graphite-web-iow,EinsamHauer/graphite-web-iow | webapp/graphite/iow_util.py | webapp/graphite/iow_util.py | from django.conf import settings
def check_tenant(tenant):
    """Return *tenant* if it is a configured tenant, else the default.

    The first entry of ``settings.TENANT_LIST`` acts as the fallback.
    """
    return tenant if tenant in settings.TENANT_LIST else settings.TENANT_LIST[0]
| apache-2.0 | Python | |
29a6ac8b0744e8569928dbdf3648485f0fe78ae7 | add register handlers | SYHGroup/mau_mau_bot,pythonalliance/uno2bot,SYHGroup/mau_mau_bot,pythonalliance/uno2bot,jh0ker/mau_mau_bot,imlonghao/unocn_bot,jh0ker/mau_mau_bot | bot.py | bot.py |
from telegram import Updater, InlineQueryResultPhoto
from game_manager import GameManager
import card as c
from credentials import TOKEN
gm = GameManager()
u = Updater(TOKEN)
dp = u.dispatcher
def new_game(bot, update):
    """Send the group chat an invite link for a fresh game."""
    chat_id = update.message.chat_id
    invite = gm.generate_invite_link(u.bot.getMe().username, chat_id)
    bot.sendMessage(chat_id,
                    text="Click this link to join the game: %s" % invite)
def start(bot, update, args):
    """Handle /start: join via a deep-link payload, or explain usage."""
    if not args:
        bot.sendMessage(update.message.chat_id,
                        text="Please invite me to a group and "
                             "issue the /new command there.")
        return
    # The deep-link payload is the game id generated by /new.
    gm.join_game(args[0], update.message.from_user)
def inline(bot, update):
    """Answer inline queries with playable cards; play the chosen result."""
    if update.inline_query:
        # Inline query: offer the querying player's currently playable cards
        # as photo results (result id encodes the card).
        user_id = update.inline_query.from_user.id
        player = gm.userid_player[user_id]
        playable = list()
        for card in player.playable_cards():
            playable.append(
                InlineQueryResultPhoto(str(card),
                                       card.get_image_link(),
                                       card.get_thumb_link())
            )
        bot.answerInlineQuery(update.inline_query.id, playable)
    else:
        # Chosen inline result: decode the card from the result id and play it.
        user_id = update.chosen_inline_result.from_user.id
        game = gm.userid_game[user_id]
        game.play_card(c.from_str(update.chosen_inline_result.id))
# Register the handlers and begin long-polling Telegram for updates.
dp.addTelegramInlineHandler(inline)
dp.addTelegramCommandHandler('start', start)
dp.addTelegramCommandHandler('new', new_game)
u.start_polling()
|
from telegram import Updater, InlineQueryResultPhoto
from game_manager import GameManager
import card as c
from credentials import TOKEN
gm = GameManager()
u = Updater(TOKEN)
dp = u.dispatcher
def new_game(bot, update):
    """Send the group chat an invite link for a fresh game."""
    chat_id = update.message.chat_id
    link = gm.generate_invite_link(u.bot.getMe().username, chat_id)
    bot.sendMessage(chat_id,
                    text="Click this link to join the game: %s" % link)
def start(bot, update, args):
    """Handle /start: join via a deep-link payload, or explain usage."""
    if args:
        # The deep-link payload is the game id.
        gm.join_game(args[0], update.message.from_user)
    else:
        bot.sendMessage(update.message.chat_id,
                        text="Please invite me to a group and "
                             "issue the /start command there.")
def inline(bot, update):
    """Answer inline queries with playable cards; play the chosen result."""
    if update.inline_query:
        # Inline query: offer the querying player's playable cards as photos.
        user_id = update.inline_query.from_user.id
        player = gm.userid_player[user_id]
        playable = list()
        for card in player.playable_cards():
            playable.append(
                InlineQueryResultPhoto(str(card),
                                       card.get_image_link(),
                                       card.get_thumb_link())
            )
        bot.answerInlineQuery(update.inline_query.id, playable)
    else:
        # Chosen inline result: decode the card from the result id and play it.
        user_id = update.chosen_inline_result.from_user.id
        game = gm.userid_game[user_id]
        game.play_card(c.from_str(update.chosen_inline_result.id))
| agpl-3.0 | Python |
a2413403a59a313397b517c90a2405a0599a0fa6 | add initial code | smmalis37/ZBot | bot.py | bot.py | #! /usr/bin/python3
import discord
import asyncio
client = discord.Client()
discord_colors = discord.Color.__dict__
colors = list(filter(lambda x: isinstance(discord_colors[x],classmethod), discord_colors))
colors.sort()
@asyncio.coroutine
def handle_color(message):
    """Handle "!color": list available colors, or assign the author one."""
    words = message.content.split(' ')
    if len(words) == 1:
        # Bare "!color" — list every supported color name.
        yield from client.send_message(message.channel, 'Available colors are:\n' + (', '.join(colors)))
    if len(words) == 2:
        if (words[1] not in colors):
            yield from client.send_message(message.channel, 'That is not a valid color.')
        else:
            author = message.author
            print('Changing ' + author.name + '\'s color to ' + words[1])
            # Keep every non-color role and swap in the one requested color
            # role, so a member holds at most one color at a time.
            old_roles = list(filter(lambda r: r.name not in colors, author.roles))
            color_role = list(filter(lambda r: r.name == words[1], message.server.roles))[0]
            new_roles = old_roles + [color_role]
            yield from client.replace_roles(author, *new_roles)
            yield from client.send_message(message.channel, "Changed your color to " + words[1])
@asyncio.coroutine
def refresh_roles(server):
    """Reset permissions on every color role to match @everyone's."""
    print("Refreshing roles for " + server.name)
    everyonePerm = server.default_role.permissions
    # Only touch color roles whose permissions have drifted from @everyone.
    to_edit = list(filter(lambda x: x.permissions != everyonePerm, filter(lambda x: x.name in colors, server.roles)))
    for edit in to_edit:
        yield from client.edit_role(server, edit, permissions=everyonePerm)
        print("Edited " + edit.name)
@asyncio.coroutine
def create_roles(server):
    """Create any color roles the server is missing, with @everyone's perms."""
    print("Creating roles for " + server.name)
    everyonePerm = server.default_role.permissions
    role_names = list(map(lambda r: r.name, server.roles))
    # Only create colors that don't already exist as roles.
    to_create = list(filter(lambda x: x not in role_names, colors))
    for color in to_create:
        yield from client.create_role(server, name=color, color=getattr(discord.Color, color)(), permissions=everyonePerm)
        print("Created " + color)
def is_admin(member):
    """Return True when the member holds a role literally named "Admin"."""
    return any(role.name == "Admin" for role in member.roles)
@client.async_event
def on_message(message):
    """Dispatch chat commands: !help, !color, !refresh_roles, !debug."""
    words = message.content.split(' ')
    command = words[0]
    if command == '!help':
        yield from client.send_message(message.channel, "Available commands:\n!help, !color")
        if is_admin(message.author):
            yield from client.send_message(message.channel, "Admin-only commands:\n!refresh_roles")
    if command == '!color':
        yield from handle_color(message)
    if command == '!refresh_roles':
        if is_admin(message.author):
            yield from client.send_message(message.channel, "Refreshing roles")
            yield from refresh_roles(message.server)
            yield from create_roles(message.server)
            yield from client.send_message(message.channel, "Done refreshing roles")
        else:
            yield from client.send_message(message.channel, "That's an admin-only command")
    if command =='!debug':
        # SECURITY: eval() of arbitrary chat input — gated on a single
        # hard-coded Discord user id (the bot owner). Anyone else's !debug
        # is ignored, but this remains remote code execution by design.
        if message.author.id == '123301224022933504':
            try:
                result = eval(' '.join(words[1:]))
            except Exception as e:
                result = '{0.__name__}: {1}'.format(type(e), e)
            yield from client.send_message(message.channel, result)
@client.async_event
def on_ready():
    """On login, reconcile color roles on every connected server."""
    print('Logged in!')
    for server in client.servers:
        yield from refresh_roles(server)
        yield from create_roles(server)
    print('Ready')
with open("creds") as f:
creds = tuple(map(lambda s: s.strip(), f.readlines()))
client.run(creds[0],creds[1])
| unlicense | Python | |
938fa9463b4cf248593ae1917bd6d7f9413a183a | add a modle to send serial data | 498143049/Uppercom,498143049/Uppercom,498143049/Uppercom,498143049/Uppercom,498143049/Uppercom | Python/servercode/simchipcomputer.py | Python/servercode/simchipcomputer.py | # coding=UTF-8
from time import sleep, ctime
import serial
import threading
port='com4';
baudrate=9600;
Myserial = serial.Serial(port,baudrate);
def Handle(Text124):
    """Busy-poll the serial port and echo received bytes as ASCII text.

    ``Text124`` is unused; it exists only because the thread is started
    with an args tuple.
    """
    while True:
        count = Myserial.inWaiting()
        if count != 0:
            recv = Myserial.read(count)
            print(recv.decode('ascii'))
            # Discard anything left in the input buffer after the read.
            Myserial.flushInput()
        sleep(0.1)
if __name__ == '__main__':
    # NOTE(review): `str` shadows the builtin and is never used — presumably
    # a leftover example frame string; confirm before removing.
    str='$ABADCDHB&'
    th=threading.Thread(target = Handle, args=('sda',) )
    th.setDaemon(True) # daemon thread: it exits when the main thread exits
    th.start()
    try:
        # Write three '$' bytes (0x24) every two seconds until Ctrl-C.
        while(True):
            Myserial.write(bytes([36,36,36]));
            sleep(2)
    except KeyboardInterrupt :
        Myserial.close()
        print ("Off ")
8f2c2e566281507dfded1bae855ba0236694aac0 | Add a few tests. | bjornedstrom/python-signify,bjornedstrom/python-signify | test/system_test.py | test/system_test.py | # -*- coding: utf-8 -*-
# Copyright (C) 2015 Björn Edström <be@bjrn.se>
import signify
import unittest
class SignifyTest(unittest.TestCase):
    """Known-answer and round-trip tests for the Signify wrapper."""

    # Known-answer test vectors: an OpenBSD signify keypair, a message, and
    # its detached signature. The base64 blobs must not be re-wrapped.
    KAT = [
        {
            'pub': """untrusted comment: bjorntest public key
RWQ100QRGZoxU+Oy1g7Ko+8LjK1AQLIEavp/NuL54An1DC0U2cfCLKEl
""",
            'priv': """untrusted comment: bjorntest secret key
RWRCSwAAACqHVbmAUokJcTpgKhRbw+/W+Q7nrVPi3eU100QRGZoxU86ZWb3NjEp9ScrFddFy0o2D1KtZ0440imfaWmUebGfs0Hm+Fm9SCtaJgtjFtrUlPlmnjksY8zdcXr2NvjLsr0A=
""",
            'message': """my message
""",
            'sig': """untrusted comment: signature from bjorntest secret key
RWQ100QRGZoxU/gjzE8m6GYtfICqE0Ap8SdXRSHrpjnSBKMc2RMalgi5RKrEHmKfTmcsuB9ZzDCo6K6sYEqaEcEnnAFa0zCewAg=
"""
        }
    ]

    def setUp(self):
        self.obj = signify.Signify()

    def test_verify_success(self):
        # The KAT signature must verify against the KAT public key/message.
        self.assertTrue(
            self.obj.verify_simple(self.KAT[0]['pub'],
                                   self.KAT[0]['sig'],
                                   self.KAT[0]['message']))

    def test_verify_failure(self):
        # Corrupt one character of the signature; verification must raise.
        broken_sig = self.KAT[0]['sig'].replace('Malgi', 'Magic')
        self.assertRaises(
            signify.InvalidSignature,
            self.obj.verify_simple, self.KAT[0]['pub'],
            broken_sig,
            self.KAT[0]['message'])

    def test_generate(self):
        # Unprotected key generation (no passphrase) must succeed.
        pub, priv = self.obj.generate_unsafe('test', None)

    def test_sign(self):
        # Round trip: sign with a fresh key, then verify with its public key.
        pub, priv = self.obj.generate_unsafe('test', None)
        sig = self.obj.sign(privkey_buf=priv,
                            message_buf='My Message')
        self.assertTrue(
            self.obj.verify_simple(pub,
                                   sig,
                                   'My Message'))


if __name__ == '__main__':
    unittest.main()
| mit | Python | |
37d23e16f091f462f83708959cfff73d8811eb47 | build docker image for SPM | kaczmarj/neurodocker,kaczmarj/neurodocker | neurodocker/interfaces/tests/test_spm.py | neurodocker/interfaces/tests/test_spm.py | """Tests for neurodocker.interfaces.SPM"""
# Author: Jakub Kaczmarzyk <jakubk@mit.edu>
from __future__ import absolute_import, division, print_function
from io import BytesIO
import pytest
from neurodocker.docker_api import Dockerfile, DockerImage, DockerContainer
from neurodocker.parser import SpecsParser
from neurodocker.interfaces import SPM
class TestSPM(object):
    """Integration tests for the SPM interface (requires a Docker daemon)."""

    def test_install_centos7(self):
        """Install SPM12 and MATLAB R2017a on CentOS 7."""
        # Build a Dockerfile from the spec and build the image from it.
        specs = {'base': 'centos:7',
                 'software': {
                     'spm': {'version': '12', 'matlab_version': 'R2017a'}}}
        parser = SpecsParser(specs=specs)
        cmd = Dockerfile(specs=parser.specs, pkg_manager='yum').cmd
        fileobj = BytesIO(cmd.encode('utf-8'))
        image = DockerImage(fileobj=fileobj).build_raw()
        container = DockerContainer(image)
        container.start(working_dir='/home')
        # Write a trivial MATLAB script, run it through the SPM MCR wrapper,
        # and check the expected text comes back without an error.
        cmd = ["/bin/sh", "-c", """echo 'fprintf("\ndesired output")' > test.m """]
        container.exec_run(cmd)
        cmd = ["/bin/sh", "-c", "$SPMMCRCMD test.m"]
        output = container.exec_run(cmd)
        assert "error" not in output.lower(), "error running SPM command"
        assert "desired output" in output, "expected output not found"
        container.cleanup(remove=True, force=True)
| apache-2.0 | Python | |
51faed84f4d56fe3455a6568bdadbc9b16196175 | Add day 5 part 1. | SayWhat1/adventofcode2016 | day5-1.py | day5-1.py | """Module to find the passowrd on a bunny door."""
import hashlib
def main():
    """Brute-force the 8-character door password (Advent of Code 2016, day 5).

    Hash "<door id><index>" with MD5 for index = 0, 1, 2, ...; whenever the
    hex digest starts with five zeros, its sixth hex digit is the next
    password character.
    """
    door_id = 'cxdnnyjw'  # renamed from `id`, which shadowed the builtin
    password = []
    index = 0
    while len(password) < 8:
        candidate = door_id + str(index)
        # Hash each candidate once (the original recomputed the digest for
        # the test and the append); .encode() makes this valid on Python 3.
        digest = hashlib.md5(candidate.encode()).hexdigest()
        if digest.startswith('00000'):
            password.append(digest[5])
        index += 1
    # Bug fix: the original `print('...{}').format(...)` called .format() on
    # print's return value (None) under Python 3.
    print('The password is {}'.format(''.join(password)))


if __name__ == '__main__':
    main()
| mit | Python | |
c019af0f2d155ed2edaf600732218057cabc441e | Add test_client.py. | ibus/ibus-cros,ibus/ibus,fujiwarat/ibus,Keruspe/ibus,ibus/ibus,j717273419/ibus,ueno/ibus,ibus/ibus-cros,luoxsbupt/ibus,ibus/ibus,luoxsbupt/ibus,phuang/ibus,phuang/ibus,ibus/ibus-cros,j717273419/ibus,fujiwarat/ibus,Keruspe/ibus,luoxsbupt/ibus,phuang/ibus,j717273419/ibus,ibus/ibus,ueno/ibus,luoxsbupt/ibus,Keruspe/ibus,fujiwarat/ibus,ueno/ibus,luoxsbupt/ibus,fujiwarat/ibus,ueno/ibus,j717273419/ibus,ibus/ibus-cros,Keruspe/ibus,ueno/ibus,phuang/ibus | test/test_client.py | test/test_client.py | #! /usr/bin/python
# Test program for client APIs.
import time
import os
import sys
import select
import glib
import termios
import tty
import ibus
from ibus import keysyms
from ibus import modifier
class DemoTerm:
    """Raw-mode terminal demo that feeds keystrokes through an IBus input
    context and prints the engine's preedit/lookup-table/commit events."""

    def __init__(self):
        # Save the current tty attributes so close() can restore them, then
        # switch stdin (fd 0) to raw mode so we see each keypress.
        self.__term_old = termios.tcgetattr(0)
        tty.setraw(0)
        self.__bus = ibus.Bus()
        self.__ic = self.__bus.create_input_context("DemoTerm")
        self.__bus.set_capabilities(self.__ic, 7)
        # Subscribe to the engine's UI events; each callback just prints.
        self.__bus.connect("commit-string", self.__commit_string_cb)
        self.__bus.connect("update-preedit", self.__update_preedit_cb)
        self.__bus.connect("show-preedit", self.__show_preedit_cb)
        self.__bus.connect("hide-preedit", self.__hide_preedit_cb)
        self.__bus.connect("update-lookup-table", self.__update_lookup_table_cb)
        self.__bus.connect("show-lookup-table", self.__show_lookup_table_cb)
        self.__bus.connect("hide-lookup-table", self.__hide_lookup_table_cb)
        # Poll stdin from the glib main loop.
        glib.io_add_watch(0, glib.IO_IN, self.__stdin_cb)
        # self.__master_fd, self.__slave_fd = os.openpty()
        # self.__run_shell()

    def __stdin_cb(self, fd, condition):
        """Read one byte from stdin and forward it to the input context."""
        c = ord(os.read(0, 1))
        if c == 3:  # Ctrl + C quits the main loop
            self.__loop.quit()
        try:
            if c == 22: # Ctrl + V => Ctrl + space
                retval = self.__bus.process_key_event(self.__ic,
                    keysyms.space, True, modifier.CONTROL_MASK)
            elif c == 127: # BackSpace
                self.__bus.process_key_event(self.__ic,
                    keysyms.BackSpace, True, 0)
                retval = True
            else:
                retval = self.__bus.process_key_event(self.__ic, c, True, 0)
        except:
            retval = False
        # If the engine did not consume the key, echo it to stdout.
        if retval == False:
            os.write(1, chr(c))
        return True

    def __commit_string_cb(self, bus, ic, text):
        print "commit: %s\r" % text

    def __update_preedit_cb(self, bus, ic, text, attrs, cursor_pos, visible):
        print "preedit: %s\r" % text if visible else "preedit:\r"

    def __show_preedit_cb(self, bus, ic):
        print "preedit show\r"

    def __hide_preedit_cb(self, bus, ic):
        print "preedit hide\r"

    def __update_lookup_table_cb(self, bus, ic, lookup_table, visible):
        """Print the candidates on the current lookup-table page."""
        if visible:
            candidates = lookup_table.get_canidates_in_current_page()
            i = 1
            line = u"lookup table:"
            for c in candidates:
                line += " %d.%s" % (i, c[0])
                i += 1
            print line, "\r"
        else:
            print "lookup table:\r"

    def __show_lookup_table_cb(self, bus, ic):
        print "lookup table show\r"

    def __hide_lookup_table_cb(self, bus, ic):
        print "lookup table hide\r"

    def __run_shell(self):
        """Fork a bash attached to the slave side of the pty (unused)."""
        pid = os.fork()
        if pid == 0: # child
            # Re-point stdio at the pty slave and exec a shell.
            os.close(0)
            os.close(1)
            os.close(2)
            os.close(self.__master_fd)
            os.dup2(self.__slave_fd, 0)
            os.dup2(self.__slave_fd, 1)
            os.dup2(self.__slave_fd, 2)
            os.close(self.__slave_fd)
            os.execv('/bin/bash', ["bash"])
            os.exit(1)

    def run(self):
        """Block in the glib main loop until Ctrl + C."""
        self.__loop = glib.MainLoop()
        self.__loop.run()

    def close(self):
        """Restore the saved tty attributes."""
        termios.tcsetattr(0, termios.TCSAFLUSH, self.__term_old)
def main():
    """Run the demo terminal, always restoring the tty on exit."""
    term = DemoTerm()
    try:
        term.run()
    except:
        # Print the traceback but still fall through to close(), which
        # restores the terminal out of raw mode.
        import traceback
        traceback.print_exc()
    term.close()


if __name__ == "__main__":
    main()
| lgpl-2.1 | Python | |
d813448c1b9a16d58c8d24d27267893b39c4b908 | Add calc_coolfunc_profile.py: calculate cooling function proifle | liweitianux/chandra-acis-analysis,liweitianux/chandra-acis-analysis,liweitianux/chandra-acis-analysis | bin/calc_coolfunc_profile.py | bin/calc_coolfunc_profile.py | #!/usr/bin/env python3
#
# Copyright (c) 2017 Weitian LI <liweitianux@live.com>
# MIT license
"""
Calculate the cooling function profile with respect to the input
temperature profile by interpolating the previously calculated
cooling function table.
In this way, the cooling function profile can be calculated very
quickly, allowing much more iterations for the later Monte Carlo
calculations.
"""
import os
import sys
import argparse
import numpy as np
import scipy.interpolate as interpolate
def interpolate_cf(table, logy=True):
    """
    Build a linear interpolator over a cooling function table.

    table : 2-column array of (temperature, cooling function) samples.
    logy  : if True, interpolate log10 of the cooling function instead of
            the raw values (the table spans several orders of magnitude).

    Returns the callable ``scipy.interpolate.interp1d`` object.
    """
    temperatures = table[:, 0]
    values = table[:, 1]
    if logy:
        values = np.log10(values)
    print("Interpolating cooling function table ...", file=sys.stderr)
    return interpolate.interp1d(temperatures, values, kind="linear")
def calc_cf_profile(tprofile, interp, logy=True):
    """
    Evaluate the cooling function along a temperature profile.

    tprofile : 2-column array of (radius, temperature).
    interp   : callable mapping temperature -> (log10) cooling function,
               e.g. the result of ``interpolate_cf()``.
    logy     : if True, ``interp`` yields log10 values, which are
               converted back to linear scale here.

    Returns a 2-column array of (radius, cooling function).
    """
    radii = tprofile[:, 0]
    values = interp(tprofile[:, 1])
    if logy:
        values = 10 ** values
    return np.column_stack([radii, values])
def main():
    """Command-line driver: read a CF table and a temperature profile,
    interpolate, and write the resulting cooling function profile."""
    parser = argparse.ArgumentParser(
        description="Calculate cooling function profile by interpolations")
    parser.add_argument("-t", "--table", dest="table", required=True,
                        help="previously calculated cooling function table")
    parser.add_argument("-T", "--tprofile", dest="tprofile", required=True,
                        help="temperature profile "
                             "(2-column: radius temperature)")
    parser.add_argument("-o", "--outfile", dest="outfile", required=True,
                        help="output cooling function profile")
    parser.add_argument("-C", "--clobber", dest="clobber", action="store_true",
                        help="overwrite existing files")
    args = parser.parse_args()
    # Refuse to overwrite an existing output file unless --clobber is given.
    if os.path.exists(args.outfile) and not args.clobber:
        raise OSError("Output file already exists: %s" % args.outfile)
    cf_table = np.loadtxt(args.table)
    temperature_profile = np.loadtxt(args.tprofile)
    profile = calc_cf_profile(temperature_profile, interpolate_cf(cf_table))
    np.savetxt(args.outfile, profile)
# Script entry point.
if __name__ == "__main__":
    main()
| mit | Python | |
18bc9e0fb7c084e56e77b54b69fca5471d04be5f | add missing Devince migration | MPIB/Lagerregal,vIiRuS/Lagerregal,MPIB/Lagerregal,vIiRuS/Lagerregal,MPIB/Lagerregal,vIiRuS/Lagerregal | devices/migrations/0007_device_used_in_rm_default.py | devices/migrations/0007_device_used_in_rm_default.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-28 19:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated (Django 1.11) schema migration: relaxes
    # Device.used_in to allow blank/null values and detach (SET_NULL)
    # when the referenced Device is deleted.
    dependencies = [
        ('devices', '0006_device_used_in'),
    ]
    operations = [
        migrations.AlterField(
            model_name='device',
            name='used_in',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='devices.Device'),
        ),
    ]
| bsd-3-clause | Python | |
28dda425039716b23a22f8f61889d10c96467e17 | Add site context processor | nvbn/coviolations_web,nvbn/coviolations_web | app/context_processors.py | app/context_processors.py | from django.contrib.sites.models import Site
def request_site(request):
    """Template context processor exposing the current Site as ``SITE``."""
    current_site = Site.objects.get_current()
    return {'SITE': current_site}
| mit | Python | |
7ea4d3d9117f0586749dad3ce1ff3a038c40ffa8 | Add missing file | annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing | openprescribing/openprescribing/slack.py | openprescribing/openprescribing/slack.py | import os
import requests
def notify_slack(message):
    """Posts the message to #general
    """
    # Webhook URL as provisioned by Slack when creating the webhook at
    # https://my.slack.com/services/new/incoming-webhook/
    webhook_url = os.environ['SLACK_GENERAL_POST_KEY']
    response = requests.post(webhook_url, json={'text': message})
    if response.status_code != 200:
        raise ValueError(
            'Request to slack returned an error %s, the response is:\n%s'
            % (response.status_code, response.text)
        )
| mit | Python | |
41a83a622fda7776ddc9efdf5bed1be1cd698b51 | Test Lab for review, with support file codingbat | UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016 | students/pvosper/session06/codingbat.py | students/pvosper/session06/codingbat.py | #!/usr/bin/env python3
# Coding Bat Samples for Test Lab
'''
Pick an example from codingbat:
http://codingbat.com
Do a bit of test-driven development on it:
run something on the web site.
write a few tests using the examples from the site.
then write the function, and fix it ‘till it passes the tests.
Do at least two of these...
'''
# List 2 > Count Evens
# http://codingbat.com/prob/p189616
'''
Return the number of even ints in the given array. Note: the % "mod" operator
computes the remainder, e.g. 5 % 2 is 1.
count_evens([2, 1, 2, 3, 4]) → 3
count_evens([2, 2, 0]) → 3
count_evens([1, 3, 5]) → 0
'''
def count_evens(nums):
    """Return how many of the ints in ``nums`` are even."""
    return sum(1 for value in nums if value % 2 == 0)
# Logic-2 > close_far
# http://codingbat.com/prob/p160533
'''
Given three ints, a b c, return True if one of b or c is "close"
(differing from a by at most 1), while the other is "far", differing from
both other values by 2 or more. Note: abs(num) computes the absolute value of
a number.
close_far(1, 2, 10) → True
close_far(1, 2, 3) → False
close_far(4, 1, 3) → True
'''
def close_far(num_a, num_b, num_c):
    """Return True if one of num_b/num_c is "close" to num_a (differs by
    at most 1) while the other is "far" (differs from BOTH other values
    by 2 or more).

    Fixes the original's third branch, which wrongly returned True when
    num_b and num_c were close to each other but neither was close to
    num_a (e.g. close_far(1, 5, 6) must be False).
    """
    diff_ab = abs(num_a - num_b)
    diff_ac = abs(num_a - num_c)
    diff_bc = abs(num_b - num_c)
    # b close to a while c is far from both:
    if diff_ab <= 1 and diff_ac >= 2 and diff_bc >= 2:
        return True
    # c close to a while b is far from both:
    if diff_ac <= 1 and diff_ab >= 2 and diff_bc >= 2:
        return True
    return False
# String-2 > end_other
# http://codingbat.com/prob/p174314
'''
Given two strings, return True if either of the strings appears at the very end of the other string, ignoring upper/lower case differences (in other words, the computation should not be "case sensitive"). Note: s.lower() returns the lowercase version of a string.
end_other('Hiabc', 'abc') → True
end_other('AbC', 'HiaBc') → True
end_other('abc', 'abXabc') → True
'''
def end_other(str_a, str_b):
    """Return True if either string appears at the very end of the other,
    ignoring case.

    Fixes the original, which compared fixed 3-character suffixes
    (``[-3:]``) and so only worked when the shorter string happened to be
    exactly 3 characters long (e.g. it returned False for
    end_other('Hiabc', 'bc')).
    """
    lower_a = str_a.lower()
    lower_b = str_b.lower()
    return lower_a.endswith(lower_b) or lower_b.endswith(lower_a)
# Manual smoke test: print each function's codingbat sample cases.
if __name__ == '__main__':
    print('\n=== MAIN ===\n')
    print('\nList 2 > Count Evens')
    print('[2, 1, 2, 3, 4]: ', count_evens([2, 1, 2, 3, 4]))
    print('[2, 2, 0]: ', count_evens([2, 2, 0]))
    print('[1, 3, 5]: ', count_evens([1, 3, 5]))
    print('\nLogic-2 > close_far')
    print('(1, 2, 10)', close_far(1, 2, 10))
    print('(1, 2, 3)', close_far(1, 2, 3))
    print('(4, 1, 3)', close_far(4, 1, 3))
    print('\nString-2 > end_other')
    print("('Hiabc', 'abc')", end_other('Hiabc', 'abc'))
    print("('AbC', 'HiaBc')", end_other('AbC', 'HiaBc'))
    print("('abc', 'abXabc')", end_other('abc', 'abXabc'))
'''
=== SAMPLE ===
In [1]: run codingbat.py
=== MAIN ===
List 2 > Count Evens
[2, 1, 2, 3, 4]: 3
[2, 2, 0]: 3
[1, 3, 5]: 0
Logic-2 > close_far
(1, 2, 10) True
(1, 2, 3) False
(4, 1, 3) True
String-2 > end_other
('Hiabc', 'abc') True
('AbC', 'HiaBc') True
('abc', 'abXabc') True
'''
| unlicense | Python | |
764a4396300fa5c50c7c129bc24ce1cfdd597c03 | add foo.py | daiz713/pxchar,daiz713/pxchar,daiz713/pxchar | foo.py | foo.py | # -*- coding: utf-8 -*-
# Project pxchar
import sys
import os.path
from PIL import Image
# Initialization (stub -- not yet implemented).
def init():
    pass;
# Read a text file one character at a time (stub -- not yet implemented).
def readChar(fileName):
    pass;
# Determine the pixel data (color) for a character that was read (stub).
def determinePxColor(char):
    pass;
# Write out a PNG file from the pixel colors (stub -- not yet implemented).
def applyPxColor(color):
    pass;
# NOTE(review): this guard has no body (SyntaxError as written) -- the
# script's entry-point code appears to be missing.
if __name__ == '__main__':
| mit | Python | |
82199c60097599f5273c97fee649473a8a069ec8 | Add missing migration | City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,stephawe/kerrokantasi,vikoivun/kerrokantasi,City-of-Helsinki/kerrokantasi,stephawe/kerrokantasi,stephawe/kerrokantasi,vikoivun/kerrokantasi,vikoivun/kerrokantasi | democracy/migrations/0023_add_comment_location_and_images.py | democracy/migrations/0023_add_comment_location_and_images.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-09-15 12:32
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import djgeojson.fields
class Migration(migrations.Migration):
    # Auto-generated (Django 1.9): adds the CommentImage model, a GeoJSON
    # location field on SectionComment, and default comment ordering.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('democracy', '0022_comment_author_name_editable'),
    ]
    operations = [
        migrations.CreateModel(
            name='CommentImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(db_index=True, default=django.utils.timezone.now, editable=False, verbose_name='time of creation')),
                ('modified_at', models.DateTimeField(default=django.utils.timezone.now, editable=False, verbose_name='time of last modification')),
                ('published', models.BooleanField(db_index=True, default=True, verbose_name='public')),
                ('deleted', models.BooleanField(db_index=True, default=False, editable=False, verbose_name='deleted')),
                ('title', models.CharField(blank=True, default='', max_length=255, verbose_name='title')),
                ('caption', models.TextField(blank=True, default='', verbose_name='caption')),
                ('height', models.IntegerField(default=0, editable=False, verbose_name='height')),
                ('width', models.IntegerField(default=0, editable=False, verbose_name='width')),
                ('image', models.ImageField(height_field='height', upload_to='images/%Y/%m', verbose_name='image', width_field='width')),
                ('ordering', models.IntegerField(db_index=True, default=1, help_text='The ordering position for this object. Objects with smaller numbers appear first.', verbose_name='ordering')),
            ],
            options={
                'verbose_name_plural': 'comment images',
                'verbose_name': 'comment image',
            },
        ),
        migrations.AlterModelOptions(
            name='sectioncomment',
            options={'ordering': ('-created_at',), 'verbose_name': 'section comment', 'verbose_name_plural': 'section comments'},
        ),
        migrations.AddField(
            model_name='sectioncomment',
            name='geojson',
            field=djgeojson.fields.GeometryField(blank=True, null=True, verbose_name='location'),
        ),
        migrations.AddField(
            model_name='commentimage',
            name='comment',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images', to='democracy.SectionComment'),
        ),
        migrations.AddField(
            model_name='commentimage',
            name='created_by',
            field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='commentimage_created', to=settings.AUTH_USER_MODEL, verbose_name='created by'),
        ),
        migrations.AddField(
            model_name='commentimage',
            name='modified_by',
            field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='commentimage_modified', to=settings.AUTH_USER_MODEL, verbose_name='last modified by'),
        ),
    ]
| mit | Python | |
e6b52bbb4353ef797a83ead2a8dd2037f284cbb1 | Update create_manufacturer_records.py | sagar30051991/ozsmart-erp,gangadharkadam/v6_erp,ShashaQin/erpnext,aruizramon/alec_erpnext,ShashaQin/erpnext,aruizramon/alec_erpnext,gangadharkadam/v6_erp,geekroot/erpnext,geekroot/erpnext,sagar30051991/ozsmart-erp,anandpdoshi/erpnext,indictranstech/erpnext,anandpdoshi/erpnext,indictranstech/osmosis-erpnext,ShashaQin/erpnext,gsnbng/erpnext,gsnbng/erpnext,mahabuber/erpnext,gangadharkadam/v6_erp,geekroot/erpnext,gsnbng/erpnext,mahabuber/erpnext,anandpdoshi/erpnext,indictranstech/osmosis-erpnext,mahabuber/erpnext,indictranstech/erpnext,aruizramon/alec_erpnext,indictranstech/osmosis-erpnext,njmube/erpnext,indictranstech/osmosis-erpnext,anandpdoshi/erpnext,geekroot/erpnext,njmube/erpnext,aruizramon/alec_erpnext,sagar30051991/ozsmart-erp,indictranstech/erpnext,gangadharkadam/v6_erp,mahabuber/erpnext,Aptitudetech/ERPNext,gsnbng/erpnext,sagar30051991/ozsmart-erp,indictranstech/erpnext,njmube/erpnext,njmube/erpnext,ShashaQin/erpnext | erpnext/patches/v6_16/create_manufacturer_records.py | erpnext/patches/v6_16/create_manufacturer_records.py | # Copyright (c) 2016, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr
def execute():
    """Create a Manufacturer record for every distinct manufacturer name
    still referenced by an enabled Item."""
    # Make sure the latest Manufacturer/Item schemas are loaded first.
    frappe.reload_doc("stock", "doctype", "manufacturer")
    frappe.reload_doctype("Item")
    rows = frappe.db.sql("""select distinct manufacturer from tabItem
        where ifnull(manufacturer, '') != '' and disabled=0""")
    for row in rows:
        name = cstr(row[0]).strip()
        if not name or frappe.db.exists("Manufacturer", name):
            continue
        manufacturer = frappe.new_doc("Manufacturer")
        manufacturer.short_name = name
        manufacturer.full_name = name
        manufacturer.save()
| # Copyright (c) 2016, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr
def execute():
    """Backfill Manufacturer records from distinct Item.manufacturer
    values on enabled Items."""
    # Make sure the latest Manufacturer/Item schemas are loaded first.
    frappe.reload_doctype("Manufacturer")
    frappe.reload_doctype("Item")
    for d in frappe.db.sql("""select distinct manufacturer from tabItem
        where ifnull(manufacturer, '') != '' and disabled=0"""):
        # Normalise the raw value before using it as a document name.
        manufacturer_name = cstr(d[0]).strip()
        if manufacturer_name and not frappe.db.exists("Manufacturer", manufacturer_name):
            man = frappe.new_doc("Manufacturer")
            man.short_name = manufacturer_name
            man.full_name = manufacturer_name
            man.save()
f2bec4c2c2bd2cb55a98a4aeda52a40781734086 | Add simple binary heap implementation | nitsas/py3datastructs | binary_heap.py | binary_heap.py | """
A simple binary heap implementation (using a list).
Operations:
- __len__
- insert
- pop
- peek
TODO:
- Add operations: heapify (maybe in __init__)
Author:
Christos Nitsas
(nitsas)
(chrisnitsas)
Language:
Python 3(.4)
Date:
November, 2014
"""
import operator
__all__ = ['BinaryHeap']
class BinaryHeap:
    """
    A simple binary (min) heap implementation (using a list).

    The lowest valued items are retrieved first (the lowest valued item is
    the one returned by `sorted(list(items))[0]`). A typical pattern for
    items is a tuple in the form: (priority_number, data).
    """
    def __init__(self):
        """Initialize an empty (min) heap."""
        self._items = []
        self._less = operator.lt
    def __len__(self):
        """Return the number of items in the heap as an int."""
        return len(self._items)
    def _swap(self, a, b):
        """
        Swap items in positions `a` and `b` of `self._items`.
        """
        self._items[a], self._items[b] = self._items[b], self._items[a]
    def _shift_up(self, node):
        """
        Move node up in the tree, as long as needed.

        node -- the position of the node in self._items

        Fix: the original looped while `not less(parent, item)`, which also
        swapped items equal to their parent; comparing the child directly
        against the parent avoids those useless swaps.
        """
        parent = (node - 1) // 2
        while node > 0 and self._less(self._items[node], self._items[parent]):
            # swap item with its parent and continue from there
            self._swap(node, parent)
            node = parent
            parent = (node - 1) // 2
    def _shift_down(self, node):
        """
        Move node down in the tree, as long as needed.

        node -- the position of the node in self._items

        Fix: the original indexed `self._items[left]` / `self._items[right]`
        without bounds checks, raising IndexError whenever `node` had fewer
        than two children -- i.e. on almost every `pop`.
        """
        size = len(self._items)
        while True:
            left = 2 * node + 1
            right = left + 1
            smallest = node
            if left < size and self._less(self._items[left],
                                          self._items[smallest]):
                smallest = left
            if right < size and self._less(self._items[right],
                                           self._items[smallest]):
                smallest = right
            if smallest == node:
                # heap property restored (or node is a leaf)
                break
            self._swap(node, smallest)
            node = smallest
    def insert(self, item):
        """
        Insert a new item.

        item -- the item to be inserted

        A typical pattern for items is a tuple in the form:
        (priority_number, data)

        This operation's time complexity is `O(log(n))`, where `n` is the
        number of items in the heap.
        """
        # insert item at the end of the list of items
        self._items.append(item)
        # shift the item up as needed to restore the heap property
        self._shift_up(len(self._items) - 1)
    def peek(self):
        """
        Return the item on top of the heap without removing the item.

        Return the item with the *lowest* value according to the heap's
        (partial) ordering (e.g. the min item if we have a min heap).

        Raises a `LookupError('peek into empty heap')` if the heap is empty.
        """
        if len(self._items) == 0:
            raise LookupError('peek into empty heap')
        return self._items[0]
    def pop(self):
        """
        Remove and return the item that's currently on top of the heap.

        Remove and return the item with the *lowest* value according to the
        heap's (partial) ordering (e.g. the min item if we have a min heap).

        Raises a `LookupError('pop from empty heap')` if the heap is empty.
        """
        if len(self._items) == 0:
            raise LookupError('pop from empty heap')
        # swap top item with the last item of self._items, and remove it
        self._swap(0, -1)
        min_item = self._items.pop()
        # now repair the heap property
        self._shift_down(0)
        # return
        return min_item
| mit | Python | |
a8efd5e94c206a9bbaf4a523fda7513acc2afa7f | add test-room-list.py | freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut | tests/twisted/avahi/test-room-list.py | tests/twisted/avahi/test-room-list.py | from saluttest import exec_test
from avahitest import AvahiAnnouncer, AvahiListener
from avahitest import get_host_name
import avahi
from xmppstream import setup_stream_listener, connect_to_stream
from servicetest import make_channel_proxy
from twisted.words.xish import xpath, domish
import time
import dbus
# Telepathy D-Bus interface / handle-type constants used by this test.
CHANNEL_TYPE_ROOMLIST = 'org.freedesktop.Telepathy.Channel.Type.RoomList'
HT_CONTACT = 1
HT_CONTACT_LIST = 3
TEXT_MESSAGE_TYPE_NORMAL = dbus.UInt32(0)
def test(q, bus, conn):
    # Connect and wait until the connection is fully up
    # (StatusChanged(CONNECTED, REASON_NONE)).
    conn.Connect()
    q.expect('dbus-signal', signal='StatusChanged', args=[0L, 0L])
    # NOTE(review): basic_txt is built but never used in this test.
    basic_txt = { "txtvers": "1", "status": "avail" }
    # Request a RoomList channel and drive one room listing cycle.
    t = conn.RequestChannel(CHANNEL_TYPE_ROOMLIST, 0, 0, True)
    channel = make_channel_proxy(conn, t, "Channel.Type.RoomList")
    channel.ListRooms()
    q.expect('dbus-signal', signal='ListingRooms', args=[True])
    e = q.expect('dbus-signal', signal='GotRooms')
    rooms = e.args[0]
    # No Clique rooms have been announced, so the listing must be empty.
    assert rooms == []
    q.expect('dbus-signal', signal='ListingRooms', args=[False])
    # TODO: announce some Clique rooms and check is they are properly listed
# TODO: announce some Clique rooms and check is they are properly listed
# Script entry point: run the test via the salut test harness.
if __name__ == '__main__':
    exec_test(test)
| lgpl-2.1 | Python | |
81f9e6b245e81d653184579ca0c78b4d5559e715 | Create config-lite.py | pyreflos/RasPi-Rattle | config-lite.py | config-lite.py | ##
## User configuration file - edit these settings to suit your own project
##
file_path = '/home/pi/RPi-RTL/images/' ## path to save images
file_prefix = 'img_' ## prefix before timestamp.jpg, if needed - e.g. a project number
use_timestamp = True ## True = timestamp in filename, False = incremental numbering
| mit | Python | |
b0f112c6ab2a8860e9032adcccc4c90a4c43d5c3 | Create __init__.py | momotarou-zamurai/kibidango | maya/python/playblast/timeUnitConvasion/__init__.py | maya/python/playblast/timeUnitConvasion/__init__.py | mit | Python | ||
76ccb3e14da170000c8071203e931eeb8bc7c642 | Add a test case for deepcopy | AltSchool/dynamic-rest-client | tests/test_deepcopy.py | tests/test_deepcopy.py | from tests.models import (
Cat,
Location,
)
import copy
from rest_framework.test import APITestCase
class DeepcopyTestCase(APITestCase):
    """Checks that model instances with relations survive copy.deepcopy."""

    def test_cat(self):
        # Build a small object graph: a cat with a home and a parent.
        home = Location(name='Home', blob='ILUVU')
        papa = Cat(name='Papa')
        kitkat = Cat(name='KitKat', home=home, parent=papa)
        kitkat_clone = copy.deepcopy(kitkat)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual instead.
        self.assertEqual(kitkat.name, kitkat_clone.name)
        self.assertEqual(kitkat.home.name, kitkat_clone.home.name)
        self.assertEqual(kitkat.parent.name, kitkat_clone.parent.name)
| mit | Python | |
880a15054ab5fbc49afe2aafce584fd423e511fa | Define constants module | napalm-automation/napalm-iosxr,spotify/napalm,napalm-automation/napalm,spotify/napalm | napalm_iosxr/constants.py | napalm_iosxr/constants.py | """Constants for the IOS-XR driver."""
from __future__ import unicode_literals
from napalm_base.constants import * # noqa
SR_638170159_SOLVED = False
# Flag: has Cisco TAC service request SR 638170159 been resolved?
#
# "XML Agent Does not retrieve correct BGP routes data"
# is a weird bug reported on 2016-02-22 22:54:21.
# Briefly, all BGP routes are handled by the XML agent
# in such a way that they have the following details:
#
# - all neighbors are 0.0.0.0
# - all routes are 0.0.0.0/0
# - all RD = 0000000000000000
#
# Because of this, none of the data retrieved from the BGP oper is
# usable, which directly affects our implementation of `get_route_to`
# when retrieving the BGP protocol specific attributes.
| apache-2.0 | Python | |
47d770c6008116dd72c6c6b4572a0a92faa39e66 | Add test file for update.py | CristianCantoro/sbntoolkit,CristianCantoro/sbntoolkit | test/test_update.py | test/test_update.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging
# Make the project root (parent directory) importable so the `update`
# module under test can be found.
basedir = os.path.realpath('..')
if basedir not in sys.path:
    sys.path.append(basedir)
import update as up
# logging
# Per-level stdout formats: DEBUG adds module/function/line context.
LOGFORMAT_STDOUT = {
    logging.DEBUG: '%(module)s:%(funcName)s:%(lineno)s - '
                   '%(levelname)-8s: %(message)s',
    logging.INFO: '%(levelname)-8s: %(message)s',
    logging.WARNING: '%(levelname)-8s: %(message)s',
    logging.ERROR: '%(levelname)-8s: %(message)s',
    logging.CRITICAL: '%(levelname)-8s: %(message)s'}
# --- root logger
# Configure the 'sbnredirect' logger to emit DEBUG messages to stdout.
rootlogger = logging.getLogger('sbnredirect')
rootlogger.setLevel(logging.DEBUG)
lvl_config_logger = logging.DEBUG
console = logging.StreamHandler()
console.setLevel(lvl_config_logger)
formatter = logging.Formatter(LOGFORMAT_STDOUT[lvl_config_logger])
console.setFormatter(formatter)
rootlogger.addHandler(console)
# Smoke test: log the resolved config path and load it with update.py.
if __name__ == '__main__':
    CONFIG_FILENAME = 'update.cfg'
    config_file = os.path.realpath(os.path.join('..', CONFIG_FILENAME))
    rootlogger.debug(config_file)
    up.read_config(config_file)
| mit | Python | |
7101dac32926e0f1403b44a93a6f5882a0aa5d2e | Create woc32p3.py | Chuck8521/LunchtimeBoredom,Chuck8521/LunchtimeBoredom,Chuck8521/LunchtimeBoredom | woc32p3.py | woc32p3.py | #!/bin/python3
import sys
def boolProd(m1, m2):
# NOTE(review): boolProd has no body (SyntaxError as written); the
# boolean matrix product step referred to below was never implemented.
def circularWalk(n, s, t, r_0, g, seed, p):
    # Complete this function
    # Builds dist[q]: the per-point step range (r_0 for q == 0, then the
    # recurrence (prev * g + seed) % p), and an n x n 0/1 matrix where
    # boolMatrix[q][j] == 1 iff point j is reachable from q in one step.
    dist = []
    boolMatrix = [[] for w in range(n)]
    for w in range(n):
        for e in range(n):
            boolMatrix[w].append(0)
    for q in range(n):
        if q == 0:
            dist.append(r_0)
            i = 1
            while i <= r_0:
                # NOTE(review): q+i can exceed n-1 here (no wrap-around
                # unlike the q > 0 branch) -- potential IndexError for
                # large r_0.  q-i relies on Python's negative indexing
                # for the backwards circular wrap.
                boolMatrix[q][q+i] = 1
                boolMatrix[q][q-i] = 1
                i += 1
        else:
            dist.append((dist[q - 1] * g + seed) % p)
            i = 1
            while i <= (dist[q - 1] * g + seed) % p:
                if q+i < n:
                    boolMatrix[q][q+i] = 1
                else:
                    boolMatrix[q][(q+i) % n] = 1
                boolMatrix[q][q-i] = 1
                i += 1
    #boolMatrix is now right. Take products till answer and count steps
    # NOTE(review): the step counting from s to t (repeated boolean
    # matrix products / BFS) is unimplemented; the function currently
    # returns the adjacency matrix instead of the number of steps.
    return boolMatrix
# Parse "n s t" and "r_0 g seed p" from stdin, then print the result.
n, s, t = input().strip().split(' ')
n, s, t = [int(n), int(s), int(t)]
r_0, g, seed, p = input().strip().split(' ')
r_0, g, seed, p = [int(r_0), int(g), int(seed), int(p)]
result = circularWalk(n, s, t, r_0, g, seed, p)
print(result)
| mit | Python | |
97b933815dcbc179e25bc9c1c16cfa1153036ae1 | Add performance test for epsilon convolution | timqian/neurons,johannesmik/neurons | performance_tests/epsilon_convolution.py | performance_tests/epsilon_convolution.py | #!/usr/bin/python3
'''
Convolution
'''
from __future__ import print_function
import numpy as np
import cProfile
import random
import matplotlib.pyplot as plt
def eps(s, t_membran):
    """Exponential membrane kernel: exp(-s / t_membran)."""
    decay_exponent = -s / t_membran
    return np.exp(decay_exponent)
def small_spiketrain(length=1000, num_spikes=10):
    """Return an int array of `length` zeros with `num_spikes` ones at
    distinct random positions (default: 1000 timesteps, 10 spikes).

    Fixes two defects of the original:
    - `random.randint(0, length)` is inclusive at both ends, so index
      `length` could be drawn and raise IndexError;
    - drawing indices independently could collide, yielding fewer than
      `num_spikes` spikes.  `random.sample` guarantees distinct,
      in-range positions.
    """
    s = np.zeros(length, dtype=int)
    for index in random.sample(range(length), num_spikes):
        s[index] = 1
    return s
def big_spiketrain(length=1000000, num_spikes=10000):
    """Return an int array of `length` zeros with `num_spikes` ones at
    distinct random positions (default: 1e6 timesteps, 1e4 spikes).

    Same fixes as small_spiketrain: the original's
    `random.randint(0, length)` could return `length` itself (IndexError),
    and independent draws could collide, producing fewer spikes than
    documented.
    """
    s = np.zeros(length, dtype=int)
    for index in random.sample(range(length), num_spikes):
        s[index] = 1
    return s
# Benchmark driver: convolve a large spiketrain with the epsilon kernel,
# profile the convolution, and plot both curves.
if __name__ == "__main__":
    import matplotlib.pyplot as plt
    # Kernel time constants (ms); t_current is defined but unused here.
    t_current = 0.3
    t_membran = 20
    # Epsilon Function as a vector
    x = np.linspace(0, 200, 200)
    epsilon_vector = eps(x, t_membran)
    # Spiketrain
    s = big_spiketrain()
    # Convolute
    s = (np.convolve(s, epsilon_vector, 'same'))
    # Profile a second run of the same convolution.
    cProfile.run('np.convolve(s, epsilon_vector, "same")')
    plt.plot(s, label='Convoluted Spiketrain')
    plt.plot(x, epsilon_vector, label='epsilon vector')
    plt.legend()
    plt.show()
| bsd-2-clause | Python | |
5e27bf30286265f6ce2ba82a8a2edbae2bb421ae | add tests | wyplay/pytracemalloc,wyplay/pytracemalloc | test_tracemalloc.py | test_tracemalloc.py | import os
import sys
import time
import tracemalloc
import unittest
EMPTY_STRING_SIZE = sys.getsizeof(b'')
def get_lineno():
    """Return the line number currently executing in the caller's frame."""
    return sys._getframe(1).f_lineno
def allocate_bytes(size):
    # Allocate a bytes object whose total size (including the object
    # header, hence the EMPTY_STRING_SIZE correction) is `size`, and
    # return (data, (filename, lineno)) identifying the allocation site.
    filename = __file__
    # CAREFUL: the `+ 1` assumes the allocating statement is on the very
    # next source line -- keep these two statements adjacent.
    lineno = get_lineno() + 1
    data = b'x' * (size - EMPTY_STRING_SIZE)
    return data, (filename, lineno)
class TestTracemalloc(unittest.TestCase):
    # NOTE(review): this targets the third-party pytracemalloc module
    # (enable/disable, get_source, get_stats, start_timer/stop_timer),
    # not the stdlib tracemalloc API added in Python 3.4.
    def setUp(self):
        tracemalloc.enable()
    def tearDown(self):
        tracemalloc.disable()
    def test_get_source(self):
        # The traced allocation site must match the (filename, lineno)
        # recorded by allocate_bytes().
        obj, obj_source = allocate_bytes(12345)
        source = tracemalloc.get_source(obj)
        self.assertEqual(source, obj_source)
    def test_get_stats(self):
        # Five allocations from the same source line must be aggregated
        # into one (total_size, count) entry.
        total = 0
        count = 0
        objs = []
        for index in range(5):
            size = 1234
            obj, source = allocate_bytes(size)
            objs.append(obj)
            total += size
            count += 1
        stats = tracemalloc.get_stats()
        filename, lineno = source
        self.assertEqual(stats[filename][lineno], (total, count))
    def test_timer(self):
        calls = []
        def func(*args, **kw):
            calls.append((args, kw))
        # timer enabled
        # After the 1-second delay, the next traced allocation should
        # fire the timer callback exactly once, with our args/kwargs.
        args = (1, 2, 3)
        kwargs = {'arg': 4}
        tracemalloc.start_timer(1, func, args, kwargs)
        time.sleep(1)
        obj, source = allocate_bytes(123)
        self.assertEqual(len(calls), 1)
        call = calls[0]
        self.assertEqual(call, (args, kwargs))
        # timer disabled
        # A later allocation must not trigger any further callback.
        tracemalloc.stop_timer()
        time.sleep(1)
        obj2, source2 = allocate_bytes(123)
        self.assertEqual(len(calls), 1)
# Script entry point: run the unittest test runner.
if __name__ == "__main__":
    unittest.main()
| mit | Python | |
fc3ac8ca281bccc2f50c9f1fdd9a16b0c8658a01 | Add GDP test | lab11/M-ulator,lab11/M-ulator,lab11/M-ulator,lab11/M-ulator,lab11/M-ulator,lab11/M-ulator,lab11/M-ulator | platforms/m3/programming/goc_gdp_test.py | platforms/m3/programming/goc_gdp_test.py | #!/usr/bin/env python2
import code
# Pillow moved Image into the PIL namespace; support both layouts.
try:
    import Image
except ImportError:
    from PIL import Image
import gdp
# Initialise the GDP library and open the test log (GCL) in
# read-append mode.
gdp.gdp_init()
gcl_name = gdp.GDP_NAME("edu.umich.eecs.m3.test01")
gcl_handle = gdp.GDP_GCL(gcl_name, gdp.GDP_MODE_RA)
# Example of appending a JPEG frame to the log (kept for reference):
#j = Image.open('/tmp/capture1060.jpeg')
#d = {"data": j.tostring()}
#gcl_handle.append(d)
# Read record #3 and display it; the record is assumed to hold raw
# 640x640 RGB pixel data -- TODO confirm against the writer.
record = gcl_handle.read(3)
raw = record['data']
# NOTE(review): Image.fromstring is deprecated in modern Pillow
# (use Image.frombytes).
image = Image.fromstring('RGB', (640,640), raw)
image.show()
#code.interact(local=locals())
| apache-2.0 | Python | |
b3624916b29d25d1baec7c55da4cc7184e724812 | Add tests for LocalizedFieldsAdminMixin | SectorLabs/django-localized-fields,SectorLabs/django-localized-fields,SectorLabs/django-localized-fields | tests/test_admin.py | tests/test_admin.py | from django.apps import apps
from django.contrib import admin
from django.contrib.admin.checks import check_admin_app
from django.db import models
from django.test import TestCase
from localized_fields.fields import LocalizedField
from localized_fields.admin import LocalizedFieldsAdminMixin
from tests.fake_model import get_fake_model
class LocalizedFieldsAdminMixinTestCase(TestCase):
    """Tests the :see:LocalizedFieldsAdminMixin class."""
    # Fake models created once per class; TestModel holds a localized
    # field and a FK to TestRelModel so inline admins can be exercised.
    TestModel = None
    TestRelModel = None
    @classmethod
    def setUpClass(cls):
        """Creates the test model in the database."""
        super(LocalizedFieldsAdminMixinTestCase, cls).setUpClass()
        cls.TestRelModel = get_fake_model(
            {
                'description': LocalizedField()
            }
        )
        cls.TestModel = get_fake_model(
            {
                'title': LocalizedField(),
                'rel': models.ForeignKey(cls.TestRelModel,
                                         on_delete=models.CASCADE)
            }
        )
    def tearDown(self):
        # Unregister between tests so each test can register admins
        # without AlreadyRegistered errors.
        if admin.site.is_registered(self.TestModel):
            admin.site.unregister(self.TestModel)
        if admin.site.is_registered(self.TestRelModel):
            admin.site.unregister(self.TestRelModel)
    @classmethod
    def test_model_admin(cls):
        """Tests whether the :see:LocalizedFieldsAdminMixin mixin
        works with admin.ModelAdmin (no admin system check errors)."""
        @admin.register(cls.TestModel)
        class TestModelAdmin(LocalizedFieldsAdminMixin, admin.ModelAdmin):
            pass
        assert len(check_admin_app(apps.get_app_configs())) == 0
    @classmethod
    def test_stackedmodel_admin(cls):
        """Tests whether the :see:LocalizedFieldsAdminMixin mixin
        works with admin.StackedInline."""
        class TestModelStackedInline(LocalizedFieldsAdminMixin,
                                     admin.StackedInline):
            model = cls.TestModel
        @admin.register(cls.TestRelModel)
        class TestRelModelAdmin(admin.ModelAdmin):
            inlines = [
                TestModelStackedInline,
            ]
        assert len(check_admin_app(apps.get_app_configs())) == 0
    @classmethod
    def test_tabularmodel_admin(cls):
        """Tests whether the :see:LocalizedFieldsAdminMixin mixin
        works with admin.TabularInline."""
        class TestModelTabularInline(LocalizedFieldsAdminMixin,
                                     admin.TabularInline):
            model = cls.TestModel
        @admin.register(cls.TestRelModel)
        class TestRelModelAdmin(admin.ModelAdmin):
            inlines = [
                TestModelTabularInline,
            ]
        assert len(check_admin_app(apps.get_app_configs())) == 0
| mit | Python | |
cf090648a8c88b7f30eaa925358ff175cbcb976c | use dtype float32 | fzalkow/scikit-learn,tosolveit/scikit-learn,russel1237/scikit-learn,nmayorov/scikit-learn,BiaDarkia/scikit-learn,untom/scikit-learn,cybernet14/scikit-learn,madjelan/scikit-learn,MartinDelzant/scikit-learn,yunfeilu/scikit-learn,quheng/scikit-learn,glouppe/scikit-learn,etkirsch/scikit-learn,kagayakidan/scikit-learn,ephes/scikit-learn,meduz/scikit-learn,ltiao/scikit-learn,bigdataelephants/scikit-learn,hrjn/scikit-learn,aminert/scikit-learn,thientu/scikit-learn,466152112/scikit-learn,LohithBlaze/scikit-learn,mhue/scikit-learn,themrmax/scikit-learn,eickenberg/scikit-learn,sergeyf/scikit-learn,jjx02230808/project0223,yyjiang/scikit-learn,samzhang111/scikit-learn,saiwing-yeung/scikit-learn,nvoron23/scikit-learn,arabenjamin/scikit-learn,LiaoPan/scikit-learn,krez13/scikit-learn,anntzer/scikit-learn,lin-credible/scikit-learn,pratapvardhan/scikit-learn,fabianp/scikit-learn,HolgerPeters/scikit-learn,AlexanderFabisch/scikit-learn,huobaowangxi/scikit-learn,jseabold/scikit-learn,marcocaccin/scikit-learn,jpautom/scikit-learn,shyamalschandra/scikit-learn,mrshu/scikit-learn,nomadcube/scikit-learn,huzq/scikit-learn,andaag/scikit-learn,lazywei/scikit-learn,vinayak-mehta/scikit-learn,ahoyosid/scikit-learn,cl4rke/scikit-learn,pratapvardhan/scikit-learn,mojoboss/scikit-learn,mehdidc/scikit-learn,petosegan/scikit-learn,pompiduskus/scikit-learn,harshaneelhg/scikit-learn,dhruv13J/scikit-learn,CforED/Machine-Learning,stylianos-kampakis/scikit-learn,ephes/scikit-learn,tawsifkhan/scikit-learn,CVML/scikit-learn,JosmanPS/scikit-learn,rishikksh20/scikit-learn,Srisai85/scikit-learn,cainiaocome/scikit-learn,xiaoxiamii/scikit-learn,ahoyosid/scikit-learn,trankmichael/scikit-learn,RomainBrault/scikit-learn,tosolveit/scikit-learn,mjgrav2001/scikit-learn,vivekmishra1991/scikit-learn,mblondel/scikit-learn,TomDLT/scikit-learn,simon-pepin/scikit-learn,rrohan/scikit-learn,gotomypc/scikit-learn,liberatorqjw/scikit-learn,Myasuka/scikit-learn,dsulli
van7/scikit-learn,siutanwong/scikit-learn,RPGOne/scikit-learn,aabadie/scikit-learn,h2educ/scikit-learn,nesterione/scikit-learn,roxyboy/scikit-learn,phdowling/scikit-learn,jorik041/scikit-learn,ChanChiChoi/scikit-learn,joshloyal/scikit-learn,kylerbrown/scikit-learn,thilbern/scikit-learn,zhenv5/scikit-learn,AnasGhrab/scikit-learn,pnedunuri/scikit-learn,ngoix/OCRF,heli522/scikit-learn,AlexandreAbraham/scikit-learn,ningchi/scikit-learn,victorbergelin/scikit-learn,Garrett-R/scikit-learn,IndraVikas/scikit-learn,bigdataelephants/scikit-learn,lesteve/scikit-learn,B3AU/waveTree,macks22/scikit-learn,billy-inn/scikit-learn,arahuja/scikit-learn,procoder317/scikit-learn,hainm/scikit-learn,krez13/scikit-learn,pkruskal/scikit-learn,theoryno3/scikit-learn,ycaihua/scikit-learn,yonglehou/scikit-learn,hsuantien/scikit-learn,TomDLT/scikit-learn,depet/scikit-learn,ashhher3/scikit-learn,ashhher3/scikit-learn,vshtanko/scikit-learn,Nyker510/scikit-learn,vshtanko/scikit-learn,bikong2/scikit-learn,arabenjamin/scikit-learn,huobaowangxi/scikit-learn,xavierwu/scikit-learn,sarahgrogan/scikit-learn,cybernet14/scikit-learn,terkkila/scikit-learn,JosmanPS/scikit-learn,saiwing-yeung/scikit-learn,kmike/scikit-learn,IndraVikas/scikit-learn,florian-f/sklearn,hdmetor/scikit-learn,466152112/scikit-learn,xwolf12/scikit-learn,rsivapr/scikit-learn,IssamLaradji/scikit-learn,huobaowangxi/scikit-learn,jakirkham/scikit-learn,kashif/scikit-learn,jmetzen/scikit-learn,lucidfrontier45/scikit-learn,pnedunuri/scikit-learn,spallavolu/scikit-learn,zorojean/scikit-learn,Titan-C/scikit-learn,themrmax/scikit-learn,potash/scikit-learn,joernhees/scikit-learn,ldirer/scikit-learn,treycausey/scikit-learn,victorbergelin/scikit-learn,tomlof/scikit-learn,jseabold/scikit-learn,arjoly/scikit-learn,fredhusser/scikit-learn,toastedcornflakes/scikit-learn,sergeyf/scikit-learn,gclenaghan/scikit-learn,moutai/scikit-learn,hsiaoyi0504/scikit-learn,xyguo/scikit-learn,BiaDarkia/scikit-learn,shenzebang/scikit-learn,kmike/scikit-learn,dsquarein
dia/scikit-learn,zorojean/scikit-learn,bnaul/scikit-learn,Vimos/scikit-learn,AlexandreAbraham/scikit-learn,hsiaoyi0504/scikit-learn,glouppe/scikit-learn,pv/scikit-learn,evgchz/scikit-learn,vibhorag/scikit-learn,ZenDevelopmentSystems/scikit-learn,LiaoPan/scikit-learn,kevin-intel/scikit-learn,shahankhatch/scikit-learn,f3r/scikit-learn,shahankhatch/scikit-learn,MartinSavc/scikit-learn,nhejazi/scikit-learn,raghavrv/scikit-learn,dingocuster/scikit-learn,fabianp/scikit-learn,gotomypc/scikit-learn,JsNoNo/scikit-learn,lenovor/scikit-learn,yask123/scikit-learn,0x0all/scikit-learn,hsiaoyi0504/scikit-learn,shangwuhencc/scikit-learn,AnasGhrab/scikit-learn,alexsavio/scikit-learn,jjx02230808/project0223,IssamLaradji/scikit-learn,RomainBrault/scikit-learn,anirudhjayaraman/scikit-learn,fabioticconi/scikit-learn,mattgiguere/scikit-learn,jblackburne/scikit-learn,wazeerzulfikar/scikit-learn,abhishekkrthakur/scikit-learn,PrashntS/scikit-learn,kaichogami/scikit-learn,lenovor/scikit-learn,mattgiguere/scikit-learn,wanggang3333/scikit-learn,vortex-ape/scikit-learn,waterponey/scikit-learn,MatthieuBizien/scikit-learn,waterponey/scikit-learn,ilyes14/scikit-learn,tawsifkhan/scikit-learn,Akshay0724/scikit-learn,voxlol/scikit-learn,mlyundin/scikit-learn,joernhees/scikit-learn,mugizico/scikit-learn,glennq/scikit-learn,alexsavio/scikit-learn,IshankGulati/scikit-learn,xzh86/scikit-learn,bnaul/scikit-learn,herilalaina/scikit-learn,aabadie/scikit-learn,liangz0707/scikit-learn,Srisai85/scikit-learn,robbymeals/scikit-learn,alvarofierroclavero/scikit-learn,hitszxp/scikit-learn,mojoboss/scikit-learn,Garrett-R/scikit-learn,aetilley/scikit-learn,mikebenfield/scikit-learn,anntzer/scikit-learn,russel1237/scikit-learn,anirudhjayaraman/scikit-learn,aewhatley/scikit-learn,pkruskal/scikit-learn,ndingwall/scikit-learn,deepesch/scikit-learn,MatthieuBizien/scikit-learn,Windy-Ground/scikit-learn,mrshu/scikit-learn,quheng/scikit-learn,potash/scikit-learn,dingocuster/scikit-learn,sarahgrogan/scikit-learn,abhishekkrtha
kur/scikit-learn,walterreade/scikit-learn,qifeigit/scikit-learn,Vimos/scikit-learn,pypot/scikit-learn,plissonf/scikit-learn,larsmans/scikit-learn,idlead/scikit-learn,florian-f/sklearn,ldirer/scikit-learn,0asa/scikit-learn,anurag313/scikit-learn,mwv/scikit-learn,mlyundin/scikit-learn,DonBeo/scikit-learn,espg/scikit-learn,herilalaina/scikit-learn,Achuth17/scikit-learn,B3AU/waveTree,yask123/scikit-learn,vermouthmjl/scikit-learn,aabadie/scikit-learn,liangz0707/scikit-learn,cauchycui/scikit-learn,russel1237/scikit-learn,jereze/scikit-learn,jereze/scikit-learn,NunoEdgarGub1/scikit-learn,rexshihaoren/scikit-learn,wanggang3333/scikit-learn,xyguo/scikit-learn,MechCoder/scikit-learn,rohanp/scikit-learn,PrashntS/scikit-learn,lazywei/scikit-learn,0asa/scikit-learn,alexeyum/scikit-learn,tawsifkhan/scikit-learn,toastedcornflakes/scikit-learn,walterreade/scikit-learn,walterreade/scikit-learn,bikong2/scikit-learn,mikebenfield/scikit-learn,DonBeo/scikit-learn,mhdella/scikit-learn,simon-pepin/scikit-learn,glemaitre/scikit-learn,abimannans/scikit-learn,rahuldhote/scikit-learn,jlegendary/scikit-learn,espg/scikit-learn,cl4rke/scikit-learn,wzbozon/scikit-learn,luo66/scikit-learn,rexshihaoren/scikit-learn,ssaeger/scikit-learn,fabioticconi/scikit-learn,aewhatley/scikit-learn,evgchz/scikit-learn,larsmans/scikit-learn,Akshay0724/scikit-learn,mattilyra/scikit-learn,meduz/scikit-learn,yyjiang/scikit-learn,sumspr/scikit-learn,andrewnc/scikit-learn,manhhomienbienthuy/scikit-learn,glemaitre/scikit-learn,nvoron23/scikit-learn,ilo10/scikit-learn,PatrickOReilly/scikit-learn,jpautom/scikit-learn,xubenben/scikit-learn,hainm/scikit-learn,jblackburne/scikit-learn,mfjb/scikit-learn,zorojean/scikit-learn,lbishal/scikit-learn,jlegendary/scikit-learn,bigdataelephants/scikit-learn,ndingwall/scikit-learn,ishanic/scikit-learn,pianomania/scikit-learn,justincassidy/scikit-learn,kjung/scikit-learn,fengzhyuan/scikit-learn,equialgo/scikit-learn,phdowling/scikit-learn,rsivapr/scikit-learn,YinongLong/scikit-learn,djg
agne/scikit-learn,mhue/scikit-learn,mayblue9/scikit-learn,shusenl/scikit-learn,mblondel/scikit-learn,yanlend/scikit-learn,appapantula/scikit-learn,aetilley/scikit-learn,lenovor/scikit-learn,h2educ/scikit-learn,jm-begon/scikit-learn,roxyboy/scikit-learn,NelisVerhoef/scikit-learn,NunoEdgarGub1/scikit-learn,ilyes14/scikit-learn,ltiao/scikit-learn,AlexanderFabisch/scikit-learn,nikitasingh981/scikit-learn,anurag313/scikit-learn,eg-zhang/scikit-learn,rahul-c1/scikit-learn,smartscheduling/scikit-learn-categorical-tree,jkarnows/scikit-learn,pythonvietnam/scikit-learn,hdmetor/scikit-learn,0x0all/scikit-learn,smartscheduling/scikit-learn-categorical-tree,cwu2011/scikit-learn,kevin-intel/scikit-learn,vortex-ape/scikit-learn,rishikksh20/scikit-learn,mrshu/scikit-learn,murali-munna/scikit-learn,wlamond/scikit-learn,xuewei4d/scikit-learn,shusenl/scikit-learn,sarahgrogan/scikit-learn,JosmanPS/scikit-learn,harshaneelhg/scikit-learn,JsNoNo/scikit-learn,lucidfrontier45/scikit-learn,Sentient07/scikit-learn,lin-credible/scikit-learn,hrjn/scikit-learn,yanlend/scikit-learn,alexeyum/scikit-learn,mjudsp/Tsallis,Djabbz/scikit-learn,LiaoPan/scikit-learn,Fireblend/scikit-learn,ElDeveloper/scikit-learn,giorgiop/scikit-learn,pianomania/scikit-learn,ngoix/OCRF,cainiaocome/scikit-learn,Obus/scikit-learn,jmetzen/scikit-learn,maheshakya/scikit-learn,hdmetor/scikit-learn,DSLituiev/scikit-learn,fengzhyuan/scikit-learn,NelisVerhoef/scikit-learn,toastedcornflakes/scikit-learn,wzbozon/scikit-learn,rajat1994/scikit-learn,IshankGulati/scikit-learn,nvoron23/scikit-learn,MatthieuBizien/scikit-learn,altairpearl/scikit-learn,yonglehou/scikit-learn,shenzebang/scikit-learn,heli522/scikit-learn,CforED/Machine-Learning,JsNoNo/scikit-learn,eickenberg/scikit-learn,etkirsch/scikit-learn,Clyde-fare/scikit-learn,dsquareindia/scikit-learn,joshloyal/scikit-learn,CforED/Machine-Learning,Lawrence-Liu/scikit-learn,cauchycui/scikit-learn,AIML/scikit-learn,xzh86/scikit-learn,abhishekgahlot/scikit-learn,RachitKansal/scikit-le
arn,arjoly/scikit-learn,Lawrence-Liu/scikit-learn,altairpearl/scikit-learn,florian-f/sklearn,nrhine1/scikit-learn,appapantula/scikit-learn,betatim/scikit-learn,luo66/scikit-learn,hugobowne/scikit-learn,elkingtonmcb/scikit-learn,xiaoxiamii/scikit-learn,vivekmishra1991/scikit-learn,mxjl620/scikit-learn,ogrisel/scikit-learn,pratapvardhan/scikit-learn,mlyundin/scikit-learn,arabenjamin/scikit-learn,tmhm/scikit-learn,imaculate/scikit-learn,ldirer/scikit-learn,schets/scikit-learn,kashif/scikit-learn,massmutual/scikit-learn,RachitKansal/scikit-learn,scikit-learn/scikit-learn,chrsrds/scikit-learn,henrykironde/scikit-learn,mattilyra/scikit-learn,dsullivan7/scikit-learn,frank-tancf/scikit-learn,fyffyt/scikit-learn,jayflo/scikit-learn,pompiduskus/scikit-learn,jkarnows/scikit-learn,Srisai85/scikit-learn,ilyes14/scikit-learn,shangwuhencc/scikit-learn,Fireblend/scikit-learn,yask123/scikit-learn,ChanderG/scikit-learn,scikit-learn/scikit-learn,Djabbz/scikit-learn,ChanderG/scikit-learn,trankmichael/scikit-learn,schets/scikit-learn,amueller/scikit-learn,RayMick/scikit-learn,michigraber/scikit-learn,spallavolu/scikit-learn,zorroblue/scikit-learn,abhishekkrthakur/scikit-learn,lesteve/scikit-learn,nhejazi/scikit-learn,loli/semisupervisedforests,betatim/scikit-learn,PrashntS/scikit-learn,gclenaghan/scikit-learn,amueller/scikit-learn,fabianp/scikit-learn,kaichogami/scikit-learn,ankurankan/scikit-learn,pkruskal/scikit-learn,RachitKansal/scikit-learn,bikong2/scikit-learn,lin-credible/scikit-learn,shikhardb/scikit-learn,themrmax/scikit-learn,Obus/scikit-learn,ashhher3/scikit-learn,glennq/scikit-learn,arabenjamin/scikit-learn,marcocaccin/scikit-learn,larsmans/scikit-learn,xuewei4d/scikit-learn,xubenben/scikit-learn,yask123/scikit-learn,rohanp/scikit-learn,icdishb/scikit-learn,sanketloke/scikit-learn,smartscheduling/scikit-learn-categorical-tree,mlyundin/scikit-learn,loli/semisupervisedforests,costypetrisor/scikit-learn,poryfly/scikit-learn,idlead/scikit-learn,schets/scikit-learn,beepee14/sciki
t-learn,btabibian/scikit-learn,herilalaina/scikit-learn,jakobworldpeace/scikit-learn,shenzebang/scikit-learn,belltailjp/scikit-learn,jorik041/scikit-learn,Clyde-fare/scikit-learn,MartinDelzant/scikit-learn,yyjiang/scikit-learn,vortex-ape/scikit-learn,altairpearl/scikit-learn,dingocuster/scikit-learn,jakobworldpeace/scikit-learn,michigraber/scikit-learn,JeanKossaifi/scikit-learn,bthirion/scikit-learn,appapantula/scikit-learn,OshynSong/scikit-learn,ssaeger/scikit-learn,imaculate/scikit-learn,huzq/scikit-learn,nmayorov/scikit-learn,michigraber/scikit-learn,robin-lai/scikit-learn,nelson-liu/scikit-learn,joshloyal/scikit-learn,cybernet14/scikit-learn,jjx02230808/project0223,ky822/scikit-learn,PatrickOReilly/scikit-learn,ZenDevelopmentSystems/scikit-learn,mattgiguere/scikit-learn,hsuantien/scikit-learn,devanshdalal/scikit-learn,zorroblue/scikit-learn,thientu/scikit-learn,ishanic/scikit-learn,ElDeveloper/scikit-learn,jorge2703/scikit-learn,PatrickChrist/scikit-learn,lucidfrontier45/scikit-learn,hsiaoyi0504/scikit-learn,vshtanko/scikit-learn,glouppe/scikit-learn,ankurankan/scikit-learn,jereze/scikit-learn,kylerbrown/scikit-learn,joshloyal/scikit-learn,arahuja/scikit-learn,aminert/scikit-learn,zuku1985/scikit-learn,treycausey/scikit-learn,ky822/scikit-learn,sanketloke/scikit-learn,gclenaghan/scikit-learn,AnasGhrab/scikit-learn,mwv/scikit-learn,IssamLaradji/scikit-learn,rsivapr/scikit-learn,shusenl/scikit-learn,Nyker510/scikit-learn,elkingtonmcb/scikit-learn,LohithBlaze/scikit-learn,fredhusser/scikit-learn,vermouthmjl/scikit-learn,Fireblend/scikit-learn,procoder317/scikit-learn,zuku1985/scikit-learn,tosolveit/scikit-learn,meduz/scikit-learn,dsullivan7/scikit-learn,olologin/scikit-learn,depet/scikit-learn,vivekmishra1991/scikit-learn,lin-credible/scikit-learn,qifeigit/scikit-learn,trankmichael/scikit-learn,zihua/scikit-learn,466152112/scikit-learn,kevin-intel/scikit-learn,liberatorqjw/scikit-learn,nrhine1/scikit-learn,spallavolu/scikit-learn,Djabbz/scikit-learn,xavierwu/scikit
-learn,cainiaocome/scikit-learn,davidgbe/scikit-learn,waterponey/scikit-learn,kjung/scikit-learn,raghavrv/scikit-learn,jlegendary/scikit-learn,yonglehou/scikit-learn,stylianos-kampakis/scikit-learn,tmhm/scikit-learn,stylianos-kampakis/scikit-learn,alexeyum/scikit-learn,olologin/scikit-learn,andaag/scikit-learn,abhishekgahlot/scikit-learn,AIML/scikit-learn,kmike/scikit-learn,khkaminska/scikit-learn,rvraghav93/scikit-learn,fzalkow/scikit-learn,AlexandreAbraham/scikit-learn,MohammedWasim/scikit-learn,kaichogami/scikit-learn,Aasmi/scikit-learn,rexshihaoren/scikit-learn,untom/scikit-learn,fredhusser/scikit-learn,ilo10/scikit-learn,vortex-ape/scikit-learn,Barmaley-exe/scikit-learn,CVML/scikit-learn,ycaihua/scikit-learn,frank-tancf/scikit-learn,0asa/scikit-learn,hrjn/scikit-learn,NunoEdgarGub1/scikit-learn,pianomania/scikit-learn,trankmichael/scikit-learn,jmschrei/scikit-learn,kevin-intel/scikit-learn,olologin/scikit-learn,jayflo/scikit-learn,jayflo/scikit-learn,appapantula/scikit-learn,rajat1994/scikit-learn,equialgo/scikit-learn,tdhopper/scikit-learn,ChanderG/scikit-learn,pythonvietnam/scikit-learn,krez13/scikit-learn,gclenaghan/scikit-learn,giorgiop/scikit-learn,murali-munna/scikit-learn,maheshakya/scikit-learn,kagayakidan/scikit-learn,krez13/scikit-learn,thientu/scikit-learn,Fireblend/scikit-learn,nvoron23/scikit-learn,mwv/scikit-learn,florian-f/sklearn,3manuek/scikit-learn,huobaowangxi/scikit-learn,bnaul/scikit-learn,henridwyer/scikit-learn,TomDLT/scikit-learn,sarahgrogan/scikit-learn,costypetrisor/scikit-learn,abhishekgahlot/scikit-learn,wazeerzulfikar/scikit-learn,jmschrei/scikit-learn,LiaoPan/scikit-learn,betatim/scikit-learn,Akshay0724/scikit-learn,Titan-C/scikit-learn,loli/sklearn-ensembletrees,kylerbrown/scikit-learn,belltailjp/scikit-learn,sinhrks/scikit-learn,shikhardb/scikit-learn,abimannans/scikit-learn,OshynSong/scikit-learn,trungnt13/scikit-learn,zuku1985/scikit-learn,zhenv5/scikit-learn,toastedcornflakes/scikit-learn,belltailjp/scikit-learn,wazeerzulfikar
/scikit-learn,jakirkham/scikit-learn,harshaneelhg/scikit-learn,0asa/scikit-learn,aewhatley/scikit-learn,vigilv/scikit-learn,ycaihua/scikit-learn,mhdella/scikit-learn,anntzer/scikit-learn,zaxtax/scikit-learn,ChanderG/scikit-learn,Adai0808/scikit-learn,ashhher3/scikit-learn,vshtanko/scikit-learn,fabioticconi/scikit-learn,xiaoxiamii/scikit-learn,xuewei4d/scikit-learn,Lawrence-Liu/scikit-learn,spallavolu/scikit-learn,MatthieuBizien/scikit-learn,qifeigit/scikit-learn,nrhine1/scikit-learn,xzh86/scikit-learn,nomadcube/scikit-learn,AnasGhrab/scikit-learn,eickenberg/scikit-learn,r-mart/scikit-learn,davidgbe/scikit-learn,ky822/scikit-learn,vivekmishra1991/scikit-learn,vigilv/scikit-learn,OshynSong/scikit-learn,betatim/scikit-learn,ZENGXH/scikit-learn,potash/scikit-learn,stylianos-kampakis/scikit-learn,sonnyhu/scikit-learn,IndraVikas/scikit-learn,samzhang111/scikit-learn,larsmans/scikit-learn,ivannz/scikit-learn,jzt5132/scikit-learn,tmhm/scikit-learn,manashmndl/scikit-learn,jkarnows/scikit-learn,NelisVerhoef/scikit-learn,fyffyt/scikit-learn,cl4rke/scikit-learn,andaag/scikit-learn,xubenben/scikit-learn,UNR-AERIAL/scikit-learn,MartinSavc/scikit-learn,etkirsch/scikit-learn,maheshakya/scikit-learn,deepesch/scikit-learn,vermouthmjl/scikit-learn,ky822/scikit-learn,siutanwong/scikit-learn,samuel1208/scikit-learn,massmutual/scikit-learn,PatrickChrist/scikit-learn,MartinDelzant/scikit-learn,Jimmy-Morzaria/scikit-learn,elkingtonmcb/scikit-learn,plissonf/scikit-learn,nikitasingh981/scikit-learn,loli/sklearn-ensembletrees,3manuek/scikit-learn,depet/scikit-learn,rvraghav93/scikit-learn,jaidevd/scikit-learn,ningchi/scikit-learn,hitszxp/scikit-learn,billy-inn/scikit-learn,ssaeger/scikit-learn,nikitasingh981/scikit-learn,MartinSavc/scikit-learn,kjung/scikit-learn,ltiao/scikit-learn,herilalaina/scikit-learn,anirudhjayaraman/scikit-learn,macks22/scikit-learn,thilbern/scikit-learn,Sentient07/scikit-learn,pythonvietnam/scikit-learn,harshaneelhg/scikit-learn,sinhrks/scikit-learn,chrisburr/scikit-l
earn,djgagne/scikit-learn,akionakamura/scikit-learn,MartinDelzant/scikit-learn,zorojean/scikit-learn,lbishal/scikit-learn,RayMick/scikit-learn,raghavrv/scikit-learn,glennq/scikit-learn,imaculate/scikit-learn,ilo10/scikit-learn,bhargav/scikit-learn,vigilv/scikit-learn,jmschrei/scikit-learn,carrillo/scikit-learn,marcocaccin/scikit-learn,chrsrds/scikit-learn,mjudsp/Tsallis,qifeigit/scikit-learn,rexshihaoren/scikit-learn,MohammedWasim/scikit-learn,mugizico/scikit-learn,yonglehou/scikit-learn,RPGOne/scikit-learn,nelson-liu/scikit-learn,petosegan/scikit-learn,liyu1990/sklearn,q1ang/scikit-learn,espg/scikit-learn,liberatorqjw/scikit-learn,ZenDevelopmentSystems/scikit-learn,Jimmy-Morzaria/scikit-learn,shikhardb/scikit-learn,MechCoder/scikit-learn,jzt5132/scikit-learn,ngoix/OCRF,lucidfrontier45/scikit-learn,waterponey/scikit-learn,fabianp/scikit-learn,akionakamura/scikit-learn,macks22/scikit-learn,bigdataelephants/scikit-learn,PatrickOReilly/scikit-learn,jereze/scikit-learn,UNR-AERIAL/scikit-learn,vigilv/scikit-learn,Garrett-R/scikit-learn,wzbozon/scikit-learn,manashmndl/scikit-learn,mattgiguere/scikit-learn,abimannans/scikit-learn,pompiduskus/scikit-learn,victorbergelin/scikit-learn,madjelan/scikit-learn,heli522/scikit-learn,YinongLong/scikit-learn,pypot/scikit-learn,jmetzen/scikit-learn,chrisburr/scikit-learn,kaichogami/scikit-learn,dingocuster/scikit-learn,yunfeilu/scikit-learn,rrohan/scikit-learn,kagayakidan/scikit-learn,luo66/scikit-learn,Titan-C/scikit-learn,ogrisel/scikit-learn,joernhees/scikit-learn,cauchycui/scikit-learn,xavierwu/scikit-learn,JPFrancoia/scikit-learn,Garrett-R/scikit-learn,JeanKossaifi/scikit-learn,Srisai85/scikit-learn,carrillo/scikit-learn,sinhrks/scikit-learn,mblondel/scikit-learn,BiaDarkia/scikit-learn,sanketloke/scikit-learn,idlead/scikit-learn,eg-zhang/scikit-learn,B3AU/waveTree,xuewei4d/scikit-learn,mehdidc/scikit-learn,mhue/scikit-learn,akionakamura/scikit-learn,henrykironde/scikit-learn,madjelan/scikit-learn,arahuja/scikit-learn,jpautom/scik
it-learn,aflaxman/scikit-learn,evgchz/scikit-learn,costypetrisor/scikit-learn,aabadie/scikit-learn,henridwyer/scikit-learn,hlin117/scikit-learn,depet/scikit-learn,sinhrks/scikit-learn,chrsrds/scikit-learn,voxlol/scikit-learn,ldirer/scikit-learn,mayblue9/scikit-learn,shangwuhencc/scikit-learn,ZENGXH/scikit-learn,Sentient07/scikit-learn,Titan-C/scikit-learn,evgchz/scikit-learn,mhue/scikit-learn,murali-munna/scikit-learn,moutai/scikit-learn,luo66/scikit-learn,mhdella/scikit-learn,nesterione/scikit-learn,cl4rke/scikit-learn,moutai/scikit-learn,HolgerPeters/scikit-learn,ankurankan/scikit-learn,simon-pepin/scikit-learn,LohithBlaze/scikit-learn,xiaoxiamii/scikit-learn,frank-tancf/scikit-learn,nhejazi/scikit-learn,rrohan/scikit-learn,alvarofierroclavero/scikit-learn,treycausey/scikit-learn,Windy-Ground/scikit-learn,kashif/scikit-learn,trungnt13/scikit-learn,aewhatley/scikit-learn,jakobworldpeace/scikit-learn,deepesch/scikit-learn,mfjb/scikit-learn,jorge2703/scikit-learn,petosegan/scikit-learn,fbagirov/scikit-learn,pypot/scikit-learn,ClimbsRocks/scikit-learn,khkaminska/scikit-learn,bthirion/scikit-learn,NelisVerhoef/scikit-learn,hugobowne/scikit-learn,Vimos/scikit-learn,trungnt13/scikit-learn,PatrickChrist/scikit-learn,jseabold/scikit-learn,xwolf12/scikit-learn,bhargav/scikit-learn,wzbozon/scikit-learn,davidgbe/scikit-learn,nrhine1/scikit-learn,glemaitre/scikit-learn,tawsifkhan/scikit-learn,evgchz/scikit-learn,frank-tancf/scikit-learn,treycausey/scikit-learn,billy-inn/scikit-learn,glouppe/scikit-learn,sonnyhu/scikit-learn,Achuth17/scikit-learn,clemkoa/scikit-learn,AlexRobson/scikit-learn,loli/sklearn-ensembletrees,shyamalschandra/scikit-learn,saiwing-yeung/scikit-learn,cainiaocome/scikit-learn,khkaminska/scikit-learn,AlexanderFabisch/scikit-learn,hitszxp/scikit-learn,nikitasingh981/scikit-learn,massmutual/scikit-learn,ilo10/scikit-learn,adamgreenhall/scikit-learn,madjelan/scikit-learn,hlin117/scikit-learn,xzh86/scikit-learn,MechCoder/scikit-learn,JeanKossaifi/scikit-learn,vi
nayak-mehta/scikit-learn,liyu1990/sklearn,sonnyhu/scikit-learn,jblackburne/scikit-learn,jm-begon/scikit-learn,vibhorag/scikit-learn,MechCoder/scikit-learn,jakirkham/scikit-learn,eickenberg/scikit-learn,wlamond/scikit-learn,robin-lai/scikit-learn,schets/scikit-learn,shenzebang/scikit-learn,3manuek/scikit-learn,ningchi/scikit-learn,espg/scikit-learn,eg-zhang/scikit-learn,depet/scikit-learn,rahuldhote/scikit-learn,siutanwong/scikit-learn,vinayak-mehta/scikit-learn,themrmax/scikit-learn,nmayorov/scikit-learn,andrewnc/scikit-learn,hugobowne/scikit-learn,abhishekkrthakur/scikit-learn,fzalkow/scikit-learn,xwolf12/scikit-learn,clemkoa/scikit-learn,justincassidy/scikit-learn,robin-lai/scikit-learn,arahuja/scikit-learn,maheshakya/scikit-learn,zorroblue/scikit-learn,marcocaccin/scikit-learn,icdishb/scikit-learn,q1ang/scikit-learn,jmschrei/scikit-learn,shahankhatch/scikit-learn,JosmanPS/scikit-learn,wazeerzulfikar/scikit-learn,ankurankan/scikit-learn,procoder317/scikit-learn,huzq/scikit-learn,0x0all/scikit-learn,q1ang/scikit-learn,fyffyt/scikit-learn,mayblue9/scikit-learn,jzt5132/scikit-learn,zaxtax/scikit-learn,icdishb/scikit-learn,sgenoud/scikit-learn,eickenberg/scikit-learn,mxjl620/scikit-learn,manhhomienbienthuy/scikit-learn,jakobworldpeace/scikit-learn,vybstat/scikit-learn,zihua/scikit-learn,aflaxman/scikit-learn,gotomypc/scikit-learn,Lawrence-Liu/scikit-learn,wanggang3333/scikit-learn,JPFrancoia/scikit-learn,NunoEdgarGub1/scikit-learn,pythonvietnam/scikit-learn,mrshu/scikit-learn,MohammedWasim/scikit-learn,vybstat/scikit-learn,xavierwu/scikit-learn,henridwyer/scikit-learn,pv/scikit-learn,joernhees/scikit-learn,devanshdalal/scikit-learn,tomlof/scikit-learn,bikong2/scikit-learn,nmayorov/scikit-learn,zorroblue/scikit-learn,hitszxp/scikit-learn,rrohan/scikit-learn,mblondel/scikit-learn,walterreade/scikit-learn,bthirion/scikit-learn,AlexandreAbraham/scikit-learn,IndraVikas/scikit-learn,pratapvardhan/scikit-learn,pv/scikit-learn,Adai0808/scikit-learn,iismd17/scikit-learn,shangw
uhencc/scikit-learn,pv/scikit-learn,Myasuka/scikit-learn,ngoix/OCRF,sgenoud/scikit-learn,f3r/scikit-learn,chrsrds/scikit-learn,justincassidy/scikit-learn,potash/scikit-learn,untom/scikit-learn,jorge2703/scikit-learn,alvarofierroclavero/scikit-learn,thilbern/scikit-learn,anurag313/scikit-learn,jpautom/scikit-learn,aminert/scikit-learn,clemkoa/scikit-learn,glennq/scikit-learn,sonnyhu/scikit-learn,mojoboss/scikit-learn,quheng/scikit-learn,mjudsp/Tsallis,zhenv5/scikit-learn,Achuth17/scikit-learn,lbishal/scikit-learn,jaidevd/scikit-learn,jlegendary/scikit-learn,simon-pepin/scikit-learn,Myasuka/scikit-learn,UNR-AERIAL/scikit-learn,Aasmi/scikit-learn,ClimbsRocks/scikit-learn,icdishb/scikit-learn,terkkila/scikit-learn,nelson-liu/scikit-learn,moutai/scikit-learn,mhdella/scikit-learn,nhejazi/scikit-learn,rvraghav93/scikit-learn,OshynSong/scikit-learn,vermouthmjl/scikit-learn,hrjn/scikit-learn,michigraber/scikit-learn,ishanic/scikit-learn,lesteve/scikit-learn,Clyde-fare/scikit-learn,massmutual/scikit-learn,rahul-c1/scikit-learn,aetilley/scikit-learn,mxjl620/scikit-learn,mattilyra/scikit-learn,lbishal/scikit-learn,r-mart/scikit-learn,alexeyum/scikit-learn,rsivapr/scikit-learn,olologin/scikit-learn,ephes/scikit-learn,AlexRobson/scikit-learn,adamgreenhall/scikit-learn,billy-inn/scikit-learn,IshankGulati/scikit-learn,mikebenfield/scikit-learn,ssaeger/scikit-learn,B3AU/waveTree,rajat1994/scikit-learn,vinayak-mehta/scikit-learn,nesterione/scikit-learn,manashmndl/scikit-learn,DonBeo/scikit-learn,arjoly/scikit-learn,mfjb/scikit-learn,CVML/scikit-learn,Sentient07/scikit-learn,raghavrv/scikit-learn,yyjiang/scikit-learn,siutanwong/scikit-learn,henrykironde/scikit-learn,alexsavio/scikit-learn,Garrett-R/scikit-learn,russel1237/scikit-learn,B3AU/waveTree,scikit-learn/scikit-learn,ycaihua/scikit-learn,andrewnc/scikit-learn,xubenben/scikit-learn,0x0all/scikit-learn,mattilyra/scikit-learn,YinongLong/scikit-learn,jmetzen/scikit-learn,rsivapr/scikit-learn,btabibian/scikit-learn,abhishekgahlot/sc
ikit-learn,rahuldhote/scikit-learn,JsNoNo/scikit-learn,MohammedWasim/scikit-learn,nelson-liu/scikit-learn,arjoly/scikit-learn,RPGOne/scikit-learn,macks22/scikit-learn,RayMick/scikit-learn,RomainBrault/scikit-learn,HolgerPeters/scikit-learn,equialgo/scikit-learn,treycausey/scikit-learn,mfjb/scikit-learn,phdowling/scikit-learn,ivannz/scikit-learn,bnaul/scikit-learn,Achuth17/scikit-learn,AlexanderFabisch/scikit-learn,jblackburne/scikit-learn,sumspr/scikit-learn,jorik041/scikit-learn,liangz0707/scikit-learn,Djabbz/scikit-learn,eg-zhang/scikit-learn,sgenoud/scikit-learn,djgagne/scikit-learn,xyguo/scikit-learn,plissonf/scikit-learn,andrewnc/scikit-learn,sergeyf/scikit-learn,ndingwall/scikit-learn,tmhm/scikit-learn,mikebenfield/scikit-learn,terkkila/scikit-learn,bthirion/scikit-learn,jm-begon/scikit-learn,adamgreenhall/scikit-learn,quheng/scikit-learn,rishikksh20/scikit-learn,Aasmi/scikit-learn,CVML/scikit-learn,yunfeilu/scikit-learn,terkkila/scikit-learn,mehdidc/scikit-learn,tomlof/scikit-learn,heli522/scikit-learn,hitszxp/scikit-learn,xyguo/scikit-learn,cwu2011/scikit-learn,untom/scikit-learn,ngoix/OCRF,pkruskal/scikit-learn,TomDLT/scikit-learn,dhruv13J/scikit-learn,loli/sklearn-ensembletrees,tdhopper/scikit-learn,phdowling/scikit-learn,iismd17/scikit-learn,ilyes14/scikit-learn,Barmaley-exe/scikit-learn,mjgrav2001/scikit-learn,zhenv5/scikit-learn,poryfly/scikit-learn,zihua/scikit-learn,hsuantien/scikit-learn,ZenDevelopmentSystems/scikit-learn,samuel1208/scikit-learn,hugobowne/scikit-learn,ahoyosid/scikit-learn,PatrickChrist/scikit-learn,AlexRobson/scikit-learn,loli/semisupervisedforests,Windy-Ground/scikit-learn,shahankhatch/scikit-learn,fbagirov/scikit-learn,rishikksh20/scikit-learn,deepesch/scikit-learn,beepee14/scikit-learn,aminert/scikit-learn,JeanKossaifi/scikit-learn,manhhomienbienthuy/scikit-learn,jseabold/scikit-learn,mojoboss/scikit-learn,victorbergelin/scikit-learn,sgenoud/scikit-learn,Jimmy-Morzaria/scikit-learn,nomadcube/scikit-learn,wlamond/scikit-learn,lian
gz0707/scikit-learn,cwu2011/scikit-learn,shyamalschandra/scikit-learn,Vimos/scikit-learn,tosolveit/scikit-learn,RachitKansal/scikit-learn,MartinSavc/scikit-learn,RayMick/scikit-learn,IssamLaradji/scikit-learn,vibhorag/scikit-learn,samzhang111/scikit-learn,tomlof/scikit-learn,akionakamura/scikit-learn,tdhopper/scikit-learn,ndingwall/scikit-learn,dsquareindia/scikit-learn,jorge2703/scikit-learn,fzalkow/scikit-learn,ClimbsRocks/scikit-learn,hainm/scikit-learn,saiwing-yeung/scikit-learn,ngoix/OCRF,Adai0808/scikit-learn,Jimmy-Morzaria/scikit-learn,poryfly/scikit-learn,shyamalschandra/scikit-learn,kagayakidan/scikit-learn,Barmaley-exe/scikit-learn,mrshu/scikit-learn,0x0all/scikit-learn,f3r/scikit-learn,chrisburr/scikit-learn,3manuek/scikit-learn,BiaDarkia/scikit-learn,hlin117/scikit-learn,zaxtax/scikit-learn,btabibian/scikit-learn,bhargav/scikit-learn,RPGOne/scikit-learn,smartscheduling/scikit-learn-categorical-tree,djgagne/scikit-learn,beepee14/scikit-learn,samzhang111/scikit-learn,dsullivan7/scikit-learn,voxlol/scikit-learn,ChanChiChoi/scikit-learn,btabibian/scikit-learn,khkaminska/scikit-learn,pnedunuri/scikit-learn,elkingtonmcb/scikit-learn,ogrisel/scikit-learn,AlexRobson/scikit-learn,amueller/scikit-learn,ChanChiChoi/scikit-learn,beepee14/scikit-learn,fengzhyuan/scikit-learn,zaxtax/scikit-learn,yanlend/scikit-learn,poryfly/scikit-learn,PrashntS/scikit-learn,ElDeveloper/scikit-learn,zuku1985/scikit-learn,imaculate/scikit-learn,DSLituiev/scikit-learn,mjgrav2001/scikit-learn,liyu1990/sklearn,AIML/scikit-learn,fredhusser/scikit-learn,lesteve/scikit-learn,ankurankan/scikit-learn,RomainBrault/scikit-learn,dhruv13J/scikit-learn,justincassidy/scikit-learn,henrykironde/scikit-learn,iismd17/scikit-learn,devanshdalal/scikit-learn,vybstat/scikit-learn,pypot/scikit-learn,hlin117/scikit-learn,sergeyf/scikit-learn,vibhorag/scikit-learn,anntzer/scikit-learn,fabioticconi/scikit-learn,jayflo/scikit-learn,xwolf12/scikit-learn,mjudsp/Tsallis,robbymeals/scikit-learn,sgenoud/scikit-learn,
DonBeo/scikit-learn,rohanp/scikit-learn,ogrisel/scikit-learn,ivannz/scikit-learn,loli/semisupervisedforests,abimannans/scikit-learn,robin-lai/scikit-learn,robbymeals/scikit-learn,voxlol/scikit-learn,kmike/scikit-learn,bhargav/scikit-learn,h2educ/scikit-learn,ElDeveloper/scikit-learn,cwu2011/scikit-learn,rahul-c1/scikit-learn,aflaxman/scikit-learn,pianomania/scikit-learn,q1ang/scikit-learn,Clyde-fare/scikit-learn,theoryno3/scikit-learn,JPFrancoia/scikit-learn,zihua/scikit-learn,adamgreenhall/scikit-learn,PatrickOReilly/scikit-learn,pnedunuri/scikit-learn,anurag313/scikit-learn,giorgiop/scikit-learn,Nyker510/scikit-learn,giorgiop/scikit-learn,theoryno3/scikit-learn,ahoyosid/scikit-learn,dhruv13J/scikit-learn,thilbern/scikit-learn,devanshdalal/scikit-learn,alvarofierroclavero/scikit-learn,jaidevd/scikit-learn,idlead/scikit-learn,liyu1990/sklearn,roxyboy/scikit-learn,h2educ/scikit-learn,meduz/scikit-learn,petosegan/scikit-learn,ningchi/scikit-learn,jakirkham/scikit-learn,lazywei/scikit-learn,murali-munna/scikit-learn,kashif/scikit-learn,r-mart/scikit-learn,LohithBlaze/scikit-learn,DSLituiev/scikit-learn,ishanic/scikit-learn,mjudsp/Tsallis,f3r/scikit-learn,anirudhjayaraman/scikit-learn,hsuantien/scikit-learn,chrisburr/scikit-learn,aetilley/scikit-learn,costypetrisor/scikit-learn,YinongLong/scikit-learn,ltiao/scikit-learn,Obus/scikit-learn,UNR-AERIAL/scikit-learn,jaidevd/scikit-learn,amueller/scikit-learn,vybstat/scikit-learn,shikhardb/scikit-learn,manashmndl/scikit-learn,florian-f/sklearn,andaag/scikit-learn,IshankGulati/scikit-learn,clemkoa/scikit-learn,Nyker510/scikit-learn,nomadcube/scikit-learn,hainm/scikit-learn,maheshakya/scikit-learn,samuel1208/scikit-learn,Adai0808/scikit-learn,lenovor/scikit-learn,mxjl620/scikit-learn,ZENGXH/scikit-learn,rajat1994/scikit-learn,mattilyra/scikit-learn,glemaitre/scikit-learn,lucidfrontier45/scikit-learn,tdhopper/scikit-learn,lazywei/scikit-learn,manhhomienbienthuy/scikit-learn,jjx02230808/project0223,rahul-c1/scikit-learn,ChanChiCh
oi/scikit-learn,liberatorqjw/scikit-learn,shusenl/scikit-learn,cauchycui/scikit-learn,ephes/scikit-learn,Barmaley-exe/scikit-learn,kjung/scikit-learn,etkirsch/scikit-learn,sumspr/scikit-learn,procoder317/scikit-learn,jzt5132/scikit-learn,hdmetor/scikit-learn,larsmans/scikit-learn,CforED/Machine-Learning,fbagirov/scikit-learn,mehdidc/scikit-learn,trungnt13/scikit-learn,abhishekgahlot/scikit-learn,JPFrancoia/scikit-learn,fbagirov/scikit-learn,huzq/scikit-learn,gotomypc/scikit-learn,mugizico/scikit-learn,mjgrav2001/scikit-learn,yanlend/scikit-learn,carrillo/scikit-learn,ZENGXH/scikit-learn,DSLituiev/scikit-learn,Akshay0724/scikit-learn,jkarnows/scikit-learn,roxyboy/scikit-learn,jm-begon/scikit-learn,sumspr/scikit-learn,carrillo/scikit-learn,iismd17/scikit-learn,0asa/scikit-learn,ivannz/scikit-learn,theoryno3/scikit-learn,Windy-Ground/scikit-learn,466152112/scikit-learn,henridwyer/scikit-learn,Aasmi/scikit-learn,mwv/scikit-learn,mayblue9/scikit-learn,yunfeilu/scikit-learn,kmike/scikit-learn,ycaihua/scikit-learn,equialgo/scikit-learn,davidgbe/scikit-learn,ClimbsRocks/scikit-learn,cybernet14/scikit-learn,belltailjp/scikit-learn,alexsavio/scikit-learn,rohanp/scikit-learn,rvraghav93/scikit-learn,AIML/scikit-learn,thientu/scikit-learn,wlamond/scikit-learn,robbymeals/scikit-learn,pompiduskus/scikit-learn,HolgerPeters/scikit-learn,samuel1208/scikit-learn,jorik041/scikit-learn,aflaxman/scikit-learn,scikit-learn/scikit-learn,loli/sklearn-ensembletrees,Obus/scikit-learn,rahuldhote/scikit-learn,fengzhyuan/scikit-learn,fyffyt/scikit-learn,dsquareindia/scikit-learn,nesterione/scikit-learn,wanggang3333/scikit-learn,plissonf/scikit-learn,mugizico/scikit-learn,r-mart/scikit-learn,kylerbrown/scikit-learn,altairpearl/scikit-learn,sanketloke/scikit-learn,Myasuka/scikit-learn | examples/ensemble/plot_gradient_boosting_quantile.py | examples/ensemble/plot_gradient_boosting_quantile.py | """
=====================================================
Prediction Intervals for Gradient Boosting Regression
=====================================================
This example shows how quantile regression can be used
to create prediction intervals.
"""
import numpy as np
from sklearn.ensemble import GradientBoostingRegressor
from matplotlib import pyplot as pl
# Fixed seed so the example is reproducible.
np.random.seed(1)
def f(x):
    """The function to predict."""
    return x * np.sin(x)
#----------------------------------------------------------------------
# First the noiseless case
X = np.atleast_2d(np.random.uniform(0, 10.0, size=100)).T
# float32 keeps memory down and matches the estimator's internal dtype.
X = X.astype(np.float32)
# Observations
y = f(X).ravel()
# Heteroscedastic noise: the spread grows with dy, drawn per-sample.
dy = 1.5 + 1.0 * np.random.random(y.shape)
noise = np.random.normal(0, dy)
y += noise
y = y.astype(np.float32)
# Mesh the input space for evaluations of the real function, the prediction and
# its MSE
xx = np.atleast_2d(np.linspace(0, 10, 1000)).T
xx = xx.astype(np.float32)
# alpha=0.95 -> the quantile loss fits the upper bound of the 90% interval
# (the lower bound is fit below with alpha = 1 - 0.95 = 0.05).
alpha = 0.95
clf = GradientBoostingRegressor(loss='quantile', alpha=alpha,
                                n_estimators=250, max_depth=3,
                                learn_rate=.1, min_samples_leaf=9,
                                min_samples_split=9)
clf.fit(X, y)
# Make the prediction on the meshed x-axis
y_upper = clf.predict(xx)
# Refit the same estimator for the lower quantile.
clf.set_params(alpha=1.0 - alpha)
clf.fit(X, y)
# Make the prediction on the meshed x-axis
y_lower = clf.predict(xx)
# Least-squares loss gives the conditional-mean prediction.
clf.set_params(loss='ls')
clf.fit(X, y)
# Make the prediction on the meshed x-axis
y_pred = clf.predict(xx)
# Plot the function, the prediction and the 95% confidence interval based on
# the MSE
fig = pl.figure()
pl.plot(xx, f(xx), 'g:', label=u'$f(x) = x\,\sin(x)$')
pl.plot(X, y, 'b.', markersize=10, label=u'Observations')
pl.plot(xx, y_pred, 'r-', label=u'Prediction')
pl.plot(xx, y_upper, 'k-')
pl.plot(xx, y_lower, 'k-')
pl.fill(np.concatenate([xx, xx[::-1]]),
        np.concatenate([y_upper, y_lower[::-1]]),
        alpha=.5, fc='b', ec='None', label='95% prediction interval')
pl.xlabel('$x$')
pl.ylabel('$f(x)$')
pl.ylim(-10, 20)
pl.legend(loc='upper left')
pl.show()
| """
=====================================================
Prediction Intervals for Gradient Boosting Regression
=====================================================
This example shows how quantile regression can be used
to create prediction intervals.
"""
import numpy as np
from sklearn.ensemble import GradientBoostingRegressor
from matplotlib import pyplot as pl
# Fixed seed so the example is reproducible.
np.random.seed(1)
def f(x):
    """The function to predict."""
    return x * np.sin(x)
#----------------------------------------------------------------------
# First the noiseless case
X = np.atleast_2d(np.random.uniform(0, 10.0, size=100)).T
# Observations
y = f(X).ravel()
# Heteroscedastic noise: the spread grows with dy, drawn per-sample.
dy = 1.5 + 1.0 * np.random.random(y.shape)
noise = np.random.normal(0, dy)
y += noise
# Mesh the input space for evaluations of the real function, the prediction and
# its MSE
xx = np.atleast_2d(np.linspace(0, 10, 1000)).T
# alpha=0.95 -> the quantile loss fits the upper prediction bound
# (the lower bound is fit below with alpha = 1 - 0.95 = 0.05).
alpha = 0.95
clf = GradientBoostingRegressor(loss='quantile', alpha=alpha,
                                n_estimators=250, max_depth=3,
                                learn_rate=.1, min_samples_leaf=9,
                                min_samples_split=9)
clf.fit(X, y)
# Make the prediction on the meshed x-axis
y_upper = clf.predict(xx)
# Refit the same estimator for the lower quantile.
clf.set_params(alpha=1.0 - alpha)
clf.fit(X, y)
# Make the prediction on the meshed x-axis
y_lower = clf.predict(xx)
# Least-squares loss gives the conditional-mean prediction.
clf.set_params(loss='ls')
clf.fit(X, y)
# Make the prediction on the meshed x-axis
y_pred = clf.predict(xx)
# Plot the function, the prediction and the 95% confidence interval based on
# the MSE
fig = pl.figure()
pl.plot(xx, f(xx), 'g:', label=u'$f(x) = x\,\sin(x)$')
pl.plot(X, y, 'b.', markersize=10, label=u'Observations')
pl.plot(xx, y_pred, 'r-', label=u'Prediction')
pl.plot(xx, y_upper, 'k-')
pl.plot(xx, y_lower, 'k-')
pl.fill(np.concatenate([xx, xx[::-1]]),
        np.concatenate([y_upper, y_lower[::-1]]),
        alpha=.5, fc='b', ec='None', label='95% prediction interval')
pl.xlabel('$x$')
pl.ylabel('$f(x)$')
pl.ylim(-10, 20)
pl.legend(loc='upper left')
pl.show()
| bsd-3-clause | Python |
42347f1106f91cb68fe914c9ea05e0c24f46ee08 | add missing migration for langpack model | mozilla/zamboni,ddurst/zamboni,luckylavish/zamboni,luckylavish/zamboni,luckylavish/zamboni,tsl143/zamboni,diox/zamboni,washort/zamboni,elysium001/zamboni,ingenioustechie/zamboni,tsl143/zamboni,ingenioustechie/zamboni,mozilla/zamboni,ingenioustechie/zamboni,ddurst/zamboni,mozilla/zamboni,elysium001/zamboni,shahbaz17/zamboni,ddurst/zamboni,shahbaz17/zamboni,jasonthomas/zamboni,ddurst/zamboni,washort/zamboni,shahbaz17/zamboni,jasonthomas/zamboni,tsl143/zamboni,luckylavish/zamboni,mozilla/zamboni,washort/zamboni,washort/zamboni,tsl143/zamboni,diox/zamboni,elysium001/zamboni,shahbaz17/zamboni,diox/zamboni,jasonthomas/zamboni,ingenioustechie/zamboni,elysium001/zamboni,diox/zamboni,jasonthomas/zamboni | mkt/langpacks/migrations/0002_auto_20150824_0820.py | mkt/langpacks/migrations/0002_auto_20150824_0820.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated: widens the LangPack.language choices to the full set of
    # supported locales. The choices list below is machine-generated; do not
    # hand-edit individual entries.

    dependencies = [
        ('langpacks', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='langpack',
            name='language',
            field=models.CharField(default=b'en-US', max_length=10, choices=[(b'el', '\u0395\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac'), (b'xh', 'isiXhosa'), (b'bn-BD', '\u09ac\u09be\u0982\u09b2\u09be (\u09ac\u09be\u0982\u09b2\u09be\u09a6\u09c7\u09b6)'), (b'af', 'Afrikaans'), (b'ee', 'E\u028be'), (b'bn-IN', '\u09ac\u09be\u0982\u09b2\u09be (\u09ad\u09be\u09b0\u09a4)'), (b'ca', 'Catal\xe0'), (b'en-US', 'English (US)'), (b'it', 'Italiano'), (b'cs', '\u010ce\u0161tina'), (b'cy', 'Cymraeg'), (b'ar', '\u0639\u0631\u0628\u064a'), (b'pt-BR', 'Portugu\xeas (do\xa0Brasil)'), (b'zu', 'isiZulu'), (b'eu', 'Euskara'), (b'sv-SE', 'Svenska'), (b'id', 'Bahasa Indonesia'), (b'es', 'Espa\xf1ol'), (b'en-GB', 'English (British)'), (b'ru', '\u0420\u0443\u0441\u0441\u043a\u0438\u0439'), (b'nl', 'Nederlands'), (b'zh-TW', '\u6b63\u9ad4\u4e2d\u6587 (\u7e41\u9ad4)'), (b'tr', 'T\xfcrk\xe7e'), (b'ga-IE', 'Gaeilge'), (b'zh-CN', '\u4e2d\u6587 (\u7b80\u4f53)'), (b'ig', 'Igbo'), (b'ro', 'rom\xe2n\u0103'), (b'dsb', 'Dolnoserb\u0161\u0107ina'), (b'pl', 'Polski'), (b'hsb', 'Hornjoserbsce'), (b'fr', 'Fran\xe7ais'), (b'bg', '\u0411\u044a\u043b\u0433\u0430\u0440\u0441\u043a\u0438'), (b'yo', 'Yor\xf9b\xe1'), (b'wo', 'Wolof'), (b'de', 'Deutsch'), (b'da', 'Dansk'), (b'ff', 'Pulaar-Fulfulde'), (b'nb-NO', 'Norsk bokm\xe5l'), (b'ha', 'Hausa'), (b'ja', '\u65e5\u672c\u8a9e'), (b'sr', '\u0421\u0440\u043f\u0441\u043a\u0438'), (b'sq', 'Shqip'), (b'ko', '\ud55c\uad6d\uc5b4'), (b'sk', 'sloven\u010dina'), (b'uk', '\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430'), (b'sr-Latn', 'Srpski'), (b'hu', 'magyar'), (b'sw', 'Kiswahili')]),
            preserve_default=True,
        ),
    ]
| bsd-3-clause | Python | |
296ab4d527558a77cb635ea0754078c66bbd5462 | bump version to 1.0.0 pre | flatfox-ag/django-admin-sso,flatfox-ag/django-admin-sso,frog32/django-admin-sso,matthiask/django-admin-sso,allink/django-admin-sso,matthiask/django-admin-sso,diegobz/django-admin-sso,frog32/django-admin-sso,diegobz/django-admin-sso,allink/django-admin-sso | admin_sso/__init__.py | admin_sso/__init__.py | VERSION = (1, 0, 0, 'pre')
__version__ = '.'.join(map(str, VERSION))
# Do not use Django settings at module level as recommended
# If Django is not importable (e.g. during setup.py / docs builds) the
# settings machinery below is simply skipped.
try:
    from django.utils.functional import LazyObject
except ImportError:
    pass
else:
    class LazySettings(LazyObject):
        # Lazily resolves to a Settings() wrapper around the package's
        # default_settings module on first attribute access.
        def _setup(self):
            from admin_sso import default_settings
            self._wrapped = Settings(default_settings)
    class Settings(object):
        # Copies every UPPER_CASE attribute of settings_module onto self,
        # mirroring how django.conf.Settings selects setting names.
        def __init__(self, settings_module):
            for setting in dir(settings_module):
                if setting == setting.upper():
                    setattr(self, setting, getattr(settings_module, setting))
    settings = LazySettings()
VERSION = (0, 1, 3,)
__version__ = '.'.join(map(str, VERSION))
# Do not use Django settings at module level as recommended
# If Django is not importable (e.g. during setup.py / docs builds) the
# settings machinery below is simply skipped.
try:
    from django.utils.functional import LazyObject
except ImportError:
    pass
else:
    class LazySettings(LazyObject):
        # Lazily resolves to a Settings() wrapper around the package's
        # default_settings module on first attribute access.
        def _setup(self):
            from admin_sso import default_settings
            self._wrapped = Settings(default_settings)
    class Settings(object):
        # Copies every UPPER_CASE attribute of settings_module onto self,
        # mirroring how django.conf.Settings selects setting names.
        def __init__(self, settings_module):
            for setting in dir(settings_module):
                if setting == setting.upper():
                    setattr(self, setting, getattr(settings_module, setting))
    settings = LazySettings()
| bsd-3-clause | Python |
394d8b1ebd14a0ad566e67a352085968126701c4 | Add test for object source marked down | t-miyamae/teuthology,ivotron/teuthology,zhouyuan/teuthology,dmick/teuthology,robbat2/teuthology,dreamhost/teuthology,yghannam/teuthology,t-miyamae/teuthology,dreamhost/teuthology,robbat2/teuthology,michaelsevilla/teuthology,ivotron/teuthology,tchaikov/teuthology,tchaikov/teuthology,michaelsevilla/teuthology,ceph/teuthology,caibo2014/teuthology,SUSE/teuthology,ktdreyer/teuthology,zhouyuan/teuthology,dmick/teuthology,ktdreyer/teuthology,yghannam/teuthology,ceph/teuthology,dmick/teuthology,caibo2014/teuthology,SUSE/teuthology,SUSE/teuthology | teuthology/task/object_source_down.py | teuthology/task/object_source_down.py | import logging
import ceph_manager
from teuthology import misc as teuthology
import time
log = logging.getLogger(__name__)
def rados(remote, cmd):
    """Run the ``rados`` CLI on *remote* under the test harness wrappers
    (coredump enabler + coverage collector) and return its exit status.
    """
    log.info("rados %s" % ' '.join(cmd))
    args = [
        'LD_LIBRARY_PATH=/tmp/cephtest/binary/usr/local/lib',
        '/tmp/cephtest/enable-coredump',
        '/tmp/cephtest/binary/usr/local/bin/ceph-coverage',
        '/tmp/cephtest/archive/coverage',
        '/tmp/cephtest/binary/usr/local/bin/rados',
        '-c', '/tmp/cephtest/ceph.conf',
    ] + list(cmd)
    # check_status=False: callers inspect the exit status themselves.
    proc = remote.run(args=args, check_status=False)
    return proc.exitstatus
def task(ctx, config):
    """
    Test handling of object location going down.

    Marks OSDs in/out in a sequence that moves object locations away,
    then kills the OSDs holding the objects and verifies that the
    cluster reports unfound objects.
    """
    if config is None:
        config = {}
    # Fixed: the message used to say 'lost_unfound' (copy-paste from that task).
    assert isinstance(config, dict), \
        'object_source_down task only accepts a dict for configuration'
    first_mon = teuthology.get_first_mon(ctx, config)
    (mon,) = ctx.cluster.only(first_mon).remotes.iterkeys()
    manager = ceph_manager.CephManager(
        mon,
        ctx=ctx,
        logger=log.getChild('ceph_manager'),
        )
    while manager.get_osd_status()['up'] < 3:
        manager.sleep(10)
    manager.wait_for_clean()
    # something that is always there
    dummyfile = '/etc/fstab'
    # take 0, 1 out
    manager.mark_out_osd(0)
    manager.mark_out_osd(1)
    manager.wait_for_clean()
    # on every osd: delay recovery, and make the pg log very long
    # (to prevent backfill)
    for osd_id in range(4):
        manager.raw_cluster_cmd(
            'tell', 'osd.%d' % osd_id,
            'injectargs',
            '--osd-recovery-delay-start 10000 --osd-min-pg-log-entries 100000000'
            )
    # kludge to make sure they get a map
    rados(mon, ['-p', 'data', 'put', 'dummy', dummyfile])
    # create old objects
    for f in range(1, 10):
        rados(mon, ['-p', 'data', 'put', 'existing_%d' % f, dummyfile])
    manager.mark_out_osd(3)
    manager.wait_till_active()
    manager.mark_in_osd(0)
    manager.wait_till_active()
    manager.raw_cluster_cmd('tell', 'osd.2', 'flush_pg_stats')
    manager.raw_cluster_cmd('tell', 'osd.0', 'flush_pg_stats')
    manager.mark_out_osd(2)
    manager.wait_till_active()
    # bring up 1
    manager.mark_in_osd(1)
    manager.wait_till_active()
    manager.raw_cluster_cmd('tell', 'osd.1', 'flush_pg_stats')
    manager.raw_cluster_cmd('tell', 'osd.0', 'flush_pg_stats')
    # nothing should be unfound yet: the data is still reachable
    log.info("Getting unfound objects")
    unfound = manager.get_num_unfound_objects()
    assert not unfound
    # kill the OSDs that still hold the old copies
    manager.kill_osd(2)
    manager.mark_down_osd(2)
    manager.kill_osd(3)
    manager.mark_down_osd(3)
    manager.raw_cluster_cmd('tell', 'osd.1', 'flush_pg_stats')
    manager.raw_cluster_cmd('tell', 'osd.0', 'flush_pg_stats')
    # now the cluster must report the objects as unfound
    log.info("Getting unfound objects")
    unfound = manager.get_num_unfound_objects()
    assert unfound
| mit | Python | |
aa02a1ff3722b4ccb644daf4f5d57a0e01f5e9e2 | add make Data | mcvidomi/poim2motif | makeData.py | makeData.py | import random
import cPickle as pickle
from numpy import concatenate, ones, array, shape, size, zeros, exp, arange
from numpy import concatenate,ones,array,shape,size,zeros,exp
import numpy as np
import copy
import math
import pdb
dna = ['A', 'C', 'G', 'T']
def simulate_sequence(length, alphabet='ACGT'):
    """Return a uniformly random sequence of the given length.

    Parameters
    ----------
    length : int
        Number of characters to generate.
    alphabet : sequence of str, optional
        Symbols to draw from; defaults to the four DNA bases, matching
        the module-level ``dna`` list, so existing callers are unchanged.
    """
    return ''.join(random.choice(alphabet) for _ in range(length))
def mutate_motif(motiv, probmut):
    """Return a copy of *motiv* where each base is replaced, with
    probability *probmut*, by one of the three other DNA bases.

    Assumes *motiv* is composed of the characters A/C/G/T. The RNG is
    consumed exactly as before (one random() per base, plus one choice()
    per mutated base), so seeded runs are reproducible.
    """
    # Fixed: the old implementation assigned an unused local ``dna`` list.
    mutated = ""
    for base in motiv:
        if random.random() <= probmut:
            # draw uniformly from the three bases that differ from `base`
            alternatives = [b for b in 'ACGT' if b != base]
            mutated += random.choice(alternatives)
        else:
            mutated += base
    return mutated
def gensequences2(tally, positives, sequenceno, prob, motif, mu):
    """Generate *sequenceno* random DNA strings of length *tally*.

    The first *positives* sequences get a (possibly mutated) copy of
    *motif* implanted at offset *mu*.
    """
    motif_len = len(motif)
    sequences = []
    for idx in range(sequenceno):
        seq = simulate_sequence(tally)
        if idx < positives:
            implant = mutate_motif(motif, prob)
            seq = seq.replace(seq[mu:mu + motif_len], implant)
        sequences.append(seq)
    return sequences
def gensequences(tally, positives, sequenceno, prob, motif, mu):
    """Like gensequences2, but also return a +/-1 label vector.

    The first *positives* sequences carry the implanted motif and are
    labelled +1; the rest stay -1.
    """
    labels = np.full(sequenceno, -1.0)
    motif_len = len(motif)
    sequences = []
    for idx in range(sequenceno):
        seq = simulate_sequence(tally)
        if idx < positives:
            labels[idx] = 1
            implant = mutate_motif(motif, prob)
            seq = seq.replace(seq[mu:mu + motif_len], implant)
        sequences.append(seq)
    return sequences, labels
def non_polymorphic_loci(x):
    """Compute per-position base frequencies and report fixed loci.

    *x* is a list of equal-length strings. Returns a 4 x L array whose
    rows are the A/C/G/T frequencies at each position; positions where a
    base has frequency 1.0 are printed as non-polymorphic.

    Note: any character that is not A/C/G falls into the last row, i.e.
    it is counted as T.
    """
    counter = np.zeros((4,len(x[0])))
    for i in range(len(x)):
        for j in range(len(x[0])):
            if x[i][j] == 'A':
                counter[0,j]=counter[0,j]+1
            elif x[i][j] == 'C':
                counter[1,j]=counter[1,j]+1
            elif x[i][j] == 'G':
                counter[2,j]=counter[2,j]+1
            else:
                counter[3,j]=counter[3,j]+1
    # convert counts to frequencies
    counter=counter/len(x)
    dna = ['A', 'C', 'G', 'T']
    for i in range(len(counter[0])):
        for j in range(4):
            if counter[j,i] == 1.0:
                print "nucleotid " , dna[j]," position", str(i)
    return counter
def extractRealData(datapath, savepath, lines):
    """Read up to *lines* labelled sequences from a splice-data text file.

    Each input line looks like ``[+-]L SEQUENCE``: the first two
    characters hold an integer label and the rest (minus the trailing
    newline) is the sequence. Returns ``(sequences, labels)``; if
    *savepath* is non-empty, also pickles ``[sequences, labels]`` there.
    """
    # Fixed: removed a leftover pdb.set_trace() that stopped every loop
    # iteration in a debugger; open() replaces the Py2-only file() builtin.
    with open(datapath) as handle:
        data = handle.readlines()[:lines]
    labels = []
    x = []
    positives = 0
    for line in data:
        label = int(line[0:2])
        labels.append(label)
        x.append(line[3:-1])
        if label == 1:
            positives += 1
    print("number of positive labels: %d" % positives)
    if savepath != "":
        with open(savepath, 'wb') as fobj:
            pickle.dump([x, labels], fobj)
    return x, labels
#tally = 30 #length of training sequences
#sequenceno = 50 #number of training sequences
#positives = 12 #positives training sequences
#motiv = "CCTATA"
#mu = 10
#poim_degree = 3
#idxprob=np.arange(0.0,1.01,1.1)
#dna = ['A', 'C', 'G', 'T']
def compute_data(seq_length, seq_no, pos_no, motifs, mu):
    """Placeholder for training-data generation (see gensequences).

    Currently a stub that performs no work and returns None; the dead
    local assignment ``a = 1`` was removed.
    """
    #fm_train_dna = gensequences(tally,positives,sequenceno,0,motiv,mu)
    return None
| mit | Python | |
d32058b6a6d3db162b79628cadc9fa061672a297 | Add django sync db after migrate if using south | jocke-l/blues,andreif/blues,5monkeys/blues,5monkeys/blues,gelbander/blues,5monkeys/blues,andreif/blues,Sportamore/blues,gelbander/blues,jocke-l/blues,chrippa/blues,adisbladis/blues,gelbander/blues,adisbladis/blues,chrippa/blues,jocke-l/blues,Sportamore/blues,chrippa/blues,Sportamore/blues,andreif/blues,adisbladis/blues | blues/django.py | blues/django.py | from fabric.context_managers import cd
from fabric.decorators import task, runs_once
from fabric.operations import prompt
from refabric.api import run, info
from refabric.context_managers import shell_env
from refabric.contrib import blueprints
from . import virtualenv
from .application.project import virtualenv_path, python_path, sudo_project
blueprint = blueprints.get(__name__)
@task
def manage(cmd=''):
    """Run a Django management command inside the project's virtualenv,
    prompting for the command if none was given.
    """
    if not cmd:
        cmd = prompt('Enter django management command:')
    with sudo_project(), cd(python_path()), virtualenv.activate(virtualenv_path()), shell_env():
        return run('python manage.py {cmd}'.format(cmd=cmd))
@task
def deploy():
    """
    Migrate database and collect static files
    """
    # Migrate database
    migrate()
    # Collect static files
    collectstatic()
@task
def version():
    """Return the installed Django version as an int tuple, e.g. (1, 7, 2).

    The result is memoized on the function object so ``manage`` runs once.
    """
    if not hasattr(version, 'version'):
        v = manage('--version')
        version.version = tuple(map(int, v.split('\n')[0].strip().split('.')))
    return version.version
@task
@runs_once
def migrate():
    """Migrate the database schema.

    Django >= 1.7 has built-in migrations; older versions use South when
    enabled (followed by syncdb for apps without migrations), otherwise
    plain syncdb.
    """
    info('Migrate database')
    if version() >= (1, 7):
        manage('migrate')
    elif blueprint.get('use_south', True):
        manage('migrate --merge')
        manage('syncdb --noinput')  # TODO: Remove?
    else:
        manage('syncdb --noinput')
@task
@runs_once
def collectstatic():
    """Gather the project's static files into STATIC_ROOT."""
    info('Collect static files')
    manage('collectstatic --noinput')
| from fabric.context_managers import cd
from fabric.decorators import task, runs_once
from fabric.operations import prompt
from refabric.api import run, info
from refabric.context_managers import shell_env
from refabric.contrib import blueprints
from . import virtualenv
from .application.project import virtualenv_path, python_path, sudo_project
blueprint = blueprints.get(__name__)
@task
def manage(cmd=''):
    """Run a Django management command inside the project's virtualenv,
    prompting for the command if none was given.
    """
    if not cmd:
        cmd = prompt('Enter django management command:')
    with sudo_project(), cd(python_path()), virtualenv.activate(virtualenv_path()), shell_env():
        return run('python manage.py {cmd}'.format(cmd=cmd))
@task
def deploy():
    """
    Migrate database and collect static files
    """
    # Migrate database
    migrate()
    # Collect static files
    collectstatic()
@task
def version():
    """Return the installed Django version as an int tuple, e.g. (1, 7, 2).

    The result is memoized on the function object so ``manage`` runs once.
    """
    if not hasattr(version, 'version'):
        v = manage('--version')
        version.version = tuple(map(int, v.split('\n')[0].strip().split('.')))
    return version.version
@task
@runs_once
def migrate():
    """Migrate the database schema.

    Django >= 1.7 has built-in migrations; older versions use South when
    enabled, otherwise plain syncdb.
    """
    info('Migrate database')
    if version() >= (1, 7):
        manage('migrate')
    elif blueprint.get('use_south', True):
        manage('migrate --merge')
    else:
        manage('syncdb --noinput')
@task
@runs_once
def collectstatic():
    """Gather the project's static files into STATIC_ROOT."""
    info('Collect static files')
    manage('collectstatic --noinput')
| mit | Python |
60fa72f1d6c21eda46124db02f1907046f8e3cb4 | Add boot_switch.py | dhylands/upy-examples,dhylands/upy-examples | boot_switch.py | boot_switch.py | import pyb
sw = pyb.Switch()
# 1 - Red
# 2 - Green
# 3 - Yellow
# 4 - Blue
pyb.LED(2).off() # Turn Greem LED off since normal boot turns it on
led = pyb.LED(1)
leds = (pyb.LED(4), pyb.LED(3), pyb.LED(2))
try:
import boot_mode
persisted_mode = boot_mode.mode
mode = boot_mode.mode
except:
persisted_mode = -1
mode = 0
def mode_led(mode):
for led in leds:
led.off()
if mode >= 0:
leds[mode].on()
for i in range(10):
led.on()
pyb.delay(100)
led.off()
pyb.delay(100)
if sw():
while True:
mode_led(mode)
pyb.delay(500)
if not sw():
mode_led(-1)
break
mode = (mode + 1) % 3
break
for i in range(3):
mode_led(mode)
pyb.delay(100)
mode_led(-1)
pyb.delay(100)
usb_mode = ('CDC+MSC', 'CDC+HID', 'CDC')[mode]
if mode != persisted_mode:
with open('/flash/boot_mode.py', 'w') as f:
f.write('mode = %d\n' % mode)
f.write("usb_mode = '%s'\n" % usb_mode)
pyb.sync()
pyb.usb_mode(usb_mode)
# Note: prints which occur before the call pyb.usb_mode() will not show on the
# USB serial port.
print('usb_mode = %s' % usb_mode)
# Cleanup the namespace (since anything from boot.py shows up in the REPL)
del led, leds, mode, usb_mode, persisted_mode, i, sw, mode_led, boot_mode
| mit | Python | |
2bb33986002a1e1e5152b311662a200db717aa78 | change school object | rohitdatta/pepper,rohitdatta/pepper,rohitdatta/pepper | migrations/versions/2ae4701a60b4_.py | migrations/versions/2ae4701a60b4_.py | """empty message
Revision ID: 2ae4701a60b4
Revises: 013b5c571b68
Create Date: 2016-09-11 15:43:53.495932
"""
# revision identifiers, used by Alembic.
revision = '2ae4701a60b4'
down_revision = '013b5c571b68'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Replace users.school (free text) with school_id + school_name columns."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('school_id', sa.Integer(), nullable=True))
    op.add_column('users', sa.Column('school_name', sa.String(length=255), nullable=True))
    # NOTE(review): existing `school` values are dropped, not migrated into
    # `school_name` — confirm this is intentional.
    op.drop_column('users', 'school')
    ### end Alembic commands ###
def downgrade():
    """Revert to the single free-text users.school column (data is lost)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('school', sa.VARCHAR(length=255), autoincrement=False, nullable=True))
    op.drop_column('users', 'school_name')
    op.drop_column('users', 'school_id')
    ### end Alembic commands ###
| agpl-3.0 | Python | |
855057d96d0335005c172c244de5f20d27e54907 | Create check_purefa_occpy.py | PureStorage-OpenConnect/python-scripts | check_purefa_occpy.py | check_purefa_occpy.py | #!/usr/bin/env python
# Copyright (c) 2018 Pure Storage, Inc.
#
## Overview
#
# This short Nagios/Icinga plugin code shows how to build a simple plugin to monitor Pure Storage FlashArrays.
# The Pure Storage Python REST Client is used to query the FlashArray occupancy indicators.
# Plugin leverages the remarkably helpful nagiosplugin library by Christian Kauhaus.
#
## Installation
#
# The scripo should be copied to the Nagios plugins directory on the machine hosting the Nagios server or the NRPE
# for example the /usr/lib/nagios/plugins folder.
# Change the execution rights of the program to allow the execution to 'all' (usually chmod 0755).
#
## Dependencies
#
# nagiosplugin helper Python class library for Nagios plugins by Christian Kauhaus (http://pythonhosted.org/nagiosplugin)
# purestorage Pure Storage Python REST Client (https://github.com/purestorage/rest-client)
__author__ = "Eugenio Grosso"
__copyright__ = "Copyright 2018, Pure Storage Inc."
__credits__ = "Christian Kauhaus"
__license__ = "Apache v2.0"
__version__ = "1.0"
__maintainer__ = "Eugenio Grosso"
__email__ = "geneg@purestorage.com"
__status__ = "Production"
"""Pure Storage FlashArray occupancy status
Nagios plugin to retrieve the overall occupancy from a Pure Storage FlashArray.
Storage occupancy indicators are collected from the target FA using the REST call.
"""
import argparse
import logging
import nagiosplugin
import purestorage
import urllib3
_log = logging.getLogger('nagiosplugin')
class PureFAoccpy(nagiosplugin.Resource):
    """Pure Storage FlashArray overall occupancy
    Calculates the overall FA storage occupancy
    """
    def __init__(self, endpoint, apitoken):
        # endpoint: FA hostname or IP; apitoken: REST API token.
        self.endpoint = endpoint
        self.apitoken = apitoken
    def get_perf(self):
        """Gets performance counters from flasharray."""
        # The FA uses a self-signed certificate, so suppress the
        # InsecureRequestWarning that urllib3 would otherwise emit.
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        fa = purestorage.FlashArray(self.endpoint, api_token=self.apitoken)
        fainfo = fa.get(space=True)[0]
        fa.invalidate_cookie()
        return(fainfo)
    def probe(self):
        """Yield the occupancy metric (percent of capacity in use)."""
        fainfo = self.get_perf()
        _log.debug('FA REST call returned "%s" ', fainfo)
        # total/capacity rounded to 2 decimals, then scaled to percent.
        occupancy = round(float(fainfo.get('total'))/float(fainfo.get('capacity')), 2) * 100
        # NOTE(review): the trailing comma makes `metric` a 1-tuple; probe()
        # presumably may return an iterable of metrics — confirm against
        # the nagiosplugin API before "fixing" it.
        metric = nagiosplugin.Metric('occupancy', occupancy, '%', min=0),
        return metric
def parse_args():
    """Parse the plugin command line (endpoint, apitoken, thresholds)."""
    parser = argparse.ArgumentParser()
    parser.add_argument('endpoint', help="FA hostname or ip address")
    parser.add_argument('apitoken', help="FA api_token")
    # Optional flags, declared as data and registered in order.
    optional = [
        (('-w', '--warning'),
         dict(metavar='RANGE', default='',
              help='return warning if occupancy is outside RANGE')),
        (('-c', '--critical'),
         dict(metavar='RANGE', default='',
              help='return critical if occupancy is outside RANGE')),
        (('-v', '--verbose'),
         dict(action='count', default=0,
              help='increase output verbosity (use up to 3 times)')),
        (('-t', '--timeout'),
         dict(default=30,
              help='abort execution after TIMEOUT seconds')),
    ]
    for flags, options in optional:
        parser.add_argument(*flags, **options)
    return parser.parse_args()
@nagiosplugin.guarded
def main():
    """Build the nagiosplugin Check for FA occupancy and run it."""
    args = parse_args()
    check = nagiosplugin.Check( PureFAoccpy(args.endpoint, args.apitoken) )
    check.add(nagiosplugin.ScalarContext('occupancy', args.warning, args.critical))
    check.main(args.verbose, args.timeout)
if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
def check(process_output, judge_output, **kwargs):
    """Compare submission output to judge output, ignoring trailing
    whitespace on every line.

    If ``filter_new_line`` is present in *kwargs*, empty lines are
    dropped from both outputs before comparison. Returns True on match.
    """
    process_lines = process_output.split('\n')
    judge_lines = judge_output.split('\n')
    if 'filter_new_line' in kwargs:
        # List comprehensions behave identically on Python 2 and 3;
        # bare filter() returns a lazy object (no len()) on Python 3.
        process_lines = [line for line in process_lines if line]
        judge_lines = [line for line in judge_lines if line]
    if len(process_lines) != len(judge_lines):
        return False
    # Builtin zip replaces itertools.izip, which does not exist on Python 3.
    for process_line, judge_line in zip(process_lines, judge_lines):
        if process_line.rstrip() != judge_line.rstrip():
            return False
    return True
def check(process_output, judge_output, **kwargs):
    """Compare submission output to judge output, ignoring trailing
    whitespace on every line.

    If ``filter_new_line`` is present in *kwargs*, empty lines are
    dropped from both outputs before comparison. Returns True on match.
    """
    # Fixed: removed leftover debug print statements that dumped both
    # outputs and "fail 1"/"fail 2" markers on every judged submission.
    process_lines = process_output.split('\n')
    judge_lines = judge_output.split('\n')
    if 'filter_new_line' in kwargs:
        # List comprehensions behave identically on Python 2 and 3;
        # bare filter() returns a lazy object (no len()) on Python 3.
        process_lines = [line for line in process_lines if line]
        judge_lines = [line for line in judge_lines if line]
    if len(process_lines) != len(judge_lines):
        return False
    # Builtin zip replaces itertools.izip, which does not exist on Python 3.
    for process_line, judge_line in zip(process_lines, judge_lines):
        if process_line.rstrip() != judge_line.rstrip():
            return False
    return True
| agpl-3.0 | Python |
9d24ced3ea0cc010bd210643ee57895624892ee2 | Create power10.py | rdpapworth/powerof10 | power10.py | power10.py | import pandas as pd
import requests
import time
import numpy as np
import itertools
from lxml import html
from lxml import etree
def get_p10_results(events, ages, sexes, years):
    """
    Return a dictionary combining results returned for a given combination of
    url parameters.

    Scrapes thepowerof10.info ranking lists for every combination of the
    given events/ages/sexes/years and returns a single concatenated
    pandas DataFrame (note: despite the summary above, the return value
    is a DataFrame, not a dict).
    """
    # xpath patterns
    header_xpath = u'////tr[@class="rankinglistheadings"]//td'
    ranking_xpath = u'////*[@id="pnlMainRankings"]//table[1]//tr[(td[position() = 1 and normalize-space(.)!=""]) and (@class="rlr" or @class="rlra")]//td'
    athlete_xpath = ranking_xpath + '[7]//a[1]//@href'
    perfomance_xpath = ranking_xpath + '[14]//a[1]//@href'
    # url for power of 10
    protocol = "http://"
    domain = "www.thepowerof10.info"
    path = "/rankings/rankinglist.aspx"
    # construct a list containing every combination of parameters
    param_combos = list(itertools.product(events, ages, sexes, years))
    #rank_request_urls = [build_p10_url(event,age,sex,year) for event,age,sex,year in param_combos]
    header_list = []
    ranking_list = []
    athlete_list = []
    performance_list = []
    num_headers = None
    all_ranking_lists = []
    all_athlete_lists = []
    all_performance_lists = []
    result_dfs = []
    for event, age_group, sex, year in param_combos: #rank_request_urls:
        #params = (event, age, sex, year)
        print (event, age_group, sex, year)
        query = "?event=%s&agegroup=%s&sex=%s&year=%s" % (event, age_group, sex, year)
        rank_request_url = protocol + domain + path + query
        rank_page = requests.get(rank_request_url)
        tree = html.fromstring(rank_page.content)
        print rank_request_url
        # Get the header on first request
        # (headers are assumed identical across all requested pages)
        if not header_list:
            header_list = tree.xpath(header_xpath)
            header_list = [h.text_content().lower() for h in header_list]
            print header_list
            # Add in some missing headers
            header_list[2] = 'indoor'
            header_list[3] = 'wind'
            header_list[5] = 'pb_status'
            header_list[7] = 'age_group'
            header_list[-1] = 'performance_id'
            num_headers = len(header_list)
        ranking_list = [etree.tostring(elem, method='text', encoding='utf-8').strip()\
                        for elem in tree.xpath(ranking_xpath)]
        # athlete/performance ids are taken from the href query strings
        athlete_list = [elem.split("=")[1] \
                        for elem in tree.xpath(athlete_xpath)]
        performance_list = [elem.split("=")[1] \
                            for elem in tree.xpath(perfomance_xpath)]
        # NOTE(review): this relies on Python 2 integer division; on
        # Python 3 the comparison is float vs int — confirm before porting.
        assert len(ranking_list) / len(header_list) == len(athlete_list), \
            "mismatch between size of athlete and ranking list"
        # be nice and wait for a few seconds before making a second request
        time.sleep(5)
        # Could create and reshape at same time if I could figure out the
        # correct reshape syntax.
        ranking_np = np.array(ranking_list)
        ranking_np.shape = (len(athlete_list), num_headers)
        df = pd.DataFrame(data=ranking_np, columns=header_list)
        # best-effort type coercion; unparseable values are left/coerced
        df['rank'] = df['rank'].astype(int, raise_on_error=False)
        df['perf'] = df['perf'].astype(float, raise_on_error=False)
        df['wind'] = df['wind'].astype(float, raise_on_error=False)
        df['dob'] = pd.to_datetime(df['dob'], format='%d.%m.%y', errors='coerce')
        df['dop'] = pd.to_datetime(df['date'], format='%d %b %y', errors='coerce')
        # tag each page's rows with the query parameters that produced them
        df['event'] = event
        df['age_group2'] = age_group
        df['sex'] = sex
        df['year'] = year
        df['athlete_id'] = athlete_list
        df['performance_id'] = performance_list
        result_dfs.append(df)
    return pd.concat(result_dfs, ignore_index=True)
| mit | Python | |
143bd8066ed53f7a1f70664f89dfd7323aba8e57 | Create 07.py | ezralalonde/cloaked-octo-sansa | 02/qu/07.py | 02/qu/07.py | # Define a procedure, is_friend, that takes
# a string as its input, and returns a
# Boolean indicating if the input string
# is the name of a friend. Assume
# I am friends with everyone whose name
# starts with either 'D' or 'N', but no one
# else. You do not need to check for
# lower case 'd' or 'n'
def is_friend(name):
if name[0] == 'D':
return True
if name[0] == 'N':
return True
else:
return False
#print is_friend('Diane')
#>>> True
#print is_friend('Ned')
#>>> True
#print is_friend('Moe')
#>>> False
| bsd-2-clause | Python | |
f7d15618c661f1e7f555ce9d9a12fdbc851e76c9 | fix handling of version | Calysto/octave_kernel,Calysto/octave_kernel | octave_kernel/_version.py | octave_kernel/_version.py | __version__ = '0.33.1'
| bsd-3-clause | Python | |
b7b088fc8e46376c8cb4608738c9dffcdb7d5dec | Add a test to verify filedes.subprocess.Popen()'s close_fds | fmoo/python-filedes,fmoo/python-filedes | tests/subprocess.py | tests/subprocess.py | from __future__ import absolute_import
from filedes.test.base import BaseFDTestCase
from filedes.subprocess import Popen
from filedes import get_open_fds
from subprocess import PIPE, STDOUT
import filedes
class SubprocessTests(BaseFDTestCase):
    """Tests for filedes.subprocess.Popen's close_fds handling."""
    def testPopenCloseFds(self):
        """A pipe created before Popen(close_fds=True) must not leak
        into the child process's file-descriptor table."""
        r, w = filedes.pipe()
        try:
            # Create a subprocess that prints and blocks waiting for input
            p = Popen("echo ok; read foo", shell=True,
                      stdin=PIPE, stdout=PIPE, stderr=STDOUT,
                      close_fds=True)
            try:
                # Wait for the process to let us know it's alive
                ready = p.stdout.read(3)
                self.assertEquals(ready, "ok\n")
                # Get the list of FDs of the remote process
                remote_fds = get_open_fds(p.pid)
                # Make sure neither pipe end leaked into the child
                # (close_fds=True should have closed both).
                self.assertNotIn(r, remote_fds)
                self.assertNotIn(w, remote_fds)
                # Now send some output to the remote process to unblock it
                p.stdin.write("ok\n")
                p.stdin.flush()
                # Wait for it to shutdown
                self.assertEquals(p.wait(), 0)
            finally:
                # Popen does not close PIPE fds on process shutdown
                # automatically, even if there's no data in it. Since the
                # exception context is propagated to the test cases' tearDown,
                # the popen's pipes will show up as a leak
                del p
        finally:
            r.close()
            w.close()
| isc | Python | |
c2945bc741cea8213926e935d71d0c6281eadbd6 | reduce orbits to delays and notes | adrn/GalaxySynth | synthetic/orbitreducer.py | synthetic/orbitreducer.py | # coding: utf-8
""" Turn a collection of orbits into something we can make into music. """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Third-party
import numpy as np
from scipy.signal import argrelmin
__all__ = ['cyl_orbit_to_events', 'xyz_orbit_to_events']
def quantize(x, nbins, min=None, max=None):
    """Map the values of array *x* onto integer bin indices 0..nbins-1.

    When ``min > max`` the mapping is reversed: values near *max* land in
    the highest bin. Out-of-range values are clamped to the end bins.
    Missing bounds default to the data's own min/max.
    """
    lo = x.min() if min is None else min
    hi = x.max() if max is None else max
    if hi > lo:
        scaled = (x - lo) / (hi - lo) * (nbins - 1)
        q = np.round(scaled).astype(int)
        q[x > hi] = nbins - 1
        q[x < lo] = 0
    else:
        # Descending range: negate everything so the same ascending
        # formula applies, then clamp against the flipped bounds.
        flipped = -x.copy()
        f_lo, f_hi = -lo, -hi
        scaled = (flipped - f_lo) / (f_hi - f_lo) * (nbins - 1)
        q = np.round(scaled).astype(int)
        q[flipped > f_hi] = nbins - 1
        q[flipped < f_lo] = 0
    return q
def cyl_orbit_to_events(t, w, midi_pool_hi, midi_pool_lo):
    """
    Convert an orbit to MIDI events using cylindrical coordinates and rules.
    For cylindrical orbits, crossing the disk midplane (x-y plane) triggers a
    high note. Crossing the x-z plane triggers a low note. The pitch of the note
    is set by the cylindrical radius at the time of either crossing. Smaller
    radius triggers a higher pitch note.
    Parameters
    ----------
    t : array_like
        Times for each orbit step; t[j] becomes the event delay.
    w : array_like
        Orbit coordinates, indexed as w[time, orbit, component] with
        components 0..2 being x, y, z (further components, if any, unused).
    midi_pool_hi : array_like
        MIDI notes for midplane (z) crossings, ordered low to high pitch.
    midi_pool_lo : array_like
        MIDI notes for x-z plane (phi) crossings, ordered low to high pitch.
    Returns
    -------
    delays, notes : list, list of lists
        Event times and the de-duplicated notes sounding at each time.
    """
    R = np.sqrt(w[:,:,0]**2 + w[:,:,1]**2)
    phi = np.arctan2(w[:,:,1], w[:,:,0]) % (2*np.pi)
    z = w[:,:,2]
    # variable length arrays
    # (one index array per orbit; local minima of phi mark x-z plane
    #  crossings, local minima of z**2 mark midplane crossings)
    # NOTE(review): np.array over ragged lists relies on legacy object-array
    # creation — confirm behaviour on modern NumPy.
    phi_cross = np.array([argrelmin(pphi)[0] for pphi in phi.T])
    z_cross = np.array([argrelmin(zz**2)[0] for zz in z.T])
    # quantize R orbit
    # (min/max swapped on purpose: smaller radius -> higher bin -> higher pitch)
    nbins_hi = len(midi_pool_hi)
    q_R_hi = quantize(R, nbins=nbins_hi, min=R.max(), max=R.min())
    nbins_lo = len(midi_pool_lo)
    q_R_lo = quantize(R, nbins=nbins_lo, min=R.max(), max=R.min())
    delays = []
    notes = []
    for j in range(w.shape[0]):
        _no = []
        for i in range(w.shape[1]):
            if j in z_cross[i]:
                _no.append(midi_pool_hi[q_R_hi[j,i]])
            if j in phi_cross[i]:
                _no.append(midi_pool_lo[q_R_lo[j,i]])
        if len(_no) > 0:
            delays.append(t[j])
            notes.append(np.unique(_no).tolist())
    return delays, notes
def xyz_orbit_to_events(t, w, midi_pool_hi, midi_pool_lo):
    """
    Convert an orbit to MIDI events using Cartesian coordinates and rules.

    For Cartesian orbits...

    NOTE(review): unfinished stub. The event-building logic below is still
    commented out (copied from cyl_orbit_to_events) and the function
    currently returns None.

    Parameters
    ----------
    t : array_like
    w : array_like
    midi_pool : array_like
    """
    # NOTE(review): assumes w.T unpacks into exactly three components --
    # confirm the expected shape of w before enabling the code below.
    x,y,z = w.T

    # # variable length arrays
    # phi_cross = np.array([argrelmin(pphi)[0] for pphi in phi.T])
    # z_cross = np.array([argrelmin(zz**2)[0] for zz in z.T])

    # # quantize R orbit
    # nbins_hi = len(midi_pool_hi)
    # q_R_hi = quantize(R, nbins=nbins_hi, min=R.max(), max=R.min())

    # nbins_lo = len(midi_pool_lo)
    # q_R_lo = quantize(R, nbins=nbins_lo, min=R.max(), max=R.min())

    # delays = []
    # notes = []
    # for j in range(w.shape[0]):
    #     _no = []
    #     for i in range(w.shape[1]):
    #         if j in z_cross[i]:
    #             _no.append(midi_pool_hi[q_R_hi[j,i]])

    #         if j in phi_cross[i]:
    #             _no.append(midi_pool_lo[q_R_lo[j,i]])

    #     if len(_no) > 0:
    #         delays.append(t[j])
    #         notes.append(np.unique(_no).tolist())

    # return delays, notes
| mit | Python | |
be9d5ffc427c7303d6c85a091d2508021cc330dd | Add utility tests | blindstore/blindstore-old-scarab | tests/test_utils.py | tests/test_utils.py | import numpy as np
import scarab
from nose.tools import *
from common.utils import *
def test_binary():
    # binary() must return the bit vector of the integer, padded to `size`
    # with the least-significant bit last (1 -> [0,0,0,0,1], 2 -> [0,1,0]).
    a = binary(1, size=5)
    assert_true(np.all(a == [0, 0, 0, 0, 1]))
    a = binary(2, size=3)
    assert_true(np.all(a == [0, 1, 0]))


def test_encrypt_index():
    # Encrypting index 1 of 5 slots should yield one ciphertext per slot.
    pk, sk = scarab.generate_pair()
    c = encrypt_index(pk, 1, 5)
assert_true(len(c) == 5) | mit | Python | |
32c32d38d1305b92bcda07efaadd3b29dbf4ac31 | add piling-up | EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,zeyuanxy/hacker-rank,zeyuanxy/hacker-rank | python/containers/piling-up/piling-up.py | python/containers/piling-up/piling-up.py | from collections import deque
# For each test case, decide whether the row of side blocks can be stacked
# into one vertical pile of non-increasing sizes when blocks may only be
# taken from either end of the row (greedy two-pointer over a deque).
if __name__ == "__main__":
    T = int(raw_input())
    for _ in xrange(T):
        n = int(raw_input())  # declared length of the row (not otherwise used)
        sl = deque(map(int, raw_input().split()))
        ans = []
        # Always take the larger of the two ends first.
        while len(sl) > 0:
            if sl[0] >= sl[-1]:
                ans.append(sl[0])
                sl.popleft()
            else:
                ans.append(sl[-1])
                sl.pop()
        # A valid pile must come out in non-increasing order.
        if all(ans[i] >= ans[i + 1] for i in xrange(len(ans) - 1)):
            print 'Yes'
        else:
            print 'No'
5c57882c74cf8dad132b255a285566c7329a1569 | add google reader starred python example | gunderjt/libZotero,gunderjt/libZotero,gunderjt/libZotero,gunderjt/libZotero | pythonexamples/addGoogleReaderStarred.py | pythonexamples/addGoogleReaderStarred.py | #!/usr/bin/python
import sys
sys.path.append('../lib/py') # unnecessary if libZotero is installed separately
import json
import time
import argparse
from libZotero import zotero
# Command-line interface: Zotero library coordinates plus the path to the
# Google Reader "starred items" JSON export.
parser = argparse.ArgumentParser(description='Add starred items from google reader to your Zotero library.')
parser.add_argument('--libraryID', required=True)
parser.add_argument('--libraryType', required=True)
parser.add_argument('--apiKey', required=True)
parser.add_argument('--starredFile', required=True)
args = parser.parse_args()
print args

zlib = zotero.Library(args.libraryType, args.libraryID, '', args.apiKey)

# Create a collection to hold all imported items; abort on failure.
starredCollection = zlib.createCollection('Google Reader Starred')
if starredCollection.writeFailure != False:
    print "Error creating collection"
    print starredCollection.writeFailure['code'], ": ", starredCollection.writeFailure['message']
    sys.exit(1)

print "New Zotero collection created for google reader starred items with collectionKey ", starredCollection.get('collectionKey')

# Read the starred items from the JSON file and build a Zotero 'webpage'
# item for each one.
starredObject = json.loads(open(args.starredFile).read())
starredReaderItems = starredObject['items']
zItems = []
for readerItem in starredReaderItems:
    print "Reader starred item: ", readerItem['title']
    item = zlib.getTemplateItem('webpage')
    item.set('title', readerItem['title'])
    # 'published' is a Unix timestamp; store it as YYYY-MM-DD (UTC).
    pubtime = time.gmtime(readerItem['published'])
    item.set('date', time.strftime("%Y-%m-%d", pubtime))
    item.addCreator({'creatorType': 'author', 'name': readerItem['author']})
    # Reader exports carry the body either under 'content' or 'summary'.
    if 'content' in readerItem and 'content' in readerItem['content']:
        item.set('abstractNote', readerItem['content']['content'])
    elif 'summary' in readerItem and 'content' in readerItem['summary']:
        item.set('abstractNote', readerItem['summary']['content'])
    # Use the first text/html alternate link as the item URL.
    for alt in readerItem['alternate']:
        if alt['type'] == "text/html":
            item.set('url', alt['href'])
            break
    item.addToCollection(starredCollection)
    zItems.append(item)

# Make the request to save the items to the Zotero server.
writtenItems = zlib.items.writeItems(zItems)
# writeItems returns False if the entire request fails.
if writtenItems == False:
    print "Error writing items"
    lastResponse = zlib._lastResponse
    print "Code: {}".format(lastResponse.status_code)
    print "Message: {}".format(lastResponse.text)
    sys.exit()

# Individual items may also fail even if the request goes through, so
# check each one for errors.
for item in writtenItems:
    if item.writeFailure != False:
        print "Failed writing item {}\n".format(item.writeFailure['key'])
        print "Status code: {}\n".format(item.writeFailure['code'])
        print "Message: {}\n\n".format(item.writeFailure['message'])
    else:
        print "Item successfully created. itemKey: ", item.get('itemKey'), " - ", item.get('title')
| agpl-3.0 | Python | |
2e7252fab4667047c04b540040d5ad2287a73299 | Add management command to import geolocation data | rhertzog/parrainage,rhertzog/parrainage,rhertzog/parrainage | parrainage/app/management/commands/import_geoloc.py | parrainage/app/management/commands/import_geoloc.py | # Copyright 2017 Raphaël Hertzog
#
# This file is subject to the license terms in the LICENSE file found in
# the top-level directory of this distribution.
import argparse
from datetime import datetime
import csv
import logging
import sys
from django.core.management.base import BaseCommand
from django.db import transaction
from parrainage.app.models import Elu
class Command(BaseCommand):
    """Management command that imports geolocation data for cities.

    Reads a semicolon-separated CSV and copies address / zipcode /
    latitude / longitude columns onto every ``Elu`` whose ``city_code``
    matches the row.  The whole import runs in a single transaction.
    """

    help = 'Import a CSV file with data about mayors'

    def add_arguments(self, parser):
        # argparse opens the file itself (read-only, UTF-8).
        parser.add_argument('csvfile', help='Path of the CSV file',
                            type=argparse.FileType(mode='r', encoding='utf-8'))

    @transaction.atomic
    def handle(self, *args, **kwargs):
        # restkey='addresses': columns beyond the declared header are
        # collected into a list under the 'addresses' key.
        csvfile = csv.DictReader(kwargs['csvfile'], delimiter=';',
                                 restkey='addresses')
        for row in csvfile:
            done = False
            for elu in Elu.objects.filter(city_code=row['city_code']):
                # Empty CSV cells are normalised to '' rather than None.
                elu.city_address = '\n'.join(row.get('addresses', [])) or ''
                elu.city_zipcode = row['city_zipcode'] or ''
                elu.city_latitude = row['latitude'] or ''
                elu.city_longitude = row['longitude'] or ''
                elu.save()
                done = True
            if not done:
                # No Elu matched this row; report and keep importing.
                sys.stderr.write(
                    'Unknown city code: {}\n'.format(row['city_code']))
| mit | Python | |
3d7640a014d110f5600dc317b16585874934b3e7 | check updates for amazon linux | site24x7/plugins,site24x7/plugins,site24x7/plugins | check_updates_amazonlinux.py | check_updates_amazonlinux.py | #!/usr/bin/python
import subprocess,sys,json
METRIC_UNITS={'Available_Updates':'count','Security_Updates':'count'}
PLUGIN_VERSION="1"
HEARTBEAT="true"
class datacollector:
    """Site24x7 plugin collector reporting pending yum update counts."""

    def __init__(self):
        # Every payload carries the plugin version and heartbeat flag
        # expected by the monitoring agent.
        self.data={}
        self.data['plugin_version']=PLUGIN_VERSION
        self.data['heartbeat_required']=HEARTBEAT

    def metricCollector(self):
        try:
            # NOTE(review): `wc -l` counts every output line (headers and
            # blank lines included), so these numbers only approximate the
            # update counts -- confirm against actual `yum` output.
            self.updates=subprocess.check_output("yum check-update | wc -l",shell=True)
            self.security=subprocess.check_output("yum list-security |wc -l",shell=True)
            self.data["Available_Updates"]=int(self.updates)
            self.data["Security_Updates"]=int(self.security)
        except Exception as e:
            # Report the failure to the agent instead of raising.
            self.data["status"]=0
            self.data["msg"]=str(e)
        self.data['units']=METRIC_UNITS
        return self.data


if __name__=="__main__":
    update=datacollector()
    result=update.metricCollector()
    # The agent consumes the metrics as pretty-printed JSON on stdout.
    print(json.dumps(result,indent=4,sort_keys=True))
| bsd-2-clause | Python | |
73bb34ad6e481f5ebcc3623d7f63af87986d3cc7 | Create new package. (#5642) | LLNL/spack,LLNL/spack,tmerrick1/spack,tmerrick1/spack,tmerrick1/spack,iulian787/spack,iulian787/spack,lgarren/spack,lgarren/spack,LLNL/spack,lgarren/spack,tmerrick1/spack,EmreAtes/spack,skosukhin/spack,iulian787/spack,matthiasdiener/spack,LLNL/spack,lgarren/spack,matthiasdiener/spack,iulian787/spack,krafczyk/spack,skosukhin/spack,krafczyk/spack,mfherbst/spack,krafczyk/spack,skosukhin/spack,krafczyk/spack,LLNL/spack,EmreAtes/spack,matthiasdiener/spack,matthiasdiener/spack,tmerrick1/spack,krafczyk/spack,mfherbst/spack,skosukhin/spack,EmreAtes/spack,skosukhin/spack,EmreAtes/spack,lgarren/spack,mfherbst/spack,mfherbst/spack,EmreAtes/spack,matthiasdiener/spack,iulian787/spack,mfherbst/spack | var/spack/repos/builtin/packages/r-affycomp/package.py | var/spack/repos/builtin/packages/r-affycomp/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RAffycomp(RPackage):
    """The package contains functions that can be used to compare
    expression measures for Affymetrix Oligonucleotide Arrays."""

    homepage = "https://www.bioconductor.org/packages/affycomp/"
    url = "https://www.bioconductor.org/packages/release/bioc/src/contrib/affycomp_1.52.0.tar.gz"

    # Second argument is the md5 checksum of the release tarball.
    version('1.52.0', 'efa67e67701f2083fadbed99bf7d60b9')

    depends_on('r-biobase', type=('build', 'run'))
| lgpl-2.1 | Python | |
2ea0f3c172f80e8ca5dc55d1ab6416707d4ba485 | Add motor configuration | labs127/typhoon,hiraq/typhoon,hiraq/typhoon,labs127/typhoon | core/mongo.py | core/mongo.py | from motor.motor_tornado import MotorClient
def mongo_configurations(config):
    """Build a Motor (async MongoDB) client from the application config.

    The connection string is read from the ``MONGO_URI`` key of *config*.
    """
    uri = config.get('MONGO_URI')
    return MotorClient(uri)
| bsd-3-clause | Python | |
679b94772232e20095692361a43d48834ed383f3 | Create flight.py | vkmguy/Flights-and-Hotels,VIkramx89/Flights-and-Hotels | flight.py | flight.py | '''
Created on Aug 12, 2015
@author: sadhna01
'''
'''
Created on Aug 12, 2015
@author: sahil.singla01
'''
class Flight:
    """Plain data holder describing a single flight.

    Exposes explicit ``get_*``/``set_*`` accessors for the flight id,
    name, route endpoints, times, duration and fares.  Numeric fields
    (duration and both fares) default to 0; everything else to None.
    """

    # Field names backing the accessors; the last three default to 0.
    _FIELDS = ('flight_id', 'flight_name', 'source', 'destination',
               'departure_time', 'arrival_time', 'duration',
               'adult_fare', 'child_fare')

    def __init__(self):
        # All state lives in one private dict keyed by field name.
        self.__values = {name: None for name in self._FIELDS}
        for numeric in ('duration', 'adult_fare', 'child_fare'):
            self.__values[numeric] = 0

    # --- getters -------------------------------------------------------
    def get_flight_id(self):
        return self.__values['flight_id']

    def get_flight_name(self):
        return self.__values['flight_name']

    def get_source(self):
        return self.__values['source']

    def get_destination(self):
        return self.__values['destination']

    def get_departure_time(self):
        return self.__values['departure_time']

    def get_arrival_time(self):
        return self.__values['arrival_time']

    def get_duration(self):
        return self.__values['duration']

    def get_adult_fare(self):
        return self.__values['adult_fare']

    def get_child_fare(self):
        return self.__values['child_fare']

    # --- setters -------------------------------------------------------
    def set_flight_id(self, value):
        self.__values['flight_id'] = value

    def set_flight_name(self, value):
        self.__values['flight_name'] = value

    def set_source(self, value):
        self.__values['source'] = value

    def set_destination(self, value):
        self.__values['destination'] = value

    def set_departure_time(self, value):
        self.__values['departure_time'] = value

    def set_arrival_time(self, value):
        self.__values['arrival_time'] = value

    def set_duration(self, value):
        self.__values['duration'] = value

    def set_adult_fare(self, value):
        self.__values['adult_fare'] = value

    def set_child_fare(self, value):
        self.__values['child_fare'] = value
| epl-1.0 | Python | |
be4abd8d3b54ab66f89c88e56cb948d5bf5f5725 | Add the static info gatherer | mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge | stoneridge_info_gatherer.py | stoneridge_info_gatherer.py | #!/usr/bin/env python
try:
import configparser
except ImportError:
import ConfigParser as configparser
import json
import os
import platform
import stoneridge
class StoneRidgeInfoGatherer(object):
    """Collects build and machine metadata and writes it to info.json."""

    def run(self):
        # application.ini lives next to the binary under test and
        # describes it in INI form (App section: Name/Version/etc.).
        info_file = os.path.join(stoneridge.bindir, 'application.ini')
        cp = configparser.SafeConfigParser()
        cp.read([info_file])

        build_info = {}
        build_info['name'] = cp.get('App', 'Name')
        build_info['version'] = cp.get('App', 'Version')
        build_info['revision'] = cp.get('App', 'SourceStamp')
        build_info['branch'] = ''
        build_info['id'] = cp.get('App', 'BuildID')

        machine_info = {}
        machine_info['name'] = platform.node()
        machine_info['os'] = stoneridge.os_name
        machine_info['osversion'] = stoneridge.os_version
        machine_info['platform'] = platform.machine()

        # Layout expected downstream; 'testrun' is filled in later.
        info = {'test_machine':machine_info,
                'test_build':build_info,
                'testrun':{}}

        with file(os.path.join(stoneridge.outdir, 'info.json'), 'w') as f:
            json.dump(info, f)


@stoneridge.main
def main():
    parser = stoneridge.ArgumentParser()
    args = parser.parse_arguments()

    info_gatherer = StoneRidgeInfoGatherer()
    info_gatherer.run()
| mpl-2.0 | Python | |
def verify(n, m, b, first, second):
    """Check that two digit strings collide under the weighted sum mod m.

    Digit ``d`` at position ``i`` contributes ``(d + 1) * b**(n - i)``.
    Both strings are evaluated over ``len(first)`` positions, and the two
    sums must be congruent modulo *m*.
    """
    weights = [b ** (n - i) for i in range(len(first))]
    total_first = sum((int(first[i]) + 1) * w for i, w in enumerate(weights))
    total_second = sum((int(second[i]) + 1) * w for i, w in enumerate(weights))
    return total_first % m == total_second % m
# Cross-check coliziune.out against coliziune.in: for every test case the
# two digit strings reported in the output must collide modulo m.
with open('coliziune.in') as fin, open('coliziune.out') as fout:
    input_lines = fin.readlines()[1:]  # skip the test-case count header
    output_lines = fout.readlines()
    for line_number in range(len(input_lines)):
        n, m, b = [int(x) for x in input_lines[line_number].strip().split()]
        first, second = output_lines[line_number].strip().split()
        if verify(n, m, b, first, second):
            print("{0} OK".format(line_number))
        else:
            print("{0} Wrong".format(line_number))
| mit | Python | |
3dfe72a6da11e8223a66e86fabf67146e1d4cb1f | Add base error type for user code | diyan/falcon_seed | app/common/errors.py | app/common/errors.py | from __future__ import unicode_literals, absolute_import, division
class BaseAppError(Exception):
    """Common base class for all application-defined exceptions.

    User code should raise (or subclass) this type so callers can catch
    every domain error with a single ``except BaseAppError``.
    """
| mit | Python | |
f8373cefae048b8d60db06d9527c45375d49549e | Add locust test script. | IrimieBogdan/cloud-controller,IrimieBogdan/cloud-controller | testing/cloudControllerLocustTester.py | testing/cloudControllerLocustTester.py | from locust import HttpLocust, TaskSet, task
class WebsiteTasks(TaskSet):
    """Task set with a single GET against the /service endpoint."""

    @task
    def index(self):
        self.client.get("/service")


class WebsiteUser(HttpLocust):
    """Simulated user running WebsiteTasks."""

    task_set = WebsiteTasks
    # min_wait == max_wait == 1000 ms: a fixed one-second pause between
    # consecutive tasks for every simulated user.
    min_wait = 1000
    max_wait = 1000
| apache-2.0 | Python | |
85c6aad62db7c7c5daa47eff871fbd1483c8dff9 | Add a gdb viewer for skbitmap. | google/skia,rubenvb/skia,HalCanary/skia-hc,rubenvb/skia,Hikari-no-Tenshi/android_external_skia,google/skia,HalCanary/skia-hc,rubenvb/skia,rubenvb/skia,Hikari-no-Tenshi/android_external_skia,HalCanary/skia-hc,google/skia,Hikari-no-Tenshi/android_external_skia,rubenvb/skia,Hikari-no-Tenshi/android_external_skia,google/skia,rubenvb/skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,HalCanary/skia-hc,google/skia,HalCanary/skia-hc,Hikari-no-Tenshi/android_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,Hikari-no-Tenshi/android_external_skia,HalCanary/skia-hc,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,rubenvb/skia,HalCanary/skia-hc,google/skia,rubenvb/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,rubenvb/skia,HalCanary/skia-hc,HalCanary/skia-hc,rubenvb/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,Hikari-no-Tenshi/android_external_skia,Hikari-no-Tenshi/android_external_skia | tools/gdb/bitmap.py | tools/gdb/bitmap.py | # Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Visualize bitmaps in gdb.
(gdb) source <path to this file>
(gdb) sk_bitmap <symbol>
This should pop up a window with the bitmap displayed.
Right clicking should bring up a menu, allowing the
bitmap to be saved to a file.
"""
import gdb
from enum import Enum
try:
from PIL import Image
except ImportError:
import Image
class ColorType(Enum):
    # Decodes the fColorType field read from an SkBitmap's fInfo below.
    unknown = 0
    alpha_8 = 1
    rgb_565 = 2
    argb_4444 = 3
    rgba_8888 = 4
    bgra_8888 = 5
    gray_8 = 6
    rgba_F16 = 7


class AlphaType(Enum):
    # Decodes the fAlphaType field read from an SkBitmap's fInfo below.
    unknown = 0
    opaque = 1
    premul = 2
    unpremul = 3
class sk_bitmap(gdb.Command):
    """Displays the content of an SkBitmap image."""

    def __init__(self):
        # Register the `sk_bitmap` command; arguments complete as symbols.
        super(sk_bitmap, self).__init__('sk_bitmap',
                                        gdb.COMMAND_SUPPORT,
                                        gdb.COMPLETE_FILENAME)

    def invoke(self, arg, from_tty):
        # `arg` names a variable in the selected frame; it must resolve
        # (through typedefs) to an SkBitmap.
        frame = gdb.selected_frame()
        val = frame.read_var(arg)
        if str(val.type.strip_typedefs()) == 'SkBitmap':
            # Pull raw pixel pointer and geometry out of the bitmap.
            pixels = val['fPixels']
            row_bytes = val['fRowBytes']
            info = val['fInfo']
            width = info['fWidth']
            height = info['fHeight']
            color_type = info['fColorType']
            alpha_type = info['fAlphaType']

            # Copy the pixel rows out of the debuggee's address space.
            process = gdb.selected_inferior()
            memory_data = process.read_memory(pixels, row_bytes * height)
            size = (width, height)
            image = None
            # See Unpack.c for the values understood after the "raw" parameter.
            if color_type == ColorType.bgra_8888.value:
                if alpha_type == AlphaType.unpremul.value:
                    image = Image.frombytes("RGBA", size, memory_data.tobytes(),
                                            "raw", "BGRA", row_bytes, 1)
                elif alpha_type == AlphaType.premul.value:
                    # RGBA instead of RGBa, because Image.show() doesn't work with RGBa.
                    image = Image.frombytes("RGBA", size, memory_data.tobytes(),
                                            "raw", "BGRa", row_bytes, 1)
            if image:
                # Fails on premultiplied alpha, it cannot convert to RGB.
                image.show()
            else:
                # Unsupported color/alpha combination: report instead of crash.
                print ("Need to add support for %s %s." % (
                   str(ColorType(int(color_type))),
                   str(AlphaType(int(alpha_type)))
                ))


# Instantiating the class registers the command with gdb.
sk_bitmap()
| bsd-3-clause | Python | |
5a2394f8445350387adc30dd5bc818971aefc91d | Add work for Exercise 25. | jaredmanning/learning,jaredmanning/learning | lpthw/ex25.py | lpthw/ex25.py | def break_words(stuff):
"""This function will brea up words for us."""
words = stuff.split(' ')
return words
def sort_words(words):
"""Sorts the words."""
return sorted(words)
def print_first_word(words):
"""Prints the first word after popping it off."""
word = words.pop(0)
print word
def print_last_word(words):
"""Prints the last word after popping it off."""
word = words.pop(-1)
print word
def sort_sentence(sentence):
"""Takes in a full senctence and returns the sorted words."""
words = break_words(sentence)
return sort_words(words)
def print_first_and_last(sentence):
"""Prints the first and last words of a sentence."""
words = break_words(sentence)
print_first_word(words)
print_last_word(words)
def print_first_and_last_sorted(sentence):
"""Sorts the words the prints the first and last one."""
words = sort_sentence(sentence)
print_first_word(words)
print_last_word(words)
| mit | Python | |
f8e78cdf12142b214f7f6513dc858abffc133789 | add test suite for issues and bugs | colinhiggs/pyramid-jsonapi,colinhiggs/pyramid-jsonapi | test_project/test_project/test_bugs.py | test_project/test_project/test_bugs.py | import unittest
import transaction
import testing.postgresql
import webtest
import urllib
from pyramid.paster import get_app
from sqlalchemy import create_engine
from .models import (
DBSession,
Base
)
from . import test_data
class TestBugs(unittest.TestCase):
    '''Tests for issues.

    https://github.com/colinhiggs/pyramid-jsonapi/issues
    '''

    @classmethod
    def setUpClass(cls):
        '''Create a test DB and import data.'''
        # Create a new database somewhere in /tmp
        cls.postgresql = testing.postgresql.Postgresql(port=7654)
        cls.engine = create_engine(cls.postgresql.url())
        DBSession.configure(bind=cls.engine)

        # WSGI app under test, wrapped for functional testing.
        cls.app = get_app('testing.ini')
        cls.test_app = webtest.TestApp(cls.app)

    @classmethod
    def tearDownClass(cls):
        '''Throw away test DB.'''
        DBSession.close()
        cls.postgresql.stop()

    def setUp(self):
        # Fresh schema plus fixture data inside a transaction per test.
        Base.metadata.create_all(self.engine)
        # Add some basic test data.
        test_data.add_to_db()
        transaction.begin()

    def tearDown(self):
        # Roll back whatever the test did and drop the schema.
        transaction.abort()
        Base.metadata.drop_all(self.engine)
| agpl-3.0 | Python | |
98a50ad5cbcf6239d9ebcecb13d99e6078c93668 | add plot file | DaMSL/K3,DaMSL/K3 | tools/scripts/mosaic/plots/plot_all.py | tools/scripts/mosaic/plots/plot_all.py | #!/usr/bin/env python
# Plot the output of process-all-latencies.rb
import matplotlib.pyplot as plt
import argparse
import yaml
import os
queries = ['1', '3', '4', '6', '11a', '12', '17']
nodes = [1, 4, 8, 16, 31]
scale_factors = [0.1, 1, 10, 100]
tuple_sizes = {0.1: 8 * 10**5, 1: 8 * 10**6, 10: 8 * 10**7, 100: 8*10**8}
batches = [100, 1000, 10000]
def scalability(args, r_file, workdir):
    """Plot throughput (tuples/sec) vs worker nodes per query and scale factor.

    *r_file* is the parsed YAML results list.  Averaged throughputs are
    dumped to <workdir>/plot_throughput.txt and one PNG per query is
    written under <workdir>/plots.
    """
    results = {q:{sf:{nd:[] for nd in nodes} for sf in scale_factors} for q in queries}
    # create the graphs for scalability test
    for l in r_file:
        # Only successful scalability entries carry a 'time' field.
        if not 'time' in l or l[':exp'] != ':scalability':
            continue
        key = (l[':q'], l[':sf'], l[':nd'])  # NOTE(review): unused
        time_sec = float(l['time']) / 1000  # presumably 'time' is in ms
        tup_per_sec = tuple_sizes[l[':sf']] / time_sec
        gb_per_sec = l[':sf'] / time_sec  # NOTE(review): computed but unused
        results[l[':q']][l[':sf']][l[':nd']].append(tup_per_sec)

    results2 = {q:{sf:{nd:None for nd in nodes} for sf in scale_factors} for q in queries}
    # average out the results (missing combinations stay None)
    for k1,v1 in results.iteritems():
        for k2,v2 in v1.iteritems():
            for k3,v3 in v2.iteritems():
                if v3 != []:
                    results2[k1][k2][k3] = sum(v3)/len(v3)

    # dump results
    with open(os.path.join(workdir, 'plot_throughput.txt'), 'w') as f:
        yaml.dump(results2, f)

    out_path = os.path.join(workdir, 'plots')
    if not os.path.exists(out_path):
        os.mkdir(out_path)

    # create plots per query
    for q,v1 in results2.iteritems():
        # New Figure per query
        plt.figure()
        f, ax = plt.subplots()
        # A line per scale factor
        for sf, v2 in v1.iteritems():
            plt.plot(nodes, [v2[n] for n in nodes], label="Scale Factor {}".format(sf))
        # Labels, etc.
        plt.legend(loc='lower right')
        plt.title("Query {} Throughput".format(q))
        plt.xlabel("Worker Nodes")
        plt.ylabel("Tuples/sec")
        # Save to file
        plt.savefig(os.path.join(out_path, "q{}_throughput.png".format(q)))
def main():
    """Parse CLI options, load the YAML results file and dispatch plotting."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-f', '--results-file', required=True, dest='results_file',
                        help='Results file from which to read')
    parser.add_argument('-e', '--experiment', required=True,
                        help='Experiment (s, l, m)')
    args = parser.parse_args()
    if args is None:
        # NOTE(review): parse_args() never returns None, so this guard is
        # effectively dead code.
        parser.print_help()
        exit(1)

    with open(args.results_file) as f:
        results = yaml.load(f)
    workdir = os.path.dirname(args.results_file)
    print("workdir {}".format(workdir))

    tests = results[':tests']  # NOTE(review): read but never used below
    # Only the scalability ('s') experiment is implemented so far.
    if args.experiment == 's':
        scalability(args, results, workdir)


if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
650b0db8f27f90d1092ffd4295ec154c33f25cde | test commit | butyesbutno/pyappium | pid.py | pid.py | #!/usr/bin/env python
#coding: utf-8
import jpype,time,os
#开启JVM,且指定jar包位置
jarpath = os.path.join(os.path.abspath('.'), '/work/appiumframework/apps/')
print(jarpath, jpype.getDefaultJVMPath())
jpype.startJVM(jpype.getDefaultJVMPath(), "-ea", "-Djava.ext.dirs=%s" % jarpath)
print("toe he")
#引入java程序中的类.路径应该是项目中的package包路径
javaClass = jpype.JClass('ChromedriverHandler.chromeDriverHandlerThread')
#这一步就是具体执行类中的函数了
print("before started")
javaInstance = javaClass.start()
javaInstance = javaClass.stop()
print("started")
time.sleep(6)
jpype.shutdownJVM()
print("end")
| apache-2.0 | Python | |
abab11646518f78019d44542c277cadfbb354c1a | add computation of temperature within a season | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | scripts/feature/temperature_in_season.py | scripts/feature/temperature_in_season.py | import psycopg2
import numpy
import datetime
# For Des Moines (IA2203), 1893-2012: how often does a given daily high
# temperature fall within the year's 91 warmest consecutive days, and how
# often within calendar Jun/Jul/Aug?
COOP = psycopg2.connect(database='coop', host='iemdb', user='nobody')
cursor = COOP.cursor()

# 50 to 120: histogram bins indexed by (high temperature - 50) degF.
in_summer = numpy.zeros( (70,))
counts = numpy.zeros( (70,))
in_jja = numpy.zeros( (70,))
cofreq = numpy.zeros( (70,))
for year in range(1893,2013):
    cursor.execute("""
    SELECT extract(doy from day) as doy, high, month from alldata_ia where
    station = 'IA2203' and year = %s ORDER by doy
    """, (year,))
    data = numpy.zeros( (366,), 'f')
    for row in cursor:
        data[ int(row[0])-1] = row[1]
        if row[2] in [6,7,8]:
            in_jja[ int(row[1]) - 50 ] += 1.0
    # Find hottest period: the 91-day window with the largest high-temp sum.
    running = 0
    idx0 = None
    for i in range(0,366-91):
        total = numpy.sum( data[i:i+91] )
        if total > running:
            running = total
            idx0 = i
    idx1 = idx0 + 91
    for i, high in enumerate(data):
        # Ignore highs below the 50 degF bin floor (and missing days left at 0).
        if high < 50:
            continue
        counts[ high - 50 ] += 1.0
        if i >= idx0 and i < idx1:
            in_summer[ high - 50 ] += 1.0
        ts = datetime.datetime(year, 1, 1) + datetime.timedelta(days=i)
        if ts.month in [6,7,8]:
            cofreq[ high - 50 ] += 1.0

import matplotlib.pyplot as plt

(fig, ax) = plt.subplots(1,1)

# Frequencies as a percentage of all events at each temperature.
ax.bar(numpy.arange(50,120)-0.4, in_summer / counts * 100., fc='r', ec='r',
       label='in 91 warmest days')
ax.plot(numpy.arange(50,120), in_jja / counts * 100.0, lw=3, zorder=2, c='k',
        label='During Jun/Jul/Aug')
ax.plot(numpy.arange(50,120), cofreq / counts * 100.0, lw=3, zorder=2, c='b',
        label='Both')
ax.legend(loc='best')
ax.set_xlim(49.5, 111)
ax.set_yticks(numpy.arange(0,101,10))
ax.grid(True)
ax.set_ylabel("Percentage of High Temperature Events")
ax.set_xlabel("Daily High Temperature $^{\circ}\mathrm{F}$")
ax.set_title("1893-2012 Des Moines Daily High Temperature\nFrequency of High Temp within period of 91 warmest days")
fig.savefig('test.svg')
import iemplot
iemplot.makefeature('test') | mit | Python | |
6e62c6e65376c890adb4c3f56159ba8cc857d565 | Create new_file.py | kneeks/pull-request-demonstration,rmanzano-sps/pull-request-demonstration,is210-faculty/pull-request-demonstration,rmanzano-sps/pull-request-demonstration,is210-faculty/pull-request-demonstration,kneeks/pull-request-demonstration | new_file.py | new_file.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""A new file"""

# Placeholder script; prints a greeting when run.
print 'New files rule!'
| mpl-2.0 | Python | |
9cd2eb451f14656668953db13acff7911047bf9f | Add a tool to be able to diff sln files | witwall/gyp,witwall/gyp,witwall/gyp,witwall/gyp,witwall/gyp | tools/pretty_sln.py | tools/pretty_sln.py | #!/usr/bin/python2.5
# Copyright 2009 Google Inc.
# All Rights Reserved.
"""Prints the information in a sln file in a diffable way.
It first outputs each projects in alphabetical order with their
dependencies.
Then it outputs a possible build order.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import re
import sys
def BuildProject(project, built, projects, deps):
  """Print *project* after (recursively) printing its unbuilt dependencies.

  Appends every printed project to *built* so it is emitted only once.
  """
  # if all dependencies are done, we can build it, otherwise we try to build the
  # dependency.
  # This is not infinite-recursion proof.
  for dep in deps[project]:
    if dep not in built:
      BuildProject(dep, built, projects, deps)

  print project
  built.append(project)
def ParseSolution(solution_file):
  """Parse a Visual Studio .sln file.

  Returns (projects, dependencies): projects maps project name to
  [path, clsid]; dependencies maps project name to the sorted list of
  names of the projects it depends on.
  """
  # All projects, their clsid and paths.
  projects = dict()

  # A list of dependencies associated with a project.
  dependencies = dict()

  # Regular expressions that matches the SLN format.
  # The first line of a project definition.
  begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
                              '}"\) = "(.*)", "(.*)", "(.*)"$'))
  # The last line of a project definition.
  end_project = re.compile('^EndProject$')
  # The first line of a dependency list.
  begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$')
  # The last line of a dependency list.
  end_dep = re.compile('EndProjectSection$')
  # A line describing a dependency.
  dep_line = re.compile(' *({.*}) = ({.*})$')

  in_deps = False
  solution = open(solution_file)
  for line in solution:
    results = begin_project.search(line)
    if results:
      # We remove "_gyp" from the names because it helps to diff them.
      current_project = results.group(1).replace('_gyp', '')
      projects[current_project] = [results.group(2).replace('_gyp', ''),
                                   results.group(3)]
      dependencies[current_project] = []
      continue

    results = end_project.search(line)
    if results:
      current_project = None
      continue

    results = begin_dep.search(line)
    if results:
      in_deps = True
      continue

    results = end_dep.search(line)
    if results:
      in_deps = False
      continue

    results = dep_line.search(line)
    if results and in_deps:
      dependencies[current_project].append(results.group(1))
      continue

  # Change all dependencies clsid to name instead.
  for project in dependencies:
    # For each dependency in this project
    new_dep_array = []
    for dep in dependencies[project]:
      # Look for the project name matching this clsid
      for project_info in projects:
        if projects[project_info][1] == dep:
          new_dep_array.append(project_info)
    dependencies[project] = sorted(new_dep_array)

  return (projects, dependencies)
def PrintDependencies(projects, deps):
  """Print every project (alphabetically) with its path and dependencies."""
  print "---------------------------------------"
  print "Dependencies for all projects"
  print "---------------------------------------"
  print "-- --"

  for (project, dep_list) in sorted(deps.items()):
    print "Project : %s" % project
    print "Path : %s" % projects[project][0]
    if dep_list:
      for dep in dep_list:
        print " - %s" % dep
    print ""

  print "-- --"


def PrintBuildOrder(projects, deps):
  """Print one possible dependency-respecting build order."""
  print "---------------------------------------"
  print "Build order "
  print "---------------------------------------"
  print "-- --"

  built = []
  for (project, dep_list) in sorted(deps.items()):
    if project not in built:
      BuildProject(project, built, projects, deps)

  print "-- --"
def main():
  """Entry point: parse the .sln named on the command line and print it."""
  # check if we have exactly 1 parameter.
  if len(sys.argv) != 2:
    print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
    return

  (projects, deps) = ParseSolution(sys.argv[1])
  PrintDependencies(projects, deps)
  PrintBuildOrder(projects, deps)


if __name__ == '__main__':
  main()
| bsd-3-clause | Python | |
ef56a72b73bd408bb51d235b2274eef8766e0277 | add simple watch_notify stress test | t-miyamae/teuthology,caibo2014/teuthology,dreamhost/teuthology,SUSE/teuthology,michaelsevilla/teuthology,ceph/teuthology,t-miyamae/teuthology,SUSE/teuthology,ceph/teuthology,tchaikov/teuthology,robbat2/teuthology,robbat2/teuthology,ktdreyer/teuthology,SUSE/teuthology,dmick/teuthology,zhouyuan/teuthology,michaelsevilla/teuthology,ivotron/teuthology,tchaikov/teuthology,zhouyuan/teuthology,yghannam/teuthology,ktdreyer/teuthology,dreamhost/teuthology,caibo2014/teuthology,ivotron/teuthology,yghannam/teuthology,dmick/teuthology,dmick/teuthology | teuthology/task/watch_notify_stress.py | teuthology/task/watch_notify_stress.py | import contextlib
import logging
from ..orchestra import run
log = logging.getLogger(__name__)
@contextlib.contextmanager
def task(ctx, config):
    """
    Run test_stress_watch

    The config should be as follows:

    test_stress_watch:
        clients: [client list]

    example:

    tasks:
    - ceph:
    - test_stress_watch:
        clients: [client.0]
    - interactive:
    """
    log.info('Beginning test_stress_watch...')
    assert isinstance(config, dict), \
        "please list clients to run on"
    testsnaps = {}

    (mon,) = ctx.cluster.only('mon.0').remotes.iterkeys()

    remotes = []

    for role in config.get('clients', ['client.0']):
        assert isinstance(role, basestring)
        PREFIX = 'client.'
        assert role.startswith(PREFIX)
        id_ = role[len(PREFIX):]
        (remote,) = ctx.cluster.only(role).remotes.iterkeys()
        remotes.append(remote)

        # Stage a copy of ceph.conf where the test binary expects it.
        remote.run(
            args=[
                'cp',
                '/tmp/cephtest/ceph.conf',
                '/tmp/cephtest/data/ceph.conf',
                ],
            logger=log.getChild('test_stress_watch.{id}'.format(id=id_)),
            wait=True,
            )

        # Launch test_stress_watch in the background (wait=False) with the
        # client id/conf exported and librados preloaded.
        proc = remote.run(
            args=[
                '/bin/sh', '-c',
                " ".join([
                        'cd', '/tmp/cephtest/data;',
                        'export CEPH_CLIENT_ID={id_}; export CEPH_CONF=ceph.conf; LD_PRELOAD=/tmp/cephtest/binary/usr/local/lib/librados.so.2 /tmp/cephtest/binary/usr/local/bin/test_stress_watch'.format(
                            id_=id_),
                        ])
                ],
            logger=log.getChild('testsnaps.{id}'.format(id=id_)),
            stdin=run.PIPE,
            wait=False
            )
        testsnaps[id_] = proc

    try:
        yield
    finally:
        # Clean up the staged conf on every remote, then join the
        # background test processes.
        for i in remotes:
            # NOTE(review): id_ here is whatever the setup loop left it as,
            # so all cleanup logs carry the last client's id.
            i.run(
                args=[
                    'rm',
                    '/tmp/cephtest/data/ceph.conf'
                    ],
                logger=log.getChild('testsnaps.{id}'.format(id=id_)),
                wait=True,
                )

        log.info('joining watch_notify_stress')
        run.wait(testsnaps.itervalues())
| mit | Python | |
1c68c6b3da5677ce0847eb563bfea0ed3d8810a0 | add language detection tween (from PATH_INFO) | silenius/amnesia,silenius/amnesia,silenius/amnesia | amnesia/translations/tweens.py | amnesia/translations/tweens.py | # -*- coding: utf-8 -*-
def path_info_lang_tween_factory(handler, registry):
    """Tween factory that derives the request locale from the URL path.

    If the first path segment names a supported language it is consumed
    (popped off PATH_INFO) and stored on the request as ``_LOCALE_``;
    otherwise the locale defaults to English.  A request that already
    carries ``_LOCALE_`` is left untouched.
    """

    supported = ('en', 'fr')

    def path_info_lang_tween(request):
        # Only derive a locale when nothing upstream set one already.
        if not hasattr(request, '_LOCALE_'):
            if request.path_info_peek() in supported:
                # Consume the language segment so downstream routing
                # never sees it.
                lang = request.path_info_pop()
            else:
                lang = 'en'
            setattr(request, '_LOCALE_', lang)
        return handler(request)

    return path_info_lang_tween
| bsd-2-clause | Python | |
38d340c2a866a445160393029fa3b0c07131818a | Create test-tport.py | arienchen/pytibrv | test/python/test-tport.py | test/python/test-tport.py | import os
import sys
from tibrv.tport import *
from tibrv.status import *
from tibrv.tport import *
import unittest
class TransportTest(unittest.TestCase):
    """Integration tests for the TibrvTx transport lifecycle.

    NOTE(review): these exercise the TIBCO Rendezvous C binding and
    presumably need a reachable rvd daemon -- confirm before running in CI.
    """

    @classmethod
    def setUpClass(cls):
        # Initialize the Rendezvous machinery once for the whole class.
        status = Tibrv.open()
        if status != TIBRV_OK:
            raise TibrvError(status)

    @classmethod
    def tearDownClass(cls):
        Tibrv.close()

    def test_create(self):
        """Create/destroy a transport and verify its attribute behavior."""
        tx = TibrvTx()
        status = tx.create(None, None, None)
        self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))

        # Defaults when created with all-None parameters.
        self.assertEqual('', tx.service)
        self.assertEqual('', tx.network)
        self.assertEqual('7500', tx.daemon)            # Default is 7500
        self.assertIsNone(tx.description)

        tx.destroy();

        # transport destroyed
        # should return None
        self.assertIsNone(tx.service)
        self.assertIsNone(tx.network)
        self.assertIsNone(tx.daemon)
        self.assertIsNone(tx.description)

        del tx

        # Recreate with explicit service/network/daemon parameters.
        tx = TibrvTx()
        service = '12345'
        network=';225.1.1.1'
        daemon='2000'
        status = tx.create(service, network, daemon)
        self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))

        self.assertEqual(service, tx.service)
        self.assertEqual(network, tx.network)
        self.assertEqual(daemon, tx.daemon)
        self.assertIsNone(tx.description)

        # Description is writable after creation.
        tx.description = 'TEST'
        self.assertEqual('TEST', tx.description)

        # A live transport can always mint an inbox subject.
        subj = tx.inbox()
        self.assertIsNotNone(subj)

        status = tx.reliability(100)
        self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))

        #tx.destroy();
        del tx

    def test_send(self):
        """Fire-and-forget sends, with and without an override subject."""
        tx = TibrvTx()
        status = tx.create(None, None, None)
        self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))

        msg = TibrvMsg()
        msg.setStr('DATA', 'TEST')

        msg.sendSubject = 'TEST.A'
        status = tx.send(msg)
        self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))

        # Sending with an explicit subject argument overrides sendSubject.
        msg.replySubject = 'TEST.ACK'
        status = tx.send(msg, 'TEST.B')
        self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))

        del msg
        del tx

    def test_request(self):
        """Request/reply with no responder: expect a timeout, not a reply."""
        tx = TibrvTx()
        status = tx.create(None, None, None)
        self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))

        msg = TibrvMsg()
        msg.setStr('DATA', 'TEST')
        msg.sendSubject = 'TEST.A'

        status, ack = tx.sendRequest(msg, 1.0)
        #self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))
        # Nothing is listening on TEST.A, so the 1-second request times out.
        self.assertEqual(TIBRV_TIMEOUT, status, TibrvStatus.text(status))
        self.assertIsNone(ack)

        del msg
        del tx
# Run the transport suite directly; verbosity=2 prints each test name.
if __name__ == "__main__" :
    unittest.main(verbosity=2)
| bsd-3-clause | Python | |
b45b425414da5fc65f171b6f81c5983aade98fb6 | Add run.py | UNH-CORE/RM2-tow-tank | run.py | run.py | # -*- coding: utf-8 -*-
"""
This script generates all the relevant figures from the experiment.
"""
from Modules.processing import *
from Modules.plotting import *
def main(save=True, savetype=".pdf"):
    """Generate the relevant figures from the experiment.

    The save flag and figure file type were hard-coded locals; they are
    now parameters with the original values as defaults, so ``main()``
    behaves exactly as before for existing callers.

    Parameters
    ----------
    save : bool
        Whether to write the generated figures to disk.
    savetype : str
        File extension for saved figures, e.g. ``".pdf"`` or ``".png"``.
    """
    plot_perf_curves(save=save, savetype=savetype)
    plot_perf_re_dep(save=save, savetype=savetype, errorbars=True,
                     dual_xaxes=True)
if __name__ == "__main__":
    # Ensure the output directory exists before any figure is saved.
    # NOTE(review): check-then-create is not atomic, and `os` is presumably
    # provided by the star imports above -- confirm both if this matters.
    if not os.path.isdir("Figures"):
        os.mkdir("Figures")
main() | mit | Python | |
103f1fe13a8a807a0bfc93df5e4b1a17281e28b4 | Create tool instance and run from command line | BakeCode/performance-testing,BakeCode/performance-testing | run.py | run.py | #!/usr/bin/env python
from performance_testing.command_line import Tool
def main(config='config.yml', output_directory='result'):
    """Run the performance-testing tool.

    The config path and output directory were hard-coded; they are now
    parameters with the original values as defaults, so ``main()`` is
    unchanged for existing callers.

    Parameters
    ----------
    config : str
        Path to the YAML configuration file.
    output_directory : str
        Directory where results are written.
    """
    tool = Tool(config=config, output_directory=output_directory)
    tool.run()
# Allow the module to double as a command-line entry point.
if __name__ == '__main__':
    main()
| mit | Python | |
0700ce9be37ada187105c5f38983092b6bba9762 | Test new backend methods | automl/auto-sklearn,automl/auto-sklearn,hmendozap/auto-sklearn,hmendozap/auto-sklearn | test/util/test_backend.py | test/util/test_backend.py | # -*- encoding: utf-8 -*-
from __future__ import print_function
import unittest
import mock
from autosklearn.util.backend import Backend
class BackendModelsTest(unittest.TestCase):
    """Unit tests for Backend model loading, with pickle and open() mocked.

    NOTE(review): the ``__builtin__.open`` patch target is Python 2 only;
    on Python 3 it would have to be ``builtins.open``.
    """

    class BackendStub(Backend):
        """Backend instantiated without its normal constructor arguments."""

        def __init__(self, model_directory):
            # Rebinding __class__ makes the stub report/behave as a plain
            # Backend while skipping Backend.__init__ entirely.
            self.__class__ = Backend
            self.get_model_dir = lambda: model_directory

    def setUp(self):
        self.model_directory = '/model_directory/'
        self.backend = self.BackendStub(self.model_directory)
        # NOTE(review): this rebinds get_model_dir on the stub *class* after
        # the instance already got its own per-instance lambda above, so the
        # instance attribute shadows this value -- confirm which is intended.
        self.BackendStub.get_model_dir = lambda x: 42

    @mock.patch('six.moves.cPickle.load')
    @mock.patch('__builtin__.open')
    def test_loads_model_by_seed_and_id(self, openMock, pickleLoadMock):
        """load_model_by_seed_and_id unpickles <dir>/<seed>.<idx>.model."""
        seed = 13
        idx = 17
        expected_model = self._setup_load_model_mocks(openMock, pickleLoadMock, seed, idx)

        actual_model = self.backend.load_model_by_seed_and_id(seed, idx)

        self.assertEqual(expected_model, actual_model)

    @mock.patch('six.moves.cPickle.load')
    @mock.patch('__builtin__.open')
    def test_loads_models_by_identifiers(self, openMock, pickleLoadMock):
        """load_models_by_identifiers maps (seed, idx) -> unpickled model."""
        seed = 13
        idx = 17
        expected_model = self._setup_load_model_mocks(openMock, pickleLoadMock, seed, idx)
        expected_dict = { (seed, idx): expected_model }

        actual_dict = self.backend.load_models_by_identifiers([(seed, idx)])

        self.assertIsInstance(actual_dict, dict)
        self.assertDictEqual(expected_dict, actual_dict)

    def _setup_load_model_mocks(self, openMock, pickleLoadMock, seed, idx):
        """Wire open()/pickle mocks so only the expected path/handle succeed."""
        model_path = '/model_directory/%s.%s.model' % (seed, idx)
        file_handler = 'file_handler'
        expected_model = 'model'

        # The mocked file object works as a context manager yielding our
        # sentinel handle; any other path or mode yields None.
        fileMock = mock.MagicMock()
        fileMock.__enter__.return_value = file_handler
        openMock.side_effect = lambda path, flag: fileMock if path == model_path and flag == 'rb' else None
        pickleLoadMock.side_effect = lambda fh: expected_model if fh == file_handler else None
return expected_model | bsd-3-clause | Python | |
faffa1c83e599730105f1fe38b253aafb2b00d18 | Add headerimage tests | ZeitOnline/zeit.content.cp,ZeitOnline/zeit.content.cp | src/zeit/content/cp/browser/blocks/tests/test_headerimage.py | src/zeit/content/cp/browser/blocks/tests/test_headerimage.py | import zeit.cms.testing
import zeit.content.cp
import zeit.content.cp.centerpage
class TestHeaderImage(zeit.cms.testing.BrowserTestCase):
    """Browser-level tests for the headerimage CP module."""

    layer = zeit.content.cp.testing.ZCML_LAYER

    def setUp(self):
        """Create a centerpage with one headerimage and check it out."""
        super(TestHeaderImage, self).setUp()
        with zeit.cms.testing.site(self.getRootFolder()):
            self.centerpage = zeit.content.cp.centerpage.CenterPage()
            self.centerpage['lead'].create_item('headerimage')
            self.repository['centerpage'] = self.centerpage
        b = self.browser
        b.open(
            'http://localhost/++skin++vivi/repository/centerpage/@@checkout')
        b.open('contents')
        # Remember the editor contents view so tests can return to it.
        self.content_url = b.url
        # NOTE(review): xml_url is set here but not used in this class --
        # possibly kept for other tests; confirm before removing.
        self.xml_url = 'http://localhost/++skin++vivi/workingcopy/zope.user/' \
            'centerpage/@@xml_source_edit.html'

    def test_can_create_headerimage_module_via_drag_n_drop_from_sidebar(self):
        """Dropping a headerimage module adds a second type-headerimage block."""
        b = self.browser
        # One headerimage exists from setUp.
        self.assertEqual(1, b.contents.count('type-headerimage'))
        b.open('informatives/@@landing-zone-drop-module?block_type=headerimage')
        b.open(self.content_url)
        self.assertEqual(2, b.contents.count('type-headerimage'))
| bsd-3-clause | Python | |
bdd532cccf504dc9fbf21a9e72b8185dc910ec94 | Add management command for running the task for validating all data catalogs. | sunlightlabs/thezombies,sunlightlabs/thezombies,sunlightlabs/thezombies,sunlightlabs/thezombies | thezombies/management/commands/validate_all_data_catalogs.py | thezombies/management/commands/validate_all_data_catalogs.py | from django.core.management.base import NoArgsCommand
from thezombies.tasks.main import validate_data_catalogs
class Command(NoArgsCommand):
    """Validate all of the agency data catalogs"""

    def handle_noargs(self, **options):
        """Spawn the Celery task group that validates every data catalog.

        BUG FIX: Django invokes ``handle_noargs(**options)`` on a
        NoArgsCommand, so the method must accept keyword options; the
        original signature took none and raised TypeError when run via
        ``manage.py``.
        """
        validator_group = validate_data_catalogs.delay()
        self.stdout.write(u"\nSpawned data catalog task group: {0}\n".format(validator_group.id))
| bsd-3-clause | Python | |
0dbc7432bf78850dee10b8d814b1d9eb74fa5afc | add test wing defender play | RoboJackets/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software | soccer/gameplay/plays/testing/test_wing_defender.py | soccer/gameplay/plays/testing/test_wing_defender.py | import play
import behavior
import constants
import robocup
import tactics.positions.wing_defender
import main
class TestWingDefender(play.Play):
    """Simple play that runs a single non-required WingDefender tactic."""

    def __init__(self):
        super().__init__(continuous=True)

        # Enter the running state unconditionally.
        self.add_transition(behavior.Behavior.State.start,
                            behavior.Behavior.State.running, lambda: True,
                            'immediately')

        # Defend against the ball's current position at construction time.
        tact = tactics.positions.wing_defender.WingDefender(main.ball().pos)
        self.add_subbehavior(tact, 'tact', required=False)

    def execute_running(self):
        # NOTE(review): the lookup result is unused -- this looks like a
        # placeholder for per-frame updates to the tactic; confirm intent.
        tact = self.subbehavior_with_name('tact')
def on_exit_running(self):
self.remove_subbehavior('tact') | apache-2.0 | Python | |
a2086c9c5c11586b04ca934bdad838babad087ee | add a mk-wof-config utility script | whosonfirst/go-whosonfirst-pip,whosonfirst/go-whosonfirst-pip,whosonfirst/go-whosonfirst-pip | utils/mk-wof-config.py | utils/mk-wof-config.py | #!/usr/bin/env python
import sys
import os
import json
import random
import logging
import socket
import mapzen.whosonfirst.placetypes
if __name__ == '__main__':

    import optparse

    opt_parser = optparse.OptionParser()
    opt_parser.add_option('-w', '--wof', dest='wof', action='store', default=None, help='The path to your Who\'s On First data repository')
    opt_parser.add_option('-o', '--out', dest='out', action='store', default=None, help='Where to write your config file, if "-" then config will be written to STDOUT')
    opt_parser.add_option('-r', '--roles', dest='roles', action='store', default='common,common_optional,optional', help='List of Who\'s On First placetype roles to include (default is "common,common_optional,optional"')
    opt_parser.add_option('-e', '--exclude', dest='exclude', action='store', default='', help='List of Who\'s On First placetypes to exclude, even if they are part of a role (default is None')
    opt_parser.add_option('-v', '--verbose', dest='verbose', action='store_true', default=False, help='Be chatty (default is false)')
    options, args = opt_parser.parse_args()

    if options.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    roles = options.roles.split(",")
    exclude = options.exclude.split(",")

    # Placetypes that can never be served this way are always excluded.
    impossible = ('venue', 'address', 'planet', 'building')

    for pt in impossible:
        if not pt in exclude:
            exclude.append(pt)

    wof = os.path.abspath(options.wof)
    meta = os.path.join(wof, 'meta')

    config = []
    ports = []

    for pt in mapzen.whosonfirst.placetypes.with_roles(roles) :

        # Is this a valid placetype?

        if pt in exclude:
            logging.debug("%s is in exclude list, skipping" % pt)
            continue

        fname = "wof-%s-latest.csv" % pt
        path = os.path.join(meta, fname)

        if not os.path.exists(path):
            logging.warning("meta file for %s (%s) does not exist, skipping" % (pt, path))
            continue

        # Pick a port! Any port!! Keep drawing random ports until we find one
        # that is neither already assigned nor currently listening locally.

        port = None

        while not port or port in ports:
            port = random.randint(1025, 49151)

            # BUG FIX: the probe socket was never closed, leaking one file
            # descriptor per probe attempt.
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                result = sock.connect_ex(('127.0.0.1', port))
            finally:
                sock.close()

            # connect_ex returning 0 means something accepted the connection,
            # i.e. the port is taken.
            if result == 0:
                logging.debug("port %s is already in use, trying again" % port)
                port = None

        # BUG FIX: chosen ports were never recorded, so the `port in ports`
        # guard above could never prevent two placetypes sharing a port.
        ports.append(port)

        # Add it to the list

        config.append({
            'Target': pt,
            'Port': port,
            'Meta': path
        })

    # All done

    if options.out == "-":
        fh = sys.stdout
    else:
        out = os.path.abspath(options.out)
        fh = open(out, 'w')

    json.dump(config, fh, indent=2)

    # Close the output file (but never stdout) so the JSON is flushed.
    if fh is not sys.stdout:
        fh.close()

    sys.exit(0)
| bsd-3-clause | Python | |
0fb8dec880b7a48002929fc54c6e337be63afa05 | Add missing manage.py | uw-it-aca/spacescout_web,uw-it-aca/spacescout_web,uw-it-aca/spacescout_web | travis_ci/manage.py | travis_ci/manage.py | #!/usr/bin/env python
import os
import sys
# Standard Django management entry point for the travis_ci test project.
if __name__ == "__main__":
    # Fall back to the CI settings module unless one is already configured.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "travis_ci.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
| apache-2.0 | Python | |
24e14b7d53e43f1574971ff5b6eee6d0185df23a | Add tests for retrieving/updating reverse fks | sehmaschine/django-rest-framework,qsorix/django-rest-framework,lubomir/django-rest-framework,nhorelik/django-rest-framework,James1345/django-rest-framework,canassa/django-rest-framework,yiyocx/django-rest-framework,thedrow/django-rest-framework-1,vstoykov/django-rest-framework,qsorix/django-rest-framework,jpulec/django-rest-framework,antonyc/django-rest-framework,kgeorgy/django-rest-framework,thedrow/django-rest-framework-1,rafaelang/django-rest-framework,damycra/django-rest-framework,HireAnEsquire/django-rest-framework,sbellem/django-rest-framework,rafaelcaricio/django-rest-framework,potpath/django-rest-framework,gregmuellegger/django-rest-framework,thedrow/django-rest-framework-1,waytai/django-rest-framework,akalipetis/django-rest-framework,uploadcare/django-rest-framework,ajaali/django-rest-framework,nhorelik/django-rest-framework,VishvajitP/django-rest-framework,wangpanjun/django-rest-framework,yiyocx/django-rest-framework,rhblind/django-rest-framework,andriy-s/django-rest-framework,aericson/django-rest-framework,rubendura/django-rest-framework,mgaitan/django-rest-framework,delinhabit/django-rest-framework,antonyc/django-rest-framework,rafaelcaricio/django-rest-framework,krinart/django-rest-framework,abdulhaq-e/django-rest-framework,raphaelmerx/django-rest-framework,waytai/django-rest-framework,douwevandermeij/django-rest-framework,sbellem/django-rest-framework,jness/django-rest-framework,VishvajitP/django-rest-framework,rhblind/django-rest-framework,nryoung/django-rest-framework,alacritythief/django-rest-framework,agconti/django-rest-framework,wedaly/django-rest-framework,nryoung/django-rest-framework,akalipetis/django-rest-framework,sehmaschine/django-rest-framework,sehmaschine/django-rest-framework,rubendura/django-rest-framework,wedaly/django-rest-framework,potpath/django-rest-framework,callorico/django-rest-framework,zeldalink0515/django-rest-framework,hnakamur/django
-rest-framework,canassa/django-rest-framework,hunter007/django-rest-framework,zeldalink0515/django-rest-framework,bluedazzle/django-rest-framework,adambain-vokal/django-rest-framework,MJafarMashhadi/django-rest-framework,ebsaral/django-rest-framework,davesque/django-rest-framework,justanr/django-rest-framework,paolopaolopaolo/django-rest-framework,abdulhaq-e/django-rest-framework,ossanna16/django-rest-framework,hnakamur/django-rest-framework,pombredanne/django-rest-framework,lubomir/django-rest-framework,sheppard/django-rest-framework,fishky/django-rest-framework,wwj718/django-rest-framework,tomchristie/django-rest-framework,HireAnEsquire/django-rest-framework,cyberj/django-rest-framework,tcroiset/django-rest-framework,gregmuellegger/django-rest-framework,vstoykov/django-rest-framework,jerryhebert/django-rest-framework,simudream/django-rest-framework,pombredanne/django-rest-framework,kennydude/django-rest-framework,ebsaral/django-rest-framework,nhorelik/django-rest-framework,AlexandreProenca/django-rest-framework,buptlsl/django-rest-framework,pombredanne/django-rest-framework,ticosax/django-rest-framework,kezabelle/django-rest-framework,kylefox/django-rest-framework,jpulec/django-rest-framework,jtiai/django-rest-framework,leeahoward/django-rest-framework,zeldalink0515/django-rest-framework,maryokhin/django-rest-framework,dmwyatt/django-rest-framework,MJafarMashhadi/django-rest-framework,potpath/django-rest-framework,tomchristie/django-rest-framework,waytai/django-rest-framework,yiyocx/django-rest-framework,justanr/django-rest-framework,hnarayanan/django-rest-framework,lubomir/django-rest-framework,rubendura/django-rest-framework,gregmuellegger/django-rest-framework,hnakamur/django-rest-framework,johnraz/django-rest-framework,werthen/django-rest-framework,callorico/django-rest-framework,leeahoward/django-rest-framework,werthen/django-rest-framework,bluedazzle/django-rest-framework,fishky/django-rest-framework,rafaelang/django-rest-framework,uploadcare/django-rest-fra
mework,krinart/django-rest-framework,agconti/django-rest-framework,xiaotangyuan/django-rest-framework,delinhabit/django-rest-framework,YBJAY00000/django-rest-framework,abdulhaq-e/django-rest-framework,callorico/django-rest-framework,jerryhebert/django-rest-framework,adambain-vokal/django-rest-framework,alacritythief/django-rest-framework,wwj718/django-rest-framework,uruz/django-rest-framework,cyberj/django-rest-framework,James1345/django-rest-framework,maryokhin/django-rest-framework,paolopaolopaolo/django-rest-framework,simudream/django-rest-framework,tcroiset/django-rest-framework,justanr/django-rest-framework,kezabelle/django-rest-framework,mgaitan/django-rest-framework,YBJAY00000/django-rest-framework,edx/django-rest-framework,HireAnEsquire/django-rest-framework,alacritythief/django-rest-framework,linovia/django-rest-framework,hunter007/django-rest-framework,ambivalentno/django-rest-framework,jpadilla/django-rest-framework,hunter007/django-rest-framework,arpheno/django-rest-framework,wzbozon/django-rest-framework,rafaelang/django-rest-framework,ashishfinoit/django-rest-framework,ashishfinoit/django-rest-framework,jerryhebert/django-rest-framework,wzbozon/django-rest-framework,linovia/django-rest-framework,xiaotangyuan/django-rest-framework,ajaali/django-rest-framework,vstoykov/django-rest-framework,johnraz/django-rest-framework,atombrella/django-rest-framework,AlexandreProenca/django-rest-framework,jpulec/django-rest-framework,ticosax/django-rest-framework,tigeraniya/django-rest-framework,cheif/django-rest-framework,uruz/django-rest-framework,wedaly/django-rest-framework,kezabelle/django-rest-framework,ticosax/django-rest-framework,jtiai/django-rest-framework,elim/django-rest-framework,canassa/django-rest-framework,atombrella/django-rest-framework,maryokhin/django-rest-framework,tcroiset/django-rest-framework,sheppard/django-rest-framework,jpadilla/django-rest-framework,cheif/django-rest-framework,tigeraniya/django-rest-framework,tomchristie/django-rest-framewor
k,andriy-s/django-rest-framework,hnarayanan/django-rest-framework,d0ugal/django-rest-framework,ebsaral/django-rest-framework,ezheidtmann/django-rest-framework,raphaelmerx/django-rest-framework,douwevandermeij/django-rest-framework,douwevandermeij/django-rest-framework,adambain-vokal/django-rest-framework,werthen/django-rest-framework,buptlsl/django-rest-framework,uruz/django-rest-framework,ezheidtmann/django-rest-framework,ossanna16/django-rest-framework,johnraz/django-rest-framework,d0ugal/django-rest-framework,delinhabit/django-rest-framework,buptlsl/django-rest-framework,davesque/django-rest-framework,edx/django-rest-framework,aericson/django-rest-framework,hnarayanan/django-rest-framework,James1345/django-rest-framework,tigeraniya/django-rest-framework,brandoncazander/django-rest-framework,sheppard/django-rest-framework,dmwyatt/django-rest-framework,elim/django-rest-framework,ambivalentno/django-rest-framework,raphaelmerx/django-rest-framework,kennydude/django-rest-framework,YBJAY00000/django-rest-framework,damycra/django-rest-framework,jness/django-rest-framework,agconti/django-rest-framework,uploadcare/django-rest-framework,paolopaolopaolo/django-rest-framework,kgeorgy/django-rest-framework,atombrella/django-rest-framework,bluedazzle/django-rest-framework,rhblind/django-rest-framework,akalipetis/django-rest-framework,brandoncazander/django-rest-framework,kennydude/django-rest-framework,iheitlager/django-rest-framework,dmwyatt/django-rest-framework,ezheidtmann/django-rest-framework,iheitlager/django-rest-framework,d0ugal/django-rest-framework,linovia/django-rest-framework,wwj718/django-rest-framework,brandoncazander/django-rest-framework,aericson/django-rest-framework,krinart/django-rest-framework,edx/django-rest-framework,wangpanjun/django-rest-framework,cheif/django-rest-framework,wangpanjun/django-rest-framework,sbellem/django-rest-framework,ambivalentno/django-rest-framework,AlexandreProenca/django-rest-framework,davesque/django-rest-framework,nryoung/djang
o-rest-framework,arpheno/django-rest-framework,kylefox/django-rest-framework,qsorix/django-rest-framework,kylefox/django-rest-framework,fishky/django-rest-framework,kgeorgy/django-rest-framework,leeahoward/django-rest-framework,arpheno/django-rest-framework,rafaelcaricio/django-rest-framework,xiaotangyuan/django-rest-framework,jtiai/django-rest-framework,iheitlager/django-rest-framework,antonyc/django-rest-framework,mgaitan/django-rest-framework,ashishfinoit/django-rest-framework,wzbozon/django-rest-framework,cyberj/django-rest-framework,ajaali/django-rest-framework,ossanna16/django-rest-framework,jness/django-rest-framework,andriy-s/django-rest-framework,damycra/django-rest-framework,MJafarMashhadi/django-rest-framework,jpadilla/django-rest-framework,simudream/django-rest-framework,elim/django-rest-framework,VishvajitP/django-rest-framework | rest_framework/tests/nested_relations.py | rest_framework/tests/nested_relations.py | from copy import deepcopy
from django.db import models
from django.test import TestCase
from rest_framework import serializers
# ForeignKey
class ForeignKeyTarget(models.Model):
    """Target side of the FK relation; reverse accessor is `sources`."""
    name = models.CharField(max_length=100)
class ForeignKeySource(models.Model):
    """Source row pointing at a ForeignKeyTarget via `target`."""
    name = models.CharField(max_length=100)
    # related_name='sources' is what the nested serializer below relies on.
    target = models.ForeignKey(ForeignKeyTarget, related_name='sources')
class ForeignKeySourceSerializer(serializers.ModelSerializer):
    """Default ModelSerializer for ForeignKeySource (all model fields)."""
    class Meta:
        model = ForeignKeySource
class ForeignKeyTargetSerializer(serializers.ModelSerializer):
    """Serializes a target together with its nested reverse-FK sources."""
    # Nested representation of the reverse relation declared via
    # related_name='sources' on ForeignKeySource.target.
    sources = ForeignKeySourceSerializer()

    class Meta:
        model = ForeignKeyTarget
class ReverseForeignKeyTests(TestCase):
    """Retrieve/update behavior of nested reverse foreign-key serializers."""

    def setUp(self):
        """Create two targets, attach three sources to the first."""
        target = ForeignKeyTarget(name='target-1')
        target.save()
        new_target = ForeignKeyTarget(name='target-2')
        new_target.save()
        for idx in range(1, 4):
            source = ForeignKeySource(name='source-%d' % idx, target=target)
            source.save()

        # Expected serialized shape of target 1 with its nested sources.
        self.target_data = {'id': 1, 'name': u'target-1', 'sources': [
            {'id': 1, 'name': u'source-1', 'target': 1},
            {'id': 2, 'name': u'source-2', 'target': 1},
            {'id': 3, 'name': u'source-3', 'target': 1},
        ]}
        # Target 2 has no sources.
        self.new_target_data = {'id': 2, 'name': u'target-2', 'sources': []}
        self.data = [self.target_data, self.new_target_data]

    def test_reverse_foreign_key_retrieve(self):
        """Serializing the full queryset yields the expected nested data."""
        queryset = ForeignKeyTarget.objects.all()
        serializer = ForeignKeyTargetSerializer(queryset)
        self.assertEquals(serializer.data, self.data)

    def test_reverse_foreign_key_update(self):
        """Renaming nested sources through the serializer persists."""
        data = deepcopy(self.target_data)
        data['sources'][0]['name'] = 'source-1-changed'
        data['sources'][2]['name'] = 'source-3-changed'
        instance = ForeignKeyTarget.objects.get(pk=1)
        serializer = ForeignKeyTargetSerializer(instance, data=data)
        self.assertTrue(serializer.is_valid())
        self.assertEquals(serializer.data, data)
        serializer.save()

        # Ensure target 1 is updated, and everything else is as expected
        queryset = ForeignKeyTarget.objects.all()
        serializer = ForeignKeyTargetSerializer(queryset)
        expected = deepcopy(self.data)
        expected[0]['sources'][0]['name'] = 'source-1-changed'
        expected[0]['sources'][2]['name'] = 'source-3-changed'
        self.assertEquals(serializer.data, expected)
| bsd-2-clause | Python | |
78fa851ffa6a9594dbbd41a6d572674552d76c85 | Install constants file. | Renelvon/txrudp,OpenBazaar/txrudp,jorik041/txrudp | txrudp/constants.py | txrudp/constants.py | """Constants governing operation of txrudp package."""
# Largest UDP payload we consider safe to send without fragmentation. [bytes]
UDP_SAFE_PACKET_SIZE = 1000

# Number of packets per window, derived from a 65535-byte budget. [length]
WINDOW_SIZE = 65535 // UDP_SAFE_PACKET_SIZE

# Retransmission timeout for an unacknowledged packet. [seconds]
TIMEOUT = 0.7

# Maximum total time a single packet may spend being retried. [seconds]
_MAX_PACKET_DELAY = 20

# If a packet is retransmitted more than that many times,
# the connection should be considered broken.
# Note: int // float yields a float here (20 // 0.7 == 28.0); comparisons
# against retry counters still work as intended.
MAX_RETRANSMISSIONS = _MAX_PACKET_DELAY // TIMEOUT
| mit | Python | |
9d550e9403560a84a75aad55a91ca661fcef7957 | Implement a hook for Issue #41 | gratipay/aspen.py,gratipay/aspen.py | aspen/hooks/options200.py | aspen/hooks/options200.py |
from aspen import Response
def hook(request):
    """Short-circuit server-wide 'OPTIONS *' probes with a 200 response.

    Any other request passes through unchanged.
    """
    line = request.line
    is_server_wide_options = line.method == "OPTIONS" and line.uri == "*"
    if is_server_wide_options:
        # Asterisk-form OPTIONS targets the server itself, not a resource.
        raise Response(200)
    return request
| mit | Python | |
632b3530f6b04d82bc66299d34137d3a76fb8f90 | add a test for only loading N items parameter | AmeliaKnows/libgreader,askedrelic/libgreader,smurfix/librssreader | tests/test_special_feeds.py | tests/test_special_feeds.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
libG(oogle)Reader
Copyright (C) 2010 Matt Behrens <askedrelic@gmail.com> http://asktherelic.com
Python library for working with the unofficial Google Reader API.
Unit tests for feeds. Requires mechanize for automated oauth authenication.
"""
try:
import unittest2 as unittest
except:
import unittest
from libgreader import GoogleReader, OAuthMethod, ClientAuthMethod, Feed, ItemsContainer, Item, BaseFeed, SpecialFeed, ReaderUrl
import urllib
import urllib2
import urlparse
import mechanize
import re
import time
from config import *
class TestSpecialFeeds(unittest.TestCase):
    """Integration tests against the live Google Reader API.

    NOTE(review): these need real credentials (username/password from
    config) and network access, and they mutate account state (mark read,
    subscribe/unsubscribe) -- not safe to run against a production account.
    """

    def test_reading_list_exists(self):
        """The special reading-list feed loads and is titled as expected."""
        ca = ClientAuthMethod(username,password)
        reader = GoogleReader(ca)
        reader.makeSpecialFeeds()
        feeds = reader.getFeedContent(reader.getSpecialFeed(ReaderUrl.READING_LIST))

        self.assertEqual(dict, type(feeds))

        list_match = re.search('reading list in Google Reader', feeds['title'])
        self.assertTrue(list_match)

    def test_marking_read(self):
        """Marking the newest reading-list item read is reflected locally."""
        ca = ClientAuthMethod(username,password)
        reader = GoogleReader(ca)

        container = SpecialFeed(reader, ReaderUrl.READING_LIST)
        container.loadItems()
        feed_item = container.items[0]
        self.assertTrue(feed_item.markRead())
        self.assertTrue(feed_item.isRead())

    def test_loading_item_count(self):
        """loadLimit=N loads exactly N items, in both items and countItems."""
        ca = ClientAuthMethod(username,password)
        reader = GoogleReader(ca)

        container = SpecialFeed(reader, ReaderUrl.READING_LIST)
        container.loadItems(loadLimit=5)

        self.assertEqual(5, len(container.items))
        self.assertEqual(5, container.countItems())

    def test_subscribe_unsubscribe(self):
        """Unsubscribe then resubscribe to Slashdot and verify the list."""
        ca = ClientAuthMethod(username,password)
        reader = GoogleReader(ca)

        slashdot = 'feed/http://rss.slashdot.org/Slashdot/slashdot'

        #unsubscribe always return true; revert feedlist state
        self.assertTrue(reader.unsubscribe(slashdot))

        # now subscribe
        self.assertTrue(reader.subscribe(slashdot))

        # wait for server to update
        time.sleep(1)
        reader.buildSubscriptionList()

        # test subscribe successful
        self.assertIn(slashdot, [x.id for x in reader.getSubscriptionList()])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
libG(oogle)Reader
Copyright (C) 2010 Matt Behrens <askedrelic@gmail.com> http://asktherelic.com
Python library for working with the unofficial Google Reader API.
Unit tests for feeds. Requires mechanize for automated oauth authenication.
"""
try:
import unittest2 as unittest
except:
import unittest
from libgreader import GoogleReader, OAuthMethod, ClientAuthMethod, Feed, ItemsContainer, Item, BaseFeed, SpecialFeed, ReaderUrl
import urllib
import urllib2
import urlparse
import mechanize
import re
import time
from config import *
class TestSpecialFeeds(unittest.TestCase):
    """Live Google Reader integration tests for the special feeds.

    NOTE(review): requires real credentials from config plus network
    access, and mutates account state -- treat as an integration suite.
    """

    def test_reading_list_exists(self):
        """The special reading-list feed loads and is titled as expected."""
        ca = ClientAuthMethod(username,password)
        reader = GoogleReader(ca)
        reader.makeSpecialFeeds()
        feeds = reader.getFeedContent(reader.getSpecialFeed(ReaderUrl.READING_LIST))

        self.assertEqual(dict, type(feeds))

        list_match = re.search('reading list in Google Reader', feeds['title'])
        self.assertTrue(list_match)

    def test_marking_read(self):
        """Marking the newest reading-list item read is reflected locally."""
        ca = ClientAuthMethod(username,password)
        reader = GoogleReader(ca)

        container = SpecialFeed(reader, ReaderUrl.READING_LIST)
        container.loadItems()
        feed_item = container.items[0]
        self.assertTrue(feed_item.markRead())
        self.assertTrue(feed_item.isRead())

    def test_subscribe_unsubscribe(self):
        """Unsubscribe then resubscribe to Slashdot and verify the list."""
        ca = ClientAuthMethod(username,password)
        reader = GoogleReader(ca)

        slashdot = 'feed/http://rss.slashdot.org/Slashdot/slashdot'

        #unsubscribe always return true; revert feedlist state
        self.assertTrue(reader.unsubscribe(slashdot))

        # now subscribe
        self.assertTrue(reader.subscribe(slashdot))

        # wait for server to update
        time.sleep(1)
        reader.buildSubscriptionList()

        # test subscribe successful
        self.assertIn(slashdot, [x.id for x in reader.getSubscriptionList()])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| mit | Python |
8ee78c14af3b9974ad96cf85f6ea32c4e254f958 | Add calcurse-dateutil | lfos/calcurse,lfos/calcurse,lfos/calcurse | contrib/calcurse-dateutil.py | contrib/calcurse-dateutil.py | #!/usr/bin/env python3
import argparse
import datetime
def get_date(s):
    """Parse a YYYY-MM-DD string into a datetime.date (argparse type)."""
    parsed = datetime.datetime.strptime(s, '%Y-%m-%d')
    return parsed.date()
# Command-line interface: --date sets the start day (default today),
# --range expands the computation over that many consecutive start days,
# --unique de-duplicates printed dates, --append tacks a literal suffix
# onto every printed date, and the positional `op` tokens form a small
# left-to-right pipeline of date transformations.
parser = argparse.ArgumentParser('calcurse-dateutil')
parser.add_argument('--date', type=get_date, action='store', dest='date')
parser.add_argument('--range', type=int, action='store', dest='range')
parser.add_argument('--unique', action='store_true', dest='unique')
parser.add_argument('--append', type=str, action='store', dest='append')
parser.add_argument('op', nargs='+')
args = parser.parse_args()
def skip_days(d, n):
    """Return the date n days after d (n may be negative)."""
    offset = datetime.timedelta(n)
    return d + offset
def skip_months(d, n):
    """Return the date n months after d (n may be negative), clamping the
    day to the target month's length (e.g. Jan 31 + 1 month -> Feb 28/29).

    BUG FIX: the original passed a nonexistent ``months=`` keyword to
    ``datetime.timedelta`` (TypeError at runtime) and ignored ``n``
    entirely, so this function could never have worked.
    """
    import calendar
    total = d.month - 1 + n
    year = d.year + total // 12
    month = total % 12 + 1
    day = min(d.day, calendar.monthrange(year, month)[1])
    return d.replace(year=year, month=month, day=day)
def next_weekday(d, w):
    """Return the next date on or after d whose weekday() equals w
    (0 = Monday ... 6 = Sunday); returns d itself when it already matches.
    """
    offset = (w - d.weekday()) % 7
    return d + datetime.timedelta(days=offset)
def bow(d):
    """Beginning of week: the Monday on or before d."""
    return d - datetime.timedelta(days=d.weekday())
def bom(d):
    """Beginning of month: the first day of d's month."""
    return d.replace(day=1)
def eow(d):
    """End of week: the Sunday on or after d (weeks run Monday-Sunday)."""
    return d + datetime.timedelta(days=6 - d.weekday())
def eom(d):
    """End of month: the last day of d's month.

    BUG FIX: the original delegated to skip_months(), which raised
    TypeError (timedelta has no ``months`` keyword), so eom always
    crashed.  NOTE(review): this implementation returns the *last* day of
    the month, parallel to eow() returning the last day of the week --
    confirm that inclusive-end semantics are what the 'eom' op should mean.
    """
    first = d.replace(day=1)
    if first.month == 12:
        next_month = first.replace(year=first.year + 1, month=1)
    else:
        next_month = first.replace(month=first.month + 1)
    return next_month - datetime.timedelta(days=1)
# Defaults: start from today, a single day, no output suffix.
s = args.date if args.date else datetime.date.today()
r = args.range if args.range else 1
a = args.append if args.append else ''

# Dates already printed, for --unique (keyed on the date itself, not the
# suffixed output string).
seen = set()

for i in range(0, r):
    # Each range step starts i days after the base date.
    d = skip_days(s, i)
    # Apply the positional ops left to right; ops that take an argument
    # consume the next token from the same iterator via next(it).
    it = iter(args.op)
    for arg in it:
        if arg == 'bow':
            d = bow(d)
        elif arg == 'bom':
            d = bom(d)
        elif arg == 'eow':
            d = eow(d)
        elif arg == 'eom':
            d = eom(d)
        elif arg == 'next-weekday':
            d = next_weekday(d, int(next(it)))
        elif arg == 'skip-days':
            d = skip_days(d, int(next(it)))
        elif arg == 'skip-months':
            d = skip_months(d, int(next(it)))

    out = "{}{}".format(d, a)
    if args.unique:
        if d not in seen:
            print(out)
            seen.add(d)
    else:
        print(out)
| bsd-2-clause | Python | |
bbf1e1532ef1827c808c60fe8f7459a438789aaf | work on csv collection | SMAPPNYU/smappdragon | smappdragon/collection/csv_collection.py | smappdragon/collection/csv_collection.py | import os
import unicodecsv
from smappdragon.tools.tweet_parser import TweetParser
from smappdragon.collection.base_collection import BaseCollection
class CsvCollection(BaseCollection):
    """Tweet collection backed by a CSV file on disk.

    Rows are streamed lazily through unicodecsv.DictReader, so large
    files never have to be loaded into memory at once.

    Fixes versus the original: the second free-floating triple-quoted
    string in the class body was a no-op statement (not get_iterator's
    docstring) -- both are now real docstrings -- and the manually
    opened file handle is managed by a ``with`` block, which also closes
    it if the generator is abandoned early.
    """

    def __init__(self, filepath):
        """Bind the collection to *filepath*, failing fast if it is missing."""
        BaseCollection.__init__(self)
        self.filepath = filepath
        if not os.path.isfile(filepath):
            raise IOError(filepath, 'CsvCollection could not find your file, it\'s mispelled or doesn\'t exist.')

    def get_iterator(self):
        """Yield each CSV row (as a dict) that passes the configured filters.

        Honors ``self.limit`` (0 means unlimited) plus ``self.filter``
        and ``self.custom_filters`` inherited from BaseCollection.
        """
        count = 1
        tweet_parser = TweetParser()
        with open(self.filepath, 'rb') as csv_handle:
            for tweet in unicodecsv.DictReader(csv_handle):
                if self.limit < count and self.limit != 0:
                    return
                elif tweet_parser.tweet_passes_filter(self.filter, tweet) \
                and tweet_parser.tweet_passes_custom_filter_list(self.custom_filters, tweet):
                    count += 1
                    yield tweet
| mit | Python | |
755f6f701c5bef733531c33da2b1a0918a9f84dc | add daemonize | turbidsoul/tsutil | tsutil/daemonize.py | tsutil/daemonize.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Turbidsoul Chen
# @Date: 2014-03-07 17:11:20
# @Last Modified by: Turbidsoul Chen
# @Last Modified time: 2014-07-16 15:52:49
import os
import sys
def daemonize(stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
try:
pid = os.fork()
print(pid)
if pid > 0:
sys.exit(0)
except OSError, e:
sys.stderr.write('fork #1 failed: (%d) %s\n' % (e.errno, e.strerror))
sys.exit(1)
os.chdir('/')
os.umask(0)
os.setsid()
try:
pid = os.fork()
print(pid)
if pid > 0:
sys.exit(0)
except OSError, e:
sys.stderr.write('fork #2v failed: (%d) %s\n' % (e.errno, e.strerror))
sys.exit(1)
for f in sys.stdout, sys.stderr:
f.flush()
si = file(stdin, 'r')
so = file(stdout, 'a+')
se = file(stdout, 'a+', 0)
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
| mit | Python | |
aeb633ae76f2ffe0e927a3230e1ad456891c9afc | add chart networth endpoint | Mesitis/community | sample-code/Python/get_chart_networth.py | sample-code/Python/get_chart_networth.py | '''
- login and get token
- process 2FA if 2FA is setup for this account
- if the user is a regular customer then get cashflow chart data for this user
- if the user is a partner_admin then get a cashflow chart data for the first user from the list of users this partner admin has access to
'''
import requests
import json
get_token_url = "https://api.canopy.cloud:443/api/v1/sessions/"
validate_otp_url = "https://api.canopy.cloud:443/api/v1/sessions/otp/validate.json" #calling the production server for OTP authentication
get_partner_users_url = "https://api.canopy.cloud:443/api/v1/admin/users.json"
get_chart_networth_url = "https://api.canopy.cloud:443/api/v1/charts/networth.json"
#please replace below with your username and password over here
username = 'login_name'
password = 'xxxxxxxxxx'
#please enter the OTP token in case it is enabled
otp_code = '123456'
#first call for a fresh token
payload = "user%5Busername%5D=" + username + "&user%5Bpassword%5D=" + password
headers = {
'accept': "application/json",
'content-type':"application/x-www-form-urlencoded"
}
response = requests.request("POST", get_token_url, data=payload, headers=headers)
print json.dumps(response.json(), indent=4, sort_keys = True)
token = response.json()['token']
login_flow = response.json()['login_flow']
#in case 2FA is enabled use the OTP code to get the second level of authentication
if login_flow == '2fa_verification':
headers['Authorization'] = token
payload = 'otp_code=' + otp_code
response = requests.request("POST", validate_otp_url, data=payload, headers=headers)
print json.dumps(response.json(), indent=4, sort_keys = True) #print response.text
token = response.json()['token']
login_role = response.json()['role']
switch_user_id = response.json()['id']
if login_role == 'Partneradmin':
#print "============== partner's users ==========="
headers = {
'authorization': token,
'content-type': "application/x-www-form-urlencoded; charset=UTF-8"
}
partner_users = []
response = requests.request("GET", get_partner_users_url, headers=headers)
for parent_user in response.json()['users']:
partner_users.append(parent_user['id'])
#print partner_users
#take the first users in the list as the switch_user_id
switch_user_id = partner_users[0]
#in case the user is a partner_admin then switch_user_id is any one of the users it has access to (here we take the first one from the list)
#in case the user is a regular customer then the switch_user_id = user_id for this customer
headers = {
'authorization': token,
'content-type': "application/x-www-form-urlencoded; charset=UTF-8",
'x-app-switch-user': str(switch_user_id)
}
response = requests.request("GET", get_chart_networth_url, headers=headers)
print json.dumps(response.json(), indent=4, sort_keys = True)
| mit | Python | |
e8235b10c610aae51213e8f090e3bf692f99adcc | Add the cbtf-lanl spack build package. cbtf-lanl is LANLs contribution to the CBTF project. It contains psTool and memTool which are example tools, showing use case examples for CBTF. | lgarren/spack,skosukhin/spack,krafczyk/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,tmerrick1/spack,TheTimmy/spack,mfherbst/spack,lgarren/spack,matthiasdiener/spack,LLNL/spack,TheTimmy/spack,TheTimmy/spack,mfherbst/spack,lgarren/spack,EmreAtes/spack,iulian787/spack,krafczyk/spack,matthiasdiener/spack,EmreAtes/spack,tmerrick1/spack,iulian787/spack,skosukhin/spack,skosukhin/spack,tmerrick1/spack,mfherbst/spack,LLNL/spack,matthiasdiener/spack,lgarren/spack,lgarren/spack,iulian787/spack,iulian787/spack,mfherbst/spack,tmerrick1/spack,LLNL/spack,skosukhin/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,TheTimmy/spack,EmreAtes/spack,tmerrick1/spack,krafczyk/spack,TheTimmy/spack,EmreAtes/spack,krafczyk/spack,iulian787/spack,skosukhin/spack | var/spack/packages/cbtf-lanl/package.py | var/spack/packages/cbtf-lanl/package.py | ################################################################################
# Copyright (c) 2015 Krell Institute. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
from spack import *
class CbtfLanl(Package):
    """CBTF LANL project contains a memory tool and data center type system command monitoring tool."""
    homepage = "http://sourceforge.net/p/cbtf/wiki/Home/"
    url = "http://sourceforge.net/projects/cbtf/files/cbtf-1.5/cbtf-lanl-1.5.tar.gz/download"
    version('1.5', '78d42050f9ec7127e8d0a93d87a66702')

    # Mirror access template example
    #url = "file:/opt/spack-mirror-2015-02-27/cbtf-lanl/cbtf-lanl-1.5.tar.gz"
    #version('1.5', 'c3f78f967b0a42c6734ce4be0e602426')

    # Dependencies for cbtf-krell
    depends_on("boost@1.41:")
    depends_on("mrnet@4.1.0+krelloptions")
    depends_on("xerces-c@3.1.1:")
    depends_on("cbtf")
    depends_on("cbtf-krell")

    # The build is not parallel-safe.
    parallel = False

    def install(self, spec, prefix):
        """Configure with CMake, pointing at the dependency install trees,
        then build and install."""
        # Add in paths for finding package config files that tell us where to find these packages
        cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix)
        with working_dir('build', create=True):
            cmake('..',
                  '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
                  '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
                  '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
                  '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
                  '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
                  '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share, 'KrellInstitute', 'cmake'),
                  *std_cmake_args)
            make("clean")
            make()
            make("install")
| lgpl-2.1 | Python | |
e3842cfbdfbf5c28e70916080fe4ce1ffad7c75c | Add bleu-score calculator | m1cr0man/OneLineWonders,m1cr0man/OneLineWonders | CA4011/bleu-score.py | CA4011/bleu-score.py | print((lambda n,p,t,r:min(1,len(t)/len(r))*__import__(math).pow(p(n,t,r,1)*p(n,t,r,2)*p(n,t,r,3)*p(n,t,r,4),0.25))(lambda s,l:[s[i:i+l]for i in range(len(s)-l+1)],lambda n,t,r,s:(lambda T,R:sum([g in R and(R.remove(g)or 1)for g in T])/len(T)if len(T+R)else 1)([.join(x)for x in n(t,s)],[.join(x)for x in n(r,s)]),print(Enter translation)or input().strip().lower().split(),print(Enter references)or input().strip().lower().split()))
| mit | Python | |
adbe1d4f06028ba13e21386f7d62939d4b2eb740 | Add PatchELF package | matthiasdiener/spack,mfherbst/spack,krafczyk/spack,mfherbst/spack,tmerrick1/spack,tmerrick1/spack,tmerrick1/spack,iulian787/spack,matthiasdiener/spack,matthiasdiener/spack,LLNL/spack,matthiasdiener/spack,skosukhin/spack,skosukhin/spack,mfherbst/spack,krafczyk/spack,tmerrick1/spack,LLNL/spack,lgarren/spack,skosukhin/spack,krafczyk/spack,TheTimmy/spack,TheTimmy/spack,iulian787/spack,EmreAtes/spack,TheTimmy/spack,mfherbst/spack,iulian787/spack,skosukhin/spack,TheTimmy/spack,krafczyk/spack,LLNL/spack,lgarren/spack,mfherbst/spack,LLNL/spack,iulian787/spack,LLNL/spack,EmreAtes/spack,lgarren/spack,EmreAtes/spack,EmreAtes/spack,matthiasdiener/spack,krafczyk/spack,lgarren/spack,TheTimmy/spack,skosukhin/spack,lgarren/spack,EmreAtes/spack,tmerrick1/spack,iulian787/spack | var/spack/packages/patchelf/package.py | var/spack/packages/patchelf/package.py | from spack import *
class Patchelf(Package):
    """PatchELF is a small utility to modify the dynamic linker and RPATH of ELF executables."""

    homepage = "https://nixos.org/patchelf.html"
    url = "http://nixos.org/releases/patchelf/patchelf-0.8/patchelf-0.8.tar.gz"

    list_url = "http://nixos.org/releases/patchelf/"
    list_depth = 2

    version('0.8', '407b229e6a681ffb0e2cdd5915cb2d01')

    def install(self, spec, prefix):
        """Standard autotools build: configure, compile, install."""
        configure('--prefix={0}'.format(prefix))
        make()
        make('install')
| lgpl-2.1 | Python | |
b4d6fc7ed10bb7e424797aaa8bcfff8ad738cd97 | Add __init__ file to permit import as module | mcs07/ChemSpiPy | __init__.py | __init__.py | mit | Python | ||
195eba54a45e8d841e1e9574938bef1d2440eb06 | Create __init__.py | CSIRTUK/TekDefense-Automater | __init__.py | __init__.py | mit | Python | ||
2124026b3b6468789f599a2bc5382e69e3d27310 | Add __main__.py | jaronoff97/play_as_one,jaronoff97/play_as_one,jaronoff97/play_as_one | __main__.py | __main__.py | #! usr/bin/env python2
import PlayAsOne
if __name__ == '__main__':
PlayAsOne.PlayAsOne()
| mit | Python | |
b46e223340ecb4c4056eb89fa08aaff64fceaa09 | Add command | RaitoBezarius/mangaki,Mako-kun/mangaki,Elarnon/mangaki,RaitoBezarius/mangaki,Mako-kun/mangaki,Elarnon/mangaki,RaitoBezarius/mangaki,Mako-kun/mangaki,Elarnon/mangaki | mangaki/mangaki/management/commands/findneighbors.py | mangaki/mangaki/management/commands/findneighbors.py | from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from mangaki.models import Neighborship, Rating
from collections import Counter
class Command(BaseCommand):
    # Management command that recomputes every user's neighbor scores.
    # Two users are "neighbors" when they rated the same works; the score
    # sums the products of the per-choice weights over shared works.
    args = ''
    help = ''

    def handle(self, *args, **options):
        # Weight of each rating choice: agreeing strong opinions contribute
        # a large positive score, opposed ones a negative score.
        values = {'like': 2, 'dislike': -2, 'neutral': 0.1, 'willsee': 0.5, 'wontsee': -0.5}
        for user in User.objects.all():
            print(user.id, user.username)
            c = 0  # number of rating pairs examined (for progress output)
            neighbors = Counter()
            # NOTE(review): issues one Rating query per rating of the user
            # (quadratic database round-trips overall) — consider bulk
            # prefetching if this command becomes slow.
            for my in Rating.objects.filter(user=user):
                for her in Rating.objects.filter(work=my.work):
                    c += 1
                    neighbors[her.user.id] += values[my.choice] * values[her.choice]
            print(c, 'operations performed')
            # Upsert the accumulated score for each neighbor.
            for user_id in neighbors:
                Neighborship.objects.update_or_create(user=user, neighbor=User.objects.get(id=user_id), defaults={'score': neighbors[user_id]})
| agpl-3.0 | Python | |
7627d58460c3683e51f944e49dc9ab31c8beda06 | Create default_gateway_checker.py | msenin94/mk-post-deployment-checks,msenin94/salt-mk-verificator,legan4ik/mk-post-deployment-checks | mk-verificator/networking/default_gateway_checker.py | mk-verificator/networking/default_gateway_checker.py | #!/usr/bin/env python
import json
import salt.client as client
def main():
local = client.LocalClient()
netstat_info = local.cmd('*', 'cmd.run', ['ip r | sed -n 1p'])
# {node:"default via 10.xxx.xxx.xxx dev ethx", }
groups = {}
for node_name, node_gw in netstat_info.items():
group_name = node_name.split('-')[0]
if not groups.has_key(group_name):
groups[group_name] = [node_name]
else:
groups[group_name].append(node_name)
for group_name, nodes in groups.items():
gw = {}
for node in nodes:
if not gw.has_key(netstat_info[node]):
gw[netstat_info[node]] = [node]
else:
gw[netstat_info[node]].append(node)
if len(gw) != 1:
print '-' * 40
print 'Gpoup: ' + group_name
print json.dumps(gw, indent=4)
if __name__ == "__main__":
main()
| bsd-2-clause | Python | |
12c50dbac8179b92272136c512e034f6782027df | Introduce a GlobalStack class | fieldOfView/Cura,fieldOfView/Cura,Curahelper/Cura,hmflash/Cura,ynotstartups/Wanhao,hmflash/Cura,Curahelper/Cura,ynotstartups/Wanhao | cura/Settings/GlobalStack.py | cura/Settings/GlobalStack.py | # Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase
from UM.Settings.ContainerStack import ContainerStack
from UM.Settings.ContainerRegistry import ContainerRegistry
class CannotSetNextStackError(Exception):
    """Error raised when setting the next stack is not allowed."""
    pass
class GlobalStack(ContainerStack):
    # Container stack representing the machine-level ("global") settings.

    def __init__(self, container_id, *args, **kwargs):
        super().__init__(container_id, *args, **kwargs)

        # Register the MIME type and container type used to (de)serialize
        # global stacks from *.global.cfg files.
        # NOTE(review): this runs on every instantiation, so the MIME type
        # and container type are re-registered per instance — consider
        # moving registration to module level.
        global_stack_mime = MimeType(
            name = "application/x-cura-globalstack",
            comment = "Cura Global Stack",
            suffixes = [ "global.cfg" ]
        )

        MimeTypeDatabase.addMimeType(global_stack_mime)
        ContainerRegistry.addContainerTypeByName(GlobalStack, "global_stack", global_stack_mime.name)
| agpl-3.0 | Python | |
f5ef5c2a986d56495069c7ccad5e56fb097ea17b | Create t.py | zjs81/Chain_Coin_Master_Node_Admin_Tool,zjs81/Chain_Coin_Master_Node_Admin_Tool | t.py | t.py | from appJar import gui
import sys
# Currently-unused key-file state (presumably for a later feature — confirm).
keyfilename = ""
keyfileinuse = False

# Connection details handed over on the command line by login.py:
# argv[1]=ip argv[2]=username argv[3]=password argv[4]=port
port = sys.argv[4]
ip = sys.argv[1]
username = sys.argv[2]
password = sys.argv[3]
#The stuff above takes args from login.py

balance = 0
#Gets balance

# Build and show the admin GUI.
app = gui("HODLER ADMIN", "400x200")
app.setFont(10)
app.addLabelOptionBox("Options", ["File","Install MasterNode"])
# NOTE(review): this echoes the password to stdout — consider removing.
print port, ip, username, password
app.go()
| mit | Python | |
5439a712f1f33117561ca0448d8a88ff53ec8979 | Add initial spin_fort test (#5658) | dtee/PokemonGo-Bot,goedzo/PokemonGo-Bot,halsafar/PokemonGo-Bot,halsafar/PokemonGo-Bot,heihachi/PokemonGo-Bot,dtee/PokemonGo-Bot,goedzo/PokemonGo-Bot,DBa2016/PokemonGo-Bot,heihachi/PokemonGo-Bot,DBa2016/PokemonGo-Bot,halsafar/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,dtee/PokemonGo-Bot,DBa2016/PokemonGo-Bot,goedzo/PokemonGo-Bot,goedzo/PokemonGo-Bot,halsafar/PokemonGo-Bot,heihachi/PokemonGo-Bot,dtee/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,heihachi/PokemonGo-Bot,DBa2016/PokemonGo-Bot,Gobberwart/PokemonGo-Bot | pokemongo_bot/test/spin_fort_test.py | pokemongo_bot/test/spin_fort_test.py | import os
import pickle
import unittest
from mock import MagicMock, patch
from pokemongo_bot.cell_workers.spin_fort import SpinFort
from pokemongo_bot.inventory import Items
# SpinFort configuration for the tests: no wait between spins and a daily
# limit high enough that the worker is never throttled.
config = {
    "spin_wait_min": 0,
    "spin_wait_max": 0,
    "daily_spin_limit": 100,
}

# Canned successful FORT_SEARCH API response (result 1 == success).
response_dict = {'responses':
    {'FORT_SEARCH': {
        'experience_awarded': 50,
        'items_awarded': [
            {'item_id': 1, 'item_count': 1},
            {'item_id': 1, 'item_count': 1},
            {'item_id': 1, 'item_count': 1}
        ],
        'result': 1,
        'cooldown_complete_timestamp_ms': 1474592183629L,
        'chain_hack_sequence_number': 1}
    },
    'status_code': 1,
    'platform_returns': [
        {'type': 6, 'response': 'CAE='}
    ],
    'request_id': 4916374460149268503L
}

# Awards corresponding to the canned response above.
items_awarded = {u'Pokeball': 4}
egg_awarded = None
experience_awarded = 50
class SpinFortTestCase(unittest.TestCase):
    """Unit tests for the SpinFort worker."""

    def setUp(self):
        # Replace the bot with a mock so no API/session setup happens.
        self.patcherPokemonGoBot = patch('pokemongo_bot.PokemonGoBot')
        self.bot = self.patcherPokemonGoBot.start()

        # Feed the worker a pickled set of example forts instead of live data.
        forts_path = os.path.join(os.path.dirname(__file__),
                                  'resources', 'example_forts.pickle')
        with open(forts_path, 'rb') as forts:
            ex_forts = pickle.load(forts)

        self.patcherFortRange = patch('pokemongo_bot.cell_workers.spin_fort.SpinFort.get_forts_in_range')
        self.fort_range = self.patcherFortRange.start()
        self.fort_range.return_value = ex_forts

        self.patcherInventoryItem = patch('pokemongo_bot.inventory.Items')
        self.inventory_item = self.patcherInventoryItem.start()

    def tearDown(self):
        self.patcherPokemonGoBot.stop()
        self.patcherFortRange.stop()
        self.patcherInventoryItem.stop()

    @patch('pokemongo_bot.cell_workers.spin_fort.SpinFort.get_items_awarded_from_fort_spinned')
    def test_spin_fort(self, mock_items_awarded):
        """Spinning a fort with a successful API response returns 1."""
        spin_fort = SpinFort(self.bot, config)
        self.bot.api = MagicMock()
        self.bot.api.fort_search.return_value = response_dict
        # Bug fix: the mock parameter used to be named `items_awarded`,
        # shadowing the module-level fixture dict, so the line below set the
        # mock's return_value to the mock itself instead of the fixture.
        mock_items_awarded.return_value = items_awarded

        result = spin_fort.work()
        self.assertEqual(result, 1)
| mit | Python | |
0eeff1ec1498f98d624dad90a60d24ab44cc31de | Fix cleanvcf.py when handling chromosome changes Arvados-DCO-1.1-Signed-off-by: Jiayong Li <jli@curii.com> refs #14992 | curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g | cwl-version/preprocess/gvcf/filterclean/src/cleanvcf.py | cwl-version/preprocess/gvcf/filterclean/src/cleanvcf.py | #!/usr/bin/env python
from __future__ import print_function
import sys
def is_header(line):
    """Return True when *line* is a VCF header line (begins with '#')."""
    first_char = line[:1]
    return first_char == '#'
# FIELD index
# CHROM 0, POS 1, REF 3
def main():
    # Stream a VCF from stdin to stdout, dropping any record whose POS
    # falls inside the reference span of a record already printed on the
    # same chromosome.
    previous_CHROM = ""
    # 1-based position of the last reference base covered so far on the
    # current chromosome.
    previous_end_POS = 0
    for line in sys.stdin:
        if not is_header(line):
            fields = line.split('\t')
            CHROM = fields[0]
            POS = int(fields[1])
            REF = fields[3]
            if CHROM == previous_CHROM:
                # Same chromosome: keep only records that start past the
                # covered span, and extend the span by this record's REF.
                if POS > previous_end_POS:
                    print(line, end='')
                    previous_end_POS = max(previous_end_POS, POS + len(REF) - 1)
            else:
                # New chromosome: always keep the first record and restart
                # the covered-span tracking from it.
                print(line, end='')
                previous_end_POS = POS + len(REF) - 1
                previous_CHROM = CHROM
        else:
            # Header lines pass through untouched.
            print(line, end='')


if __name__ == '__main__':
    main()
| #!/usr/bin/env python
from __future__ import print_function
import sys
def is_header(line):
    """True when the given line is a VCF header (starts with a '#')."""
    return len(line) > 0 and line[0] == '#'
# FIELD index
# CHROM 0, POS 1, REF 3
def main():
    """Stream a VCF from stdin to stdout, dropping records that overlap the
    reference span of an earlier record on the same chromosome."""
    previous_CHROM = ""
    # 1-based position of the last reference base covered so far on the
    # current chromosome.
    previous_end_POS = 0
    for line in sys.stdin:
        if not is_header(line):
            fields = line.split('\t')
            CHROM = fields[0]
            POS = int(fields[1])
            REF = fields[3]
            if CHROM == previous_CHROM:
                if POS > previous_end_POS:
                    print(line, end='')
                    previous_end_POS = max(previous_end_POS, POS + len(REF) - 1)
            else:
                print(line, end='')
                # Bug fix: the covered span must start from this record's
                # REF span, not be reset to 0 — otherwise a record starting
                # at POS 1 of the new chromosome could overlap this one.
                previous_end_POS = POS + len(REF) - 1
                previous_CHROM = CHROM
        else:
            print(line, end='')


if __name__ == '__main__':
    main()
| agpl-3.0 | Python |
d2c26cdfb9077aa5e3e8f9a5e2b89c8085bdd2d9 | Create RLU_back_propagation.py | rupertsmall/machine-learning,rupertsmall/machine-learning | Neural-Networks/RLU_back_propagation.py | Neural-Networks/RLU_back_propagation.py | # back propagation algorithm
from numpy import *
def back_propagation(y, A, MEGA_THETA, xi):
    # Back-propagation for a network of rectified linear units: returns the
    # matrix DELTA of partial derivatives dJ/dTHETA, laid out block-wise to
    # mirror MEGA_THETA (one block per layer, filled from the output layer
    # backwards).
    # assume y, A, xi are 1-D column vectors (row-less)
    # assume MEGA_THETA is 2-D array
    # define useful constants
    L = size(xi)                      # number of layers
    a = A[-xi[-1]:][:, newaxis]       # output-layer activations, as a column
    delta = a - y[:, newaxis]         # output-layer error
    DIM = shape(MEGA_THETA)
    DELTA = zeros([DIM[0], DIM[1]])   # matrix of dJ/dTHETA
    # A stacks all layer activations; the (L - 1) term presumably accounts
    # for one bias unit per non-output layer — TODO confirm.
    rows_A = sum(xi) + L - 1
    # define index start/end values (the trailing slices of A and the
    # bottom-right block of DELTA correspond to the last layer pair)
    end_index = rows_A - xi[-1]
    start_index = end_index - xi[-2] - 1
    D_row_start = DIM[0] - xi[-1]
    D_row_end = DIM[0]
    D_col_start = DIM[1] - xi[-2] - 1
    D_col_end = DIM[1]
    # execute first step outside for loop (it doesn't involve MEGA_THETA)
    a_prev = a  # save for later
    a = A[start_index:end_index][:, newaxis]  # next layer inwards from end
    DELTA[D_row_start:D_row_end, D_col_start:D_col_end] = kron(delta, a.T)
    # iterate backwards through each layer (back propagation)
    for i in range(L - 1, 1, -1):
        # calculate next delta
        # use as if rectified linear units: derivative is a 0/1 mask
        g = ones(len(a_prev)) * (a_prev > 0)
        # local matrix for this layer
        local_theta = MEGA_THETA[D_row_start:D_row_end, D_col_start:D_col_end]
        # the derivative in the back-prop algorithm doesn't include the first elmnt
        delta = dot(local_theta.T, g * delta)[1:, :]
        # now update DELTA indices for next loop (walk one block up/left)
        D_row_end = D_row_start  # yes, strange indeed !
        D_row_start = D_row_end - xi[i - 1]
        D_col_end = D_col_start
        D_col_start = D_col_end - xi[i - 2] - 1
        # update indices to select from A
        end_index = start_index
        start_index = end_index - xi[i - 2] - 1
        # calculate DELTA for next layer (drop the bias activation first)
        a_prev = a[1:, :]
        a = A[start_index:end_index]
        DELTA[D_row_start:D_row_end, D_col_start:D_col_end] = kron(delta, a.T)
    return DELTA
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.