commit stringlengths 40 40 | old_file stringlengths 4 236 | new_file stringlengths 4 236 | old_contents stringlengths 1 3.26k | new_contents stringlengths 16 4.43k | subject stringlengths 16 624 | message stringlengths 17 3.29k | lang stringclasses 5
values | license stringclasses 13
values | repos stringlengths 5 91.5k |
|---|---|---|---|---|---|---|---|---|---|
aefa8a3d6d4c809c7e470b22a0c9fb2c0875ba8b | project/project/urls.py | project/project/urls.py | from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.contrib.auth import views
urlpatterns = [
url(
r'^silk/',
include('silk.urls', namespace='silk', app_name='silk')
),
url(
r'^example_app/',
include('example_app.urls', namespace='example_app', app_name='example_app')
),
url(r'^admin/', include(admin.site.urls)),
]
urlpatterns += [
url(
r'^login/$',
views.login,
{'template_name': 'example_app/login.html'}, name='login'),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.conf.urls.static import static
from django.contrib.auth import views
urlpatterns = [
url(
r'^silk/',
include('silk.urls', namespace='silk')
),
url(
r'^example_app/',
include('example_app.urls', namespace='example_app')
),
url(
r'^admin/',
admin.site.urls
),
]
urlpatterns += [
url(
r'^login/$',
views.login,
{'template_name': 'example_app/login.html'}, name='login'),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + \
static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| Remove unneeded app_name from test project to be django 2 compatible | Remove unneeded app_name from test project to be django 2 compatible
| Python | mit | crunchr/silk,mtford90/silk,jazzband/silk,crunchr/silk,mtford90/silk,jazzband/silk,crunchr/silk,django-silk/silk,django-silk/silk,jazzband/silk,django-silk/silk,crunchr/silk,mtford90/silk,jazzband/silk,mtford90/silk,django-silk/silk |
aae29a385129e6a1573fac2c631eff8db8ea3079 | stackdio/stackdio/__init__.py | stackdio/stackdio/__init__.py | # -*- coding: utf-8 -*-
# Copyright 2014, Digital Reasoning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import sys
from .version import __version__, __version_info__ # NOQA
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
try:
from .celery import app as celery_app
except ImportError:
sys.stderr.write('Not importing celery... Ignore if this is running setup.py.\n')
__copyright__ = "Copyright 2014, Digital Reasoning"
__license__ = "Apache License Version 2.0, January 2004"
__maintainer__ = "https://github.com/stackdio/stackdio"
| # -*- coding: utf-8 -*-
# Copyright 2014, Digital Reasoning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import sys
from .version import __version__, __version_info__ # NOQA
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
try:
from .celery import app as celery_app
except ImportError:
sys.stderr.write("Not importing celery... "
"Ignore if this if you're currently running setup.py.\n")
__copyright__ = "Copyright 2014, Digital Reasoning"
__license__ = "Apache License Version 2.0, January 2004"
__maintainer__ = "https://github.com/stackdio/stackdio"
| Print a more useful warning message | Print a more useful warning message
| Python | apache-2.0 | stackdio/stackdio,clarkperkins/stackdio,stackdio/stackdio,clarkperkins/stackdio,clarkperkins/stackdio,clarkperkins/stackdio,stackdio/stackdio,stackdio/stackdio |
067bbbc6c9edbf55606fe6f236c70affd86a1fc0 | tests/convert/test_unit.py | tests/convert/test_unit.py | from unittest.mock import patch
from smif.convert.unit import parse_unit
def test_parse_unit_valid():
"""Parse a valid unit
"""
meter = parse_unit('m')
assert str(meter) == 'meter'
@patch('smif.convert.unit.LOGGER.warning')
def test_parse_unit_invalid(warning_logger):
"""Warn if unit not recognised
"""
unit = 'unrecognisable'
parse_unit(unit)
msg = "Unrecognised unit: %s"
warning_logger.assert_called_with(msg, unit)
| import numpy as np
from unittest.mock import patch
from smif.convert.unit import parse_unit
from smif.convert import UnitConvertor
def test_parse_unit_valid():
"""Parse a valid unit
"""
meter = parse_unit('m')
assert str(meter) == 'meter'
@patch('smif.convert.unit.LOGGER.warning')
def test_parse_unit_invalid(warning_logger):
"""Warn if unit not recognised
"""
unit = 'unrecognisable'
parse_unit(unit)
msg = "Unrecognised unit: %s"
warning_logger.assert_called_with(msg, unit)
def test_convert_unit():
data = np.array([[1, 2], [3, 4]], dtype=float)
convertor = UnitConvertor()
actual = convertor.convert(data, 'liter', 'milliliter')
expected = np.array([[1000, 2000], [3000, 4000]], dtype=float)
np.allclose(actual, expected)
| Add test for normal unit conversion | Add test for normal unit conversion
| Python | mit | tomalrussell/smif,tomalrussell/smif,nismod/smif,nismod/smif,tomalrussell/smif,nismod/smif,nismod/smif,willu47/smif,willu47/smif,willu47/smif,willu47/smif,tomalrussell/smif |
c56a6c2f861d50d2bdc38ee33d30e4ef614a2de0 | tests/sim/test_entities.py | tests/sim/test_entities.py | import unittest
from hunting.sim.entities import *
class TestFighter(unittest.TestCase):
def test_minimum_speed_is_one(self):
self.assertEqual(Fighter(1, 1, 1, 1, base_speed=-5).speed, 1)
self.assertEqual(Fighter(1, 1, 1, 1, base_speed=0).speed, 1)
| import unittest
from hunting.sim.entities import *
class TestPropertyEffect(unittest.TestCase):
def setUp(self):
self.fighter = Fighter(100, 100, 100, 0, base_speed=100)
def test_add_remove_power(self):
power_buff = PropertyEffect(PROPERTY_POWER, value=100)
self.fighter.add_effect(power_buff)
self.assertEqual(self.fighter.power, 200)
self.fighter.remove_effect(power_buff)
self.assertEqual(self.fighter.power, 100)
def test_add_remove_speed(self):
speed_buff = PropertyEffect(PROPERTY_SPEED, value=100)
self.fighter.add_effect(speed_buff)
self.assertEqual(self.fighter.speed, 200)
self.fighter.remove_effect(speed_buff)
self.assertEqual(self.fighter.speed, 100)
def test_add_remove_defense(self):
defense_buff = PropertyEffect(PROPERTY_DEFENSE, value=100)
self.fighter.add_effect(defense_buff)
self.assertEqual(self.fighter.defense, 200)
self.fighter.remove_effect(defense_buff)
self.assertEqual(self.fighter.defense, 100)
class TestFighter(unittest.TestCase):
def test_minimum_speed_is_one(self):
self.assertEqual(Fighter(1, 1, 1, 1, base_speed=-5).speed, 1)
self.assertEqual(Fighter(1, 1, 1, 1, base_speed=0).speed, 1)
| Add failing tests for buffs | Add failing tests for buffs
| Python | mit | MoyTW/RL_Arena_Experiment |
51660291b043b88eab599c59d8c1ef7ae9dc74d7 | src/core/models.py | src/core/models.py | from django.db import models
from django.contrib.auth.models import User
from util.session import get_or_generate_session_name
class Session(models.Model):
name = models.CharField(max_length=255)
user = models.ForeignKey(User, blank=True, null=True)
started_at = models.DateTimeField('started at', auto_now_add=True)
# On Python 3: def __str__(self):
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
existing_session_names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
self.name = get_or_generate_session_name(self.name, existing_session_names)
super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
class Spec(models.Model):
code = models.TextField()
session = models.ForeignKey(Session)
author = models.ForeignKey(User, verbose_name='The author of this last update.')
tests_passed = models.NullBooleanField(default=False)
saved_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'saved_at'
| from django.db import models
from django.contrib.auth.models import User
from util.session import get_or_generate_session_name
from util.session import DEFAULT_SESSION_NAME_PREFIX
class Session(models.Model):
name = models.CharField(max_length=255)
user = models.ForeignKey(User, blank=True, null=True)
started_at = models.DateTimeField('started at', auto_now_add=True)
# On Python 3: def __str__(self):
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
existing_session_names = Session.objects.filter(name__startswith=DEFAULT_SESSION_NAME_PREFIX, user=self.user).only('name')
self.name = get_or_generate_session_name(self.name, existing_session_names)
super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
class Spec(models.Model):
code = models.TextField()
session = models.ForeignKey(Session)
author = models.ForeignKey(User, verbose_name='The author of this last update.')
tests_passed = models.NullBooleanField(default=False)
saved_at = models.DateTimeField(auto_now_add=True)
class Meta:
get_latest_by = 'saved_at'
| Use the existing default name. | Use the existing default name. | Python | mit | uxebu/tddbin-backend,uxebu/tddbin-backend |
9c6f3e1994f686e57092a7cd947c49b4f857743e | apps/predict/urls.py | apps/predict/urls.py | """
Predict app's urls
"""
#
# pylint: disable=bad-whitespace
#
from django.conf.urls import patterns, include, url
from .views import *
def url_tree(regex, *urls):
"""Quick access to stitching url patterns"""
return url(regex, include(patterns('', *urls)))
urlpatterns = patterns('',
url(r'^$', Datasets.as_view(), name="view_my_datasets"),
url_tree(r'^upload/',
url(r'^$', UploadChoices.as_view(), name="upload"),
url(r'^manual/$', UploadManual.as_view(), name="upload_manual"),
url_tree(r'^(?P<type>[\w-]+)/',
url(r'^$', UploadView.as_view(), name="upload"),
url(r'^(?P<fastq>[\w-]+)/$', UploadView.as_view(), name="upload"),
),
),
url_tree(r'^(?P<slug>\w{32})/',
url(r'^$', DatasetView.as_view(), name="view_single_dataset"),
url(r'^callback/$', Callback.as_view(), name="callback"),
url(r'^note/$', AddNote.as_view(), name="add_note"),
),
)
| """
Predict app's urls
"""
#
# pylint: disable=bad-whitespace
#
from django.conf.urls import patterns, include, url
from .views import *
def url_tree(regex, *urls):
"""Quick access to stitching url patterns"""
return url(regex, include(patterns('', *urls)))
urlpatterns = patterns('',
url(r'^$', Datasets.as_view(), name="view_my_datasets"),
url_tree(r'^upload/',
url(r'^$', UploadChoices.as_view(), name="upload"),
url(r'^(?P<type>[\w-]+)/', UploadView.as_view(), name="upload"),
),
url_tree(r'^(?P<slug>\w{32})/',
url(r'^$', DatasetView.as_view(), name="view_single_dataset"),
url(r'^note/$', AddNote.as_view(), name="add_note"),
),
)
| Remove callback url and bring uploads together | Remove callback url and bring uploads together
| Python | agpl-3.0 | IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site |
a077a5b7731e7d609b5c3adc8f8176ad79053f17 | rmake/lib/twisted_extras/tools.py | rmake/lib/twisted_extras/tools.py | #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from twisted.internet import defer
class Serializer(object):
def __init__(self):
self._lock = defer.DeferredLock()
self._waiting = {}
def call(self, func, args=(), kwargs=None, collapsible=False):
d = self._lock.acquire()
self._waiting[d] = collapsible
if not kwargs:
kwargs = {}
@d.addCallback
def _locked(_):
if collapsible and len(self._waiting) > 1:
# Superseded
return
return func(*args, **kwargs)
@d.addBoth
def _unlock(result):
self._lock.release()
del self._waiting[d]
return result
return d
| #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from twisted.internet import defer
class Serializer(object):
def __init__(self):
self._lock = defer.DeferredLock()
self._waiting = {}
def call(self, func, args=(), kwargs=None, collapsible=False):
d = self._lock.acquire()
self._waiting[d] = collapsible
if not kwargs:
kwargs = {}
@d.addCallback
def _locked(_):
if collapsible and len(self._waiting) > 1:
# Superseded
return
return func(*args, **kwargs)
@d.addBoth
def _unlock(result):
del self._waiting[d]
self._lock.release()
return result
return d
| Fix Serializer locking bug that caused it to skip calls it should have made | Fix Serializer locking bug that caused it to skip calls it should have made
| Python | apache-2.0 | sassoftware/rmake3,sassoftware/rmake3,sassoftware/rmake3 |
42e16bf376a64995a8b70a91829a82d7b0f3e1a1 | gameanalysis/__main__.py | gameanalysis/__main__.py | """Command line module"""
import argparse
import pkgutil
import sys
import gameanalysis
from gameanalysis import script
def create_parser():
"""Create the default parser"""
modules = [imp.find_module(name).load_module(name) for imp, name, _
in pkgutil.iter_modules(script.__path__)]
parser = argparse.ArgumentParser(
description="""Command line access to the game analysis toolkit.""")
parser.add_argument('-V', '--version', action='version',
version='%(prog)s {}'.format(gameanalysis.__version__))
subparsers = parser.add_subparsers(
title='commands', dest='command', metavar='<command>', help="""The
commands to execute. Available commands are:""")
for module in modules:
subparser = module.add_parser(subparsers)
subparser.main = module.main
return parser, subparsers.choices
def amain(*argv):
"""Entry point for game analysis"""
parser, commands = create_parser()
args = parser.parse_args(argv)
if args.command is None:
parser.print_help()
sys.exit(1)
else:
commands[args.command].main(args)
def main():
"""Entry point for game analysis"""
amain(*sys.argv[1:])
if __name__ == '__main__':
main()
| """Command line module"""
import argparse
import logging
import pkgutil
import sys
import gameanalysis
from gameanalysis import script
def create_parser():
"""Create the default parser"""
modules = [imp.find_module(name).load_module(name) for imp, name, _
in pkgutil.iter_modules(script.__path__)]
parser = argparse.ArgumentParser(
description="""Command line access to the game analysis toolkit.""")
parser.add_argument('-V', '--version', action='version',
version='%(prog)s {}'.format(gameanalysis.__version__))
parser.add_argument(
'-v', '--verbose', action='count', default=0, help="""Set the verbosity
level depending on the number of times specified, up to a maximum of
three.""")
subparsers = parser.add_subparsers(
title='commands', dest='command', metavar='<command>', help="""The
commands to execute. Available commands are:""")
for module in modules:
subparser = module.add_parser(subparsers)
subparser.main = module.main
return parser, subparsers.choices
def amain(*argv):
"""Entry point for game analysis"""
parser, commands = create_parser()
args = parser.parse_args(argv)
if args.command is None:
parser.print_help()
sys.exit(1)
else:
logging.basicConfig(level=40 - 10 * min(args.verbose, 3))
commands[args.command].main(args)
def main():
"""Entry point for game analysis"""
amain(*sys.argv[1:])
if __name__ == '__main__':
main()
| Add logging verbosity to game analysis | Add logging verbosity to game analysis
| Python | apache-2.0 | egtaonline/GameAnalysis |
f5d0f8cd145c759cff6d5f6cfeb46459efaa63ca | sale_line_description/__openerp__.py | sale_line_description/__openerp__.py | # -*- coding: utf-8 -*-
#
#
# Copyright (C) 2013-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
'name': "Sale line description",
'version': '1.0',
'category': 'Sales Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'sale',
],
"data": [
'security/sale_security.xml',
'res_config_view.xml',
],
"active": False,
"installable": True
}
| # -*- coding: utf-8 -*-
#
#
# Copyright (C) 2013-15 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
'name': "Sale line description",
'version': '1.0',
'category': 'Sales Management',
'author': "Agile Business Group, Odoo Community Association (OCA)",
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
"depends": [
'sale',
],
"data": [
'security/sale_security.xml',
'res_config_view.xml',
],
"installable": True
}
| Remove active key since is deprecated | Remove active key since is deprecated
| Python | agpl-3.0 | anas-taji/sale-workflow,brain-tec/sale-workflow,luistorresm/sale-workflow,factorlibre/sale-workflow,BT-fgarbely/sale-workflow,adhoc-dev/sale-workflow,jjscarafia/sale-workflow,richard-willowit/sale-workflow,VitalPet/sale-workflow,akretion/sale-workflow,ddico/sale-workflow,fevxie/sale-workflow,xpansa/sale-workflow,Endika/sale-workflow,alexsandrohaag/sale-workflow,acsone/sale-workflow,kittiu/sale-workflow,BT-cserra/sale-workflow,akretion/sale-workflow,numerigraphe/sale-workflow,diagramsoftware/sale-workflow,BT-ojossen/sale-workflow,thomaspaulb/sale-workflow,damdam-s/sale-workflow,jabibi/sale-workflow,numerigraphe/sale-workflow,clubit/sale-workflow,BT-jmichaud/sale-workflow,open-synergy/sale-workflow,Eficent/sale-workflow,anybox/sale-workflow,Antiun/sale-workflow,guewen/sale-workflow,acsone/sale-workflow,kittiu/sale-workflow,brain-tec/sale-workflow,Rona111/sale-workflow |
c6926dda0a9e6e1515721e54788c29d0ef8b58a4 | tests/test_sqlcompletion.py | tests/test_sqlcompletion.py | from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggest_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggest_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggest_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggest_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_multiple_cols_suggest_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_multiple_tables_suggest_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
| from pgcli.packages.sqlcompletion import suggest_type
def test_select_suggests_cols_with_table_scope():
suggestion = suggest_type('SELECT FROM tabl', 'SELECT ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_where_suggests_columns_functions():
suggestion = suggest_type('SELECT * FROM tabl WHERE ',
'SELECT * FROM tabl WHERE ')
assert suggestion == ('columns-and-functions', ['tabl'])
def test_lparen_suggests_cols():
suggestion = suggest_type('SELECT MAX( FROM tbl', 'SELECT MAX(')
assert suggestion == ('columns', ['tbl'])
def test_select_suggests_cols_and_funcs():
suggestion = suggest_type('SELECT ', 'SELECT ')
assert suggestion == ('columns-and-functions', [])
def test_from_suggests_tables():
suggestion = suggest_type('SELECT * FROM ', 'SELECT * FROM ')
assert suggestion == ('tables', [])
def test_distinct_suggests_cols():
suggestion = suggest_type('SELECT DISTINCT ', 'SELECT DISTINCT ')
assert suggestion == ('columns', [])
def test_col_comma_suggests_cols():
suggestion = suggest_type('SELECT a, b, FROM tbl', 'SELECT a, b,')
assert suggestion == ('columns-and-functions', ['tbl'])
def test_table_comma_suggests_tables():
suggestion = suggest_type('SELECT a, b FROM tbl1, ',
'SELECT a, b FROM tbl1, ')
assert suggestion == ('tables', [])
def test_into_suggests_tables():
suggestion = suggest_type('INSERT INTO ',
'INSERT INTO ')
assert suggestion == ('tables', [])
| Add a test for where clause and rename all tests functions. | Add a test for where clause and rename all tests functions.
| Python | bsd-3-clause | thedrow/pgcli,d33tah/pgcli,n-someya/pgcli,bitmonk/pgcli,joewalnes/pgcli,yx91490/pgcli,TamasNo1/pgcli,MattOates/pgcli,TamasNo1/pgcli,j-bennet/pgcli,lk1ngaa7/pgcli,zhiyuanshi/pgcli,koljonen/pgcli,dbcli/vcli,dbcli/pgcli,lk1ngaa7/pgcli,dbcli/pgcli,j-bennet/pgcli,suzukaze/pgcli,janusnic/pgcli,darikg/pgcli,johshoff/pgcli,nosun/pgcli,w4ngyi/pgcli,darikg/pgcli,stuartquin/pgcli,dbcli/vcli,bitemyapp/pgcli,d33tah/pgcli,suzukaze/pgcli,bitemyapp/pgcli,koljonen/pgcli,thedrow/pgcli,joewalnes/pgcli,nosun/pgcli,yx91490/pgcli,johshoff/pgcli,janusnic/pgcli,n-someya/pgcli,bitmonk/pgcli,zhiyuanshi/pgcli,w4ngyi/pgcli,MattOates/pgcli |
52982c735f729ddf0a9c020d495906c4a4899462 | txircd/modules/rfc/umode_i.py | txircd/modules/rfc/umode_i.py | from twisted.plugin import IPlugin
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class InvisibleMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "InvisibleMode"
core = True
affectedActions = [ "showchanneluser" ]
def actions(self):
return [ ("modeactioncheck-user-i-showchanneluser", 1, self.isInvisible) ]
def userModes(self):
return [ ("i", ModeType.NoParam, self) ]
def isInvisible(self, user, channel, fromUser, userSeeing):
if "i" in user.modes:
return True
return None
def apply(self, actionName, user, param, channel, fromUser, sameUser):
if user != sameUser:
return None
if not channel or fromUser not in channel.users:
return False
return None
invisibleMode = InvisibleMode() | from twisted.plugin import IPlugin
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class InvisibleMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "InvisibleMode"
core = True
affectedActions = [ "showchanneluser", "showuser" ]
def actions(self):
return [ ("modeactioncheck-user-i-showchanneluser", 1, self.isInvisibleChan),
("modeactioncheck-user-i-showuser", 1, self.isInvisibleUser) ]
def userModes(self):
return [ ("i", ModeType.NoParam, self) ]
def isInvisibleChan(self, user, channel, fromUser, userSeeing):
if "i" in user.modes:
return True
return None
def isInvisibleUser(self, user, fromUser, userSeeing):
if "i" in user.modes:
return True
return None
def apply(self, actionName, user, param, *params):
if actionName == "showchanneluser":
return self.applyChannels(user, *params)
return self.applyUsers(user, *params)
def applyChannels(self, user, channel, fromUser, sameUser):
if user != sameUser:
return None
if not channel or fromUser not in channel.users:
return False
return None
def applyUsers(self, user, fromUser, sameUser):
if user != sameUser:
return None
for channel in fromUser.channels:
if user in channel.users:
return None
return False
invisibleMode = InvisibleMode() | Make the invisible check action not necessarily require an accompanying channel | Make the invisible check action not necessarily require an accompanying channel
| Python | bsd-3-clause | Heufneutje/txircd,ElementalAlchemist/txircd |
c447ca3d85d9862be38034be85b2328e3d6b02a3 | vcproj/tests/test_solution.py | vcproj/tests/test_solution.py | import vcproj.solution
import tempfile, filecmp
import pytest
@pytest.fixture(scope="session")
def test_sol():
return vcproj.solution.parse('vcproj/tests/test_solution/vc15sol/vc15sol.sln')
def test_all_projects(test_sol):
projects = test_sol.project_names()
len(list(projects)) == 59
def test_project_names(test_sol):
projects = test_sol.project_names()
assert 'Helper' in projects
assert 'MDraw' in projects
def test_project_files(test_sol):
proj_files = list(test_sol.project_files())
assert 'PrivateLib\\PrivateLib.vcxproj' in proj_files
assert 'Helper\\Helper.vcxproj' in proj_files
assert 'Resource\\Resource.vcxproj' in proj_files
def test_dependencies(test_sol):
deps = list(test_sol.dependencies('DXHHTest'))
assert deps == ['Public', 'MDraw']
def test_set_dependencies():
s = vcproj.solution.parse('vcproj/tests/test_solution/test.sln')
s.set_dependencies('lib1', ['lib2'])
assert list(s.dependencies('lib1')) == ['lib2']
def test_write():
s = vcproj.solution.parse('vcproj/tests/test_solution/test.sln')
temp = tempfile.NamedTemporaryFile()
temp.close()
s.write(temp.name)
assert filecmp.cmp('vcproj/tests/test_solution/test.sln', temp.name)
| import vcproj.solution
import tempfile, filecmp
import pytest
@pytest.fixture(scope="session")
def test_sol():
return vcproj.solution.parse('vcproj/tests/test_solution/test.sln')
def test_project_files(test_sol):
assert list(test_sol.project_files()) == ['test\\test.vcxproj', 'lib1\\lib1.vcxproj', 'lib2\\lib2.vcxproj']
def test_dependencies(test_sol):
assert list(test_sol.dependencies('test')) == ['lib1', 'lib2']
def test_project_names(test_sol):
assert list(test_sol.project_names()) == ['test', 'lib1', 'lib2']
def test_set_dependencies(test_sol):
test_sol.set_dependencies('lib1', ['lib2'])
assert list(test_sol.dependencies('lib1')) == ['lib2']
def test_write():
s = vcproj.solution.parse('vcproj/tests/test_solution/test.sln')
temp = tempfile.NamedTemporaryFile()
temp.close()
s.write(temp.name)
assert filecmp.cmp('vcproj/tests/test_solution/test.sln', temp.name)
| Add back in test of 2010 solution | Add back in test of 2010 solution
| Python | unlicense | jhandley/pyvcproj,jhandley/pyvcproj,jhandley/pyvcproj |
fb91bf1e7c1677124f4aa1ce9c534fb437145980 | pygametemplate/helper.py | pygametemplate/helper.py | """Module containing helper functions for using pygame."""
def load_class_assets(calling_object, assets_dict):
"""Load class assets. Only call if class_assets_loaded is False."""
calling_class = type(calling_object)
for attribute_name in assets_dict:
setattr(calling_class, attribute_name, assets_dict[attribute_name])
setattr(calling_class, "class_assets_loaded", True)
def wrap_text(text, font, max_width):
"""
Returns an array of lines which can be blitted beneath each other
in the given font in a box of the given maximum width.
"""
def wrap_paragraph(paragraph):
"""Wraps text that doesn't contain newlines."""
def too_long(string):
return font.size(string)[0] > max_width
def raise_word_too_long_error(word):
raise ValueError("\"%s\" is too long to be wrapped." % word)
lines = []
words = paragraph.split()
line = words.pop(0)
if too_long(line):
raise_word_too_long_error(line)
for word in words:
if too_long(word):
raise_word_too_long_error(word)
if too_long(" ".join((line, word))):
lines.append(line)
line = word
else:
line = " ".join((line, word))
lines.append(line)
return lines
paragraphs = text.split("\n")
return sum(map(wrap_paragraph, paragraphs), [])
| """Module containing helper functions for using pygame."""
def load_class_assets(calling_object, assets_dict):
"""Load class assets. Only call if class_assets_loaded is False."""
calling_class = type(calling_object)
for attribute_name in assets_dict:
setattr(calling_class, attribute_name, assets_dict[attribute_name])
setattr(calling_class, "class_assets_loaded", True)
def wrap_text(text, font, max_width):
"""
Returns an array of lines which can be blitted beneath each other
in the given font in a box of the given maximum width.
"""
def wrap_paragraph(paragraph):
"""Wraps text that doesn't contain newlines."""
def too_long(string):
return font.size(string)[0] > max_width
def raise_word_too_long_error(word):
raise ValueError(f"'{word}' is too long to be wrapped.")
lines = []
words = paragraph.split()
line = words.pop(0)
if too_long(line):
raise_word_too_long_error(line)
for word in words:
if too_long(word):
raise_word_too_long_error(word)
if too_long(" ".join((line, word))):
lines.append(line)
line = word
else:
line = " ".join((line, word))
lines.append(line)
return lines
paragraphs = text.split("\n")
return sum(map(wrap_paragraph, paragraphs), [])
| Replace % with f-string :) | Replace % with f-string :)
| Python | mit | AndyDeany/pygame-template |
b57d0b0d3d65995270318d94b551d8bacda73d22 | baseline.py | baseline.py | #/usr/bin/python
""" Baseline example that needs to be beaten """
import numpy as np
import matplotlib.pyplot as plt
x, y, yerr = np.loadtxt("data/data.txt", unpack=True)
A = np.vstack((np.ones_like(x), x)).T
C = np.diag(yerr * yerr)
cov = np.linalg.inv(np.dot(A.T, np.linalg.solve(C, A)))
b_ls, m_ls = np.dot(cov, np.dot(A.T, np.linalg.solve(C, y)))
fig, ax = plt.subplots()
ax.errorbar(x, y, yerr=yerr, c="k", fmt="o")
x_range = np.array([min(x), max(x)])
ax.plot(x_range, m_ls * x_range + b_ls, c="#666666", lw=2, zorder=-100)
ax.set_xlabel("x")
ax.set_ylabel("y")
fig.savefig("assets/result.png")
print m_ls, b_ls
| #/usr/bin/python
""" Baseline example that needs to be beaten """
import os
import numpy as np
import matplotlib.pyplot as plt
x, y, yerr = np.loadtxt("data/data.txt", unpack=True)
A = np.vstack((np.ones_like(x), x)).T
C = np.diag(yerr * yerr)
cov = np.linalg.inv(np.dot(A.T, np.linalg.solve(C, A)))
b_ls, m_ls = np.dot(cov, np.dot(A.T, np.linalg.solve(C, y)))
fig, ax = plt.subplots()
ax.errorbar(x, y, yerr=yerr, c="k", fmt="o")
x_range = np.array([min(x), max(x)])
ax.plot(x_range, m_ls * x_range + b_ls, c="#666666", lw=2, zorder=-100)
ax.set_xlabel("x")
ax.set_ylabel("y")
fig.savefig("assets/result.png")
print("Results of m, b: ({0:.4f} {1:.4f})".format(m_ls, b_ls))
# Let's store result parameters in environment variables, and we will deal
# with more complex values (e.g., uncertainties, etc) later
os.environ["RESULT_M"] = "{0:.5f}".format(m_ls)
os.environ["RESULT_B"] = "{0:.5f}".format(b_ls)
| Add results to environment parameters RESULT_M, RESULT_B | Add results to environment parameters RESULT_M, RESULT_B
| Python | mit | arfon/dottravis,arfon/dottravis |
b960962472f1c40fbaa1338d2cba316810ba119b | tt_dailyemailblast/admin.py | tt_dailyemailblast/admin.py | from django.contrib import admin
from django.db import models as django_models
from tinymce.widgets import TinyMCE
from .models import (Recipient, RecipientList, DailyEmailBlast,
DailyEmailBlastType)
def send_blasts(model_admin, request, qs):
for blast in qs:
print blast.send()
class RecipientInline(admin.TabularInline):
model = RecipientList.recipients.through
verbose_name = 'recipient'
verbose_name_plural = 'recipients'
class RecipientListAdmin(admin.ModelAdmin):
model = RecipientList
inlines = [RecipientInline]
class RecipientListInline(admin.TabularInline):
model = DailyEmailBlast.recipient_lists.through
verbose_name = 'recipient list'
verbose_name_plural = 'recipient lists'
class DailyEmailBlastAdmin(admin.ModelAdmin):
model = DailyEmailBlast
inlines = [RecipientListInline]
formfield_overrides = {
django_models.TextField: {'widget': TinyMCE()},
}
actions = [send_blasts]
admin.site.register(DailyEmailBlastType)
admin.site.register(Recipient)
admin.site.register(RecipientList, RecipientListAdmin)
admin.site.register(DailyEmailBlast, DailyEmailBlastAdmin)
| from django.contrib import admin
from django.db import models as django_models
from tinymce.widgets import TinyMCE
from .models import (Recipient, RecipientList, DailyEmailBlast,
DailyEmailBlastType)
def send_blasts(model_admin, request, qs):
for blast in qs:
print blast.send()
class RecipientInline(admin.TabularInline):
model = RecipientList.recipients.through
verbose_name = 'recipient'
verbose_name_plural = 'recipients'
class RecipientListAdmin(admin.ModelAdmin):
model = RecipientList
inlines = [RecipientInline]
class RecipientListInline(admin.TabularInline):
model = DailyEmailBlast.recipient_lists.through
verbose_name = 'recipient list'
verbose_name_plural = 'recipient lists'
class DailyEmailBlastAdmin(admin.ModelAdmin):
model = DailyEmailBlast
inlines = [RecipientListInline]
list_display = ('blast_type', 'created_on', 'sent_on',
'send_completed_on',)
formfield_overrides = {
django_models.TextField: {'widget': TinyMCE()},
}
actions = [send_blasts]
admin.site.register(DailyEmailBlastType)
admin.site.register(Recipient)
admin.site.register(RecipientList, RecipientListAdmin)
admin.site.register(DailyEmailBlast, DailyEmailBlastAdmin)
| Include all dates in blast list display | Include all dates in blast list display
| Python | apache-2.0 | texastribune/tt_dailyemailblast,texastribune/tt_dailyemailblast |
ef404dad280ec2f7317e0176d3e91b20d1bbe7c0 | inbox/notify/__init__.py | inbox/notify/__init__.py | from redis import StrictRedis, BlockingConnectionPool
from inbox.config import config
import json
REDIS_HOSTNAME = config.get('NOTIFY_QUEUE_REDIS_HOSTNAME')
REDIS_DB = int(config.get('NOTIFY_QUEUE_REDIS_DB'))
MAX_CONNECTIONS = 40
redis_pool = BlockingConnectionPool(
max_connections=MAX_CONNECTIONS,
host=REDIS_HOSTNAME, port=6379, db=REDIS_DB)
def notify_transaction(transaction, db_session):
from inbox.models import Namespace
# We're only interested in "message created" events
if transaction.command != 'insert' or transaction.object_type != 'message':
return
namespace = db_session.query(Namespace).get(transaction.namespace_id)
redis_client = StrictRedis(connection_pool=redis_pool)
job = {
'class': 'ProcessMessageQueue',
'args': [
'nylas_notification',
namespace.public_id,
transaction.object_public_id
]
}
pipeline = redis_client.pipeline()
pipeline.sadd('resque:queues', 'nylas_default')
pipeline.lpush('resque:queue:nylas_default', json.dumps(job))
pipeline.execute()
pipeline.reset()
| import json
from redis import StrictRedis, BlockingConnectionPool
from inbox.config import config
from nylas.logging import get_logger
log = get_logger()
REDIS_HOSTNAME = config.get('NOTIFY_QUEUE_REDIS_HOSTNAME')
REDIS_PORT = int(config.get('NOTIFY_QUEUE_REDIS_PORT', 6379))
REDIS_DB = int(config.get('NOTIFY_QUEUE_REDIS_DB'))
MAX_CONNECTIONS = 40
redis_pool = BlockingConnectionPool(
max_connections=MAX_CONNECTIONS,
host=REDIS_HOSTNAME, port=REDIS_PORT, db=REDIS_DB)
def notify_transaction(transaction, db_session):
from inbox.models import Namespace
# We're only interested in "message created" events
if transaction.command != 'insert' or transaction.object_type != 'message':
return
log.info('Transaction prepared to enqueue',
transaction_id=transaction.record_id)
namespace = db_session.query(Namespace).get(transaction.namespace_id)
redis_client = StrictRedis(connection_pool=redis_pool)
job = {
'class': 'ProcessMessageQueue',
'args': [
'nylas_notification',
namespace.public_id,
transaction.object_public_id
]
}
try:
pipeline = redis_client.pipeline()
pipeline.sadd('resque:queues', 'nylas_default')
pipeline.lpush('resque:queue:nylas_default', json.dumps(job))
log.info('Transaction enqueued',
transaction_id=transaction.record_id,
namespace_id=transaction.namespace_id,
job_details=job)
pipeline.execute()
pipeline.reset()
except Exception as e:
log.error('Transaction not enqueued!',
transaction_id=transaction.record_id,
namespace_id=transaction.namespace_id,
job_details=job,
error=e)
raise e
| Add logger an try/except logic | Add logger an try/except logic
| Python | agpl-3.0 | jobscore/sync-engine,jobscore/sync-engine,jobscore/sync-engine,jobscore/sync-engine |
45261d57bdb1ee23c84ea6c5d83550b7e84c26f1 | highlander/highlander.py | highlander/highlander.py | from functools import wraps
from logging import getLogger
from os import getcwd, unlink
from os.path import join, realpath, isfile
from psutil import Process
logger = getLogger(__name__)
def one(f):
@wraps(f)
def decorator():
pid_file = realpath(join(getcwd(), '.pid'))
if _is_running(pid_file):
exit(0)
_set_running(pid_file)
try:
f()
finally:
unlink(pid_file)
return decorator
def _is_running():
pass
def _read_pid_file(filename):
if not isfile(str(filename)):
return None
with open(filename, 'r') as f:
pid, create_time = f.read().split(',')
return Process(int(pid))
def _set_running(filename):
p = Process()
with open(filename, 'w') as f:
f.write('{},{}'.format(p.pid, p.create_time()))
| from functools import wraps
from logging import getLogger
from os import getcwd, unlink
from os.path import join, realpath, isfile
from psutil import Process
logger = getLogger(__name__)
def one(f):
@wraps(f)
def decorator():
pid_file = realpath(join(getcwd(), '.pid'))
if _is_running(pid_file):
exit(0)
_set_running(pid_file)
try:
f()
finally:
unlink(pid_file)
return decorator
def _is_running():
pass
def _read_pid_file(filename):
if not isfile(str(filename)):
return None
with open(filename, 'r') as f:
pid, create_time = f.read().split(',')
return Process(int(pid))
def _set_running(filename):
if isfile(str(filename)):
raise Exception('PID file already exists.')
p = Process()
with open(filename, 'w') as f:
f.write('{},{}'.format(p.pid, p.create_time()))
| Check to make sure the file doesn't exist if we get to the set running state. | Check to make sure the file doesn't exist if we get to the set running state.
| Python | mit | chriscannon/highlander |
3d1612e5f9e20cf74a962dd4ca1b538776d5ec7e | StationPopWithoutTrain.py | StationPopWithoutTrain.py | def before_train_station_pop(station, escalator):
# calculate the number of people waiting to depart on the train by the time the train arive.
station.travelers_departing = station.travelers_departing + (escalator.rate * escalators.entering * station.train_wait)
# number of people who have arived and want to leave.
station.travelers_arriving = station.travelers_arriving - (escalator.rate * station.train_time
# Get the total station population.
population = station.pop
return population
| """This module calculates the number of people in the station by the time the next train arives"""
def before_train_station_pop(station, escalator):
"""This function calculates the total number of people as a sume of people
waiting to board the next train, and the number of people waiting to leave
the station by the elebvators."""
station.travelers_departing = station.travelers_departing + (escalator.rate * station.escalators_entering * station.train_wait)
# number of people who have arived and want to leave.
station.travelers_arriving = station.travelers_arriving - (escalator.rate * station.escalators_exiting * station.train_wait)
| Simplify the function to calculate the platform population between trains | Simplify the function to calculate the platform population between trains
The function to calculate the change in platform population in the time
between trains was needlessly complex. It has now been simplified.
ref #17
| Python | mit | ForestPride/rail-problem |
b3befb47d4b48e83b42fc6b10a10269d32cafb4e | src-backend/api/urls.py | src-backend/api/urls.py | from django.conf.urls import url, include
from views import ProcedureViewSet
from rest_framework import routers
router = routers.SimpleRouter()
router.register(r'procedures', ProcedureViewSet)
urlpatterns = [
url(r'^', include(router.urls))
]
| from django.conf.urls import url, include
from views import ProcedureViewSet
from rest_framework import routers
router = routers.SimpleRouter(trailing_slash=False)
router.register(r'procedures', ProcedureViewSet)
urlpatterns = [
url(r'^', include(router.urls))
]
| Remove trailing slash from the router | Remove trailing slash from the router
| Python | bsd-3-clause | SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder |
c40fc13dca5a0596a72d5c26214777f8a2845675 | tests/test_repr.py | tests/test_repr.py | """ Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = p.__str__()
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = p.__str__()
# verify
assert isinstance(value, str)
| """ Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
| Use str(p) and not p.__str__() | Use str(p) and not p.__str__()
| Python | isc | bangi123/pexpect,Depado/pexpect,dongguangming/pexpect,Depado/pexpect,quatanium/pexpect,Wakeupbuddy/pexpect,Wakeupbuddy/pexpect,Depado/pexpect,crdoconnor/pexpect,crdoconnor/pexpect,quatanium/pexpect,nodish/pexpect,quatanium/pexpect,crdoconnor/pexpect,dongguangming/pexpect,Wakeupbuddy/pexpect,Wakeupbuddy/pexpect,Depado/pexpect,bangi123/pexpect,blink1073/pexpect,blink1073/pexpect,nodish/pexpect,blink1073/pexpect,bangi123/pexpect,nodish/pexpect,dongguangming/pexpect,bangi123/pexpect,dongguangming/pexpect |
6110bc1137f5e3f1f12249c366323c6c0b48dbe3 | IPython/nbconvert/utils/base.py | IPython/nbconvert/utils/base.py | """Global configuration class."""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import List
from IPython.config.configurable import LoggingConfigurable
from IPython.utils.traitlets import Unicode
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
class NbConvertBase(LoggingConfigurable):
"""Global configurable class for shared config
Useful for display data priority that might be use by many transformers
"""
display_data_priority = List(['javascript', 'html', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text'],
config=True,
help= """
An ordered list of preferred output type, the first
encountered will usually be used when converting discarding
the others.
"""
)
default_language = Unicode('ipython', config=True, help='default highlight language')
def __init__(self, **kw):
super(NbConvertBase, self).__init__(**kw)
| """Global configuration class."""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import List
from IPython.config.configurable import LoggingConfigurable
from IPython.utils.traitlets import Unicode
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
class NbConvertBase(LoggingConfigurable):
"""Global configurable class for shared config
Useful for display data priority that might be use by many transformers
"""
display_data_priority = List(['html', 'javascript', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text'],
config=True,
help= """
An ordered list of preferred output type, the first
encountered will usually be used when converting discarding
the others.
"""
)
default_language = Unicode('ipython', config=True, help='default highlight language')
def __init__(self, **kw):
super(NbConvertBase, self).__init__(**kw)
| Revert "Moved JS in front of HTML" | Revert "Moved JS in front of HTML"
This reverts commit 8b0164edde418138d4e28c20d63fa422931ae6a8.
| Python | bsd-3-clause | ipython/ipython,ipython/ipython |
9a6467688f567abc405a3fca6c4bfda7b6cd0351 | FileWatcher.py | FileWatcher.py | from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
def on_modified(self, event):
if event.src_path == self.filePath:
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = filePath
self.callback = callback
self.eventHandler = MyEventHandler(filePath, callback)
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.observer.stop()
def resume(self):
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start() | from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.observer.stop()
def resume(self):
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start() | Handle filepaths in an OS independent manner. | Handle filepaths in an OS independent manner.
--CAR
| Python | apache-2.0 | BBN-Q/PyQLab,calebjordan/PyQLab,Plourde-Research-Lab/PyQLab,rmcgurrin/PyQLab |
b50ef13cb25c795a1ad3b2bfdbbb47b709fcbd39 | binding/python/__init__.py | binding/python/__init__.py | # This file is part of SpaceVecAlg.
#
# SpaceVecAlg is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SpaceVecAlg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with SpaceVecAlg. If not, see <http://www.gnu.org/licenses/>.
from _rbdyn import *
| # This file is part of RBDyn.
#
# RBDyn is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RBDyn is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with RBDyn. If not, see <http://www.gnu.org/licenses/>.
from _rbdyn import *
| Fix bad copy/past in licence header. | Fix bad copy/past in licence header.
| Python | bsd-2-clause | jrl-umi3218/RBDyn,jrl-umi3218/RBDyn,gergondet/RBDyn,jrl-umi3218/RBDyn,gergondet/RBDyn,gergondet/RBDyn,jrl-umi3218/RBDyn,gergondet/RBDyn,gergondet/RBDyn |
c1e5822f07e2fe4ca47633ed3dfda7d7bee64b6c | nvchecker/source/aiohttp_httpclient.py | nvchecker/source/aiohttp_httpclient.py | # MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
import atexit
import aiohttp
connector = aiohttp.TCPConnector(limit=20)
__all__ = ['session', 'HTTPError']
class HTTPError(Exception):
def __init__(self, code, message, response):
self.code = code
self.message = message
self.response = response
class BetterClientSession(aiohttp.ClientSession):
async def _request(self, *args, **kwargs):
if hasattr(self, "nv_config") and self.nv_config.get("proxy"):
kwargs.setdefault("proxy", self.nv_config.get("proxy"))
res = await super(BetterClientSession, self)._request(
*args, **kwargs)
if res.status >= 400:
raise HTTPError(res.status, res.reason, res)
return res
session = BetterClientSession(connector=connector, read_timeout=10, conn_timeout=5)
atexit.register(session.close)
| # MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
import atexit
import asyncio
import aiohttp
connector = aiohttp.TCPConnector(limit=20)
__all__ = ['session', 'HTTPError']
class HTTPError(Exception):
def __init__(self, code, message, response):
self.code = code
self.message = message
self.response = response
class BetterClientSession(aiohttp.ClientSession):
async def _request(self, *args, **kwargs):
if hasattr(self, "nv_config") and self.nv_config.get("proxy"):
kwargs.setdefault("proxy", self.nv_config.get("proxy"))
res = await super(BetterClientSession, self)._request(
*args, **kwargs)
if res.status >= 400:
raise HTTPError(res.status, res.reason, res)
return res
session = BetterClientSession(connector=connector)
@atexit.register
def cleanup():
loop = asyncio.get_event_loop()
loop.run_until_complete(session.close())
| Handle graceful exit and timeout | Handle graceful exit and timeout
Timeout was refactored and the defaults work correctly here.
| Python | mit | lilydjwg/nvchecker |
3856b48af3e83f49a66c0c29b81e0a80ad3248d9 | nubes/connectors/aws/connector.py | nubes/connectors/aws/connector.py | import boto3.session
from nubes.connectors import base
class AWSConnector(base.BaseConnector):
def __init__(self, aws_access_key_id, aws_secret_access_key, region_name):
self.connection = boto3.session.Session(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=region_name)
@classmethod
def name(cls):
return "aws"
def create_server(self, image_id, min_count, max_count, **kwargs):
ec2_resource = self.connection.resource("ec2")
server = ec2_resource.create_instances(ImageId=image_id,
MinCount=min_count,
MaxCount=max_count,
**kwargs)
return server
def list_servers(self):
ec2_client = self.connection.client("ec2")
desc = ec2_client.describe_instances()
return desc
def delete_server(self, instance_id):
ec2_resource = self.connection.resource("ec2")
ec2_resource.instances.filter(
InstanceIds=[instance_id]).stop()
ec2_resource.instances.filter(
InstanceIds=[instance_id]).terminate()
| import boto3.session
from nubes.connectors import base
class AWSConnector(base.BaseConnector):
def __init__(self, aws_access_key_id, aws_secret_access_key, region_name):
self.connection = boto3.session.Session(
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=region_name)
self.ec2_resource = self.connection.resource("ec2")
self.ec2_client = self.connection.client("ec2")
@classmethod
def name(cls):
return "aws"
def create_server(self, image_id, min_count, max_count, **kwargs):
server = self.ec2_resource.create_instances(ImageId=image_id,
MinCount=min_count,
MaxCount=max_count,
**kwargs)
return server
def list_servers(self):
desc = self.ec2_client.describe_instances()
return desc
def delete_server(self, instance_id):
self.ec2_resource.instances.filter(
InstanceIds=[instance_id]).stop()
self.ec2_resource.instances.filter(
InstanceIds=[instance_id]).terminate()
| Move client and resource to __init__ | Move client and resource to __init__
* moved the calls to create the ec2 session resource session client
to the init
| Python | apache-2.0 | omninubes/nubes |
770bbf80a78d2f418e47ca2dc641c7dccbb86cac | rollbar/test/asgi_tests/helper.py | rollbar/test/asgi_tests/helper.py | import asyncio
import functools
from rollbar.contrib.asgi import ASGIApp
def async_test_func_wrapper(asyncfunc):
@functools.wraps(asyncfunc)
def wrapper(*args, **kwargs):
try:
loop = asyncio.get_running_loop()
except RuntimeError:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
loop.run_until_complete(asyncfunc(*args, **kwargs))
finally:
loop.close()
else:
loop.run_until_complete(asyncfunc(*args, **kwargs))
return wrapper
@ASGIApp
class FailingTestASGIApp:
def __init__(self):
self.asgi_app = async_test_func_wrapper(self.asgi_app)
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
| import asyncio
import functools
import inspect
import sys
from rollbar.contrib.asgi import ASGIApp
def run(coro):
if sys.version_info >= (3, 7):
return asyncio.run(coro)
assert inspect.iscoroutine(coro)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
return loop.run_until_complete(coro)
finally:
loop.close()
asyncio.set_event_loop(None)
def wrap_async(asyncfunc):
@functools.wraps(asyncfunc)
def wrapper(*args, **kwargs):
run(asyncfunc(*args, **kwargs))
return wrapper
@ASGIApp
class FailingTestASGIApp:
def __init__(self):
self.asgi_app = wrap_async(self.asgi_app)
async def app(self, scope, receive, send):
raise RuntimeError("Invoked only for testing")
| Refactor async wrapper. Use asyncio.run() for Py3.7 | Refactor async wrapper. Use asyncio.run() for Py3.7
| Python | mit | rollbar/pyrollbar |
c32e87894d4baf404d5b300459fc68a6d9d973c8 | zun/db/__init__.py | zun/db/__init__.py | # Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')
| # Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import options
from zun.common import paths
import zun.conf
_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')
options.set_defaults(zun.conf.CONF)
options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
| Remove the duplicated config sqlite_db | Remove the duplicated config sqlite_db
The config sqlite_db has been removed from oslo.db. See here:
https://review.openstack.org/#/c/449437/
Change-Id: I9197b08aeb7baabf2d3fdd4cf4bd06b57a6782ff
| Python | apache-2.0 | kevin-zhaoshuai/zun,kevin-zhaoshuai/zun,kevin-zhaoshuai/zun |
b8ac8edbd12c6b021815e4fa4fd68cfee7dc18cf | frigg/builds/api.py | frigg/builds/api.py | # -*- coding: utf8 -*-
import json
from django.http import HttpResponse, Http404
from django.http.response import JsonResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from frigg.decorators import token_required
from .models import Build, Project
@token_required
@csrf_exempt
def report_build(request):
try:
payload = json.loads(request.body)
build = Build.objects.get(pk=payload['id'])
build.handle_worker_report(payload)
response = JsonResponse({'message': 'Thanks for building it'})
except Build.DoesNotExist:
response = JsonResponse({'error': 'Build not found'})
response.status_code = 404
return response
@csrf_exempt
def build_badge(request, owner, project, branch='master'):
project = get_object_or_404(Project, owner=owner, name=project)
badge = project.get_badge(branch)
if badge is None:
raise Http404
return HttpResponse(content=badge, content_type='image/svg+xml')
| # -*- coding: utf8 -*-
import json
from django.http import HttpResponse, Http404
from django.http.response import JsonResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt
from frigg.decorators import token_required
from .models import Build, Project
@token_required
@csrf_exempt
def report_build(request):
try:
payload = json.loads(request.body)
build = Build.objects.get(pk=payload['id'])
build.handle_worker_report(payload)
response = JsonResponse({'message': 'Thanks for building it'})
except Build.DoesNotExist:
response = JsonResponse({'error': 'Build not found'})
response.status_code = 404
return response
@never_cache
@csrf_exempt
def build_badge(request, owner, project, branch='master'):
project = get_object_or_404(Project, owner=owner, name=project)
badge = project.get_badge(branch)
if badge is None:
raise Http404
return HttpResponse(content=badge, content_type='image/svg+xml')
| Add @never_cache decorator to the badge view | Add @never_cache decorator to the badge view
| Python | mit | frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq |
04b7e79ce3fed1afac129098badb632ca226fdee | dispatch.py | dispatch.py | #!/usr/bin/env python
"""
Copyright (c) 2008-2011, Anthony Garcia <lagg@lavabit.com>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
import config
import steam
steam.set_api_key(config.api_key)
from optf2.backend import openid
from optf2.frontend import render
openid.set_session(render.session)
import web
if config.enable_fastcgi:
web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
if __name__ == "__main__":
render.application.run()
| #!/usr/bin/env python
"""
Copyright (c) 2008-2011, Anthony Garcia <lagg@lavabit.com>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
import config
import steam
steam.set_api_key(config.api_key)
from optf2.backend import openid
from optf2.frontend import render
openid.set_session(render.session)
import web
# wsgi
application = render.application.wsgifunc()
if config.enable_fastcgi:
web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
if __name__ == "__main__":
render.application.run()
| Add wsgi handler by default | Add wsgi handler by default
| Python | isc | Lagg/optf2,FlaminSarge/optf2,Lagg/optf2,FlaminSarge/optf2,Lagg/optf2,FlaminSarge/optf2 |
67255ac86d2ef91ce355655112c919f2e08045b4 | django_uwsgi/urls.py | django_uwsgi/urls.py | from django.conf.urls import patterns, url
from . import views
urlpatterns = [
url(r'^$', views.UwsgiStatus.as_view(), name='uwsgi_index'),
url(r'^reload/$', views.UwsgiReload.as_view(), name='uwsgi_reload'),
url(r'^clear_cache/$', views.UwsgiCacheClear.as_view(), name='uwsgi_cache_clear'),
]
| from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.UwsgiStatus.as_view(), name='uwsgi_index'),
url(r'^reload/$', views.UwsgiReload.as_view(), name='uwsgi_reload'),
url(r'^clear_cache/$', views.UwsgiCacheClear.as_view(), name='uwsgi_cache_clear'),
]
| Remove usage of patterns in import line | Remove usage of patterns in import line
| Python | mit | unbit/django-uwsgi,unbit/django-uwsgi |
b2cac05be3f6c510edfaf1ae478fabdcf06fd19a | mgsv_names.py | mgsv_names.py | import random
global adjectives, animals, rares
with open('adjectives.txt') as f:
adjectives = f.readlines()
with open('animals.txt') as f:
animals = f.readlines()
with open('rares.txt') as f:
rares = f.readlines()
uncommons = {
# Adjectives:
'master': 'miller',
'raging': 'bull',
'hidden': 'dragon',
'humming': 'bird',
'spicy': 'sandworm',
# Animals:
'ocelot': 'revolver',
'lion': 'snooping',
'tiger': 'crouching',
'hippo': 'hungry',
'falcon': 'punching',
}
def get_name():
adj = random.choice(adjectives).strip()
anim = random.choice(animals).strip()
r = random.random()
if r < 0.001 or r >= 0.999:
return random.choice(rares).strip()
elif r < 0.3 and adj in uncommons:
return ' '.join((adj, uncommons[adj]))
elif r >= 0.7 and anim in uncommons:
return ' '.join((uncommons[anim], anim))
return ' '.join((adj, anim))
if __name__ == '__main__':
print(get_name())
| import random, os
global adjectives, animals, rares
with open(os.path.join(os.path.dirname(__file__), 'adjectives.txt')) as f:
adjectives = f.readlines()
with open(os.path.join(os.path.dirname(__file__), 'animals.txt')) as f:
animals = f.readlines()
with open(os.path.join(os.path.dirname(__file__), 'rares.txt')) as f:
rares = f.readlines()
uncommons = {
# Adjectives:
'master': 'miller',
'raging': 'bull',
'hidden': 'dragon',
'humming': 'bird',
'spicy': 'sandworm',
# Animals:
'ocelot': 'revolver',
'lion': 'snooping',
'tiger': 'crouching',
'hippo': 'hungry',
'falcon': 'punching',
}
def generate_name():
adj = random.choice(adjectives).strip()
anim = random.choice(animals).strip()
r = random.random()
if r < 0.001 or r >= 0.999:
return random.choice(rares).strip()
elif r < 0.3 and adj in uncommons:
return ' '.join((adj, uncommons[adj]))
elif r >= 0.7 and anim in uncommons:
return ' '.join((uncommons[anim], anim))
return ' '.join((adj, anim))
if __name__ == '__main__':
print(generate_name())
| Load text files from the same dir as the script. | Load text files from the same dir as the script.
Also renamed our name generator.
| Python | unlicense | rotated8/mgsv_names |
40edd4a635dd8f83a21f15f22883e7dae8d8d0a8 | test/test_modes/test_backspace.py | test/test_modes/test_backspace.py | from pyqode.qt import QtCore
from pyqode.qt.QtTest import QTest
from pyqode.core.api import TextHelper
from pyqode.core import modes
from test.helpers import editor_open
def get_mode(editor):
return editor.modes.get(modes.SmartBackSpaceMode)
def test_enabled(editor):
mode = get_mode(editor)
assert mode.enabled
mode.enabled = False
mode.enabled = True
@editor_open(__file__)
def test_key_pressed(editor):
QTest.qWait(1000)
TextHelper(editor).goto_line(20, 4)
assert editor.textCursor().positionInBlock() == 4
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
assert editor.textCursor().positionInBlock() == 0
TextHelper(editor).goto_line(19, 5)
assert editor.textCursor().positionInBlock() == 5
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
assert editor.textCursor().positionInBlock() == 4
TextHelper(editor).goto_line(20, 0)
assert editor.textCursor().positionInBlock() == 0
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
assert editor.textCursor().positionInBlock() == 0
| from pyqode.qt import QtCore
from pyqode.qt.QtTest import QTest
from pyqode.core.api import TextHelper
from pyqode.core import modes
from test.helpers import editor_open
def get_mode(editor):
return editor.modes.get(modes.SmartBackSpaceMode)
def test_enabled(editor):
mode = get_mode(editor)
assert mode.enabled
mode.enabled = False
mode.enabled = True
@editor_open(__file__)
def test_key_pressed(editor):
QTest.qWait(1000)
TextHelper(editor).goto_line(21, 4)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 4
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 0
TextHelper(editor).goto_line(19, 5)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 5
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 4
TextHelper(editor).goto_line(20, 0)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 0
QTest.keyPress(editor, QtCore.Qt.Key_Backspace)
QTest.qWait(2000)
assert editor.textCursor().positionInBlock() == 28
| Fix test backspace (this test has to be changed since the parent implementation is now called when there is no space to eat) | Fix test backspace (this test has to be changed since the parent implementation is now called when there is no space to eat)
| Python | mit | pyQode/pyqode.core,zwadar/pyqode.core,pyQode/pyqode.core |
e68836173dec1e1fe80e07cca8eb67ebe19e424e | cegui/src/ScriptingModules/PythonScriptModule/bindings/distutils/PyCEGUI/__init__.py | cegui/src/ScriptingModules/PythonScriptModule/bindings/distutils/PyCEGUI/__init__.py | import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(str(fake).split()[3][1:])
libpath = os.path.abspath(get_my_path())
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
| import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
| Use a less pathetic method to retrieve the PyCEGUI dirname | MOD: Use a less pathetic method to retrieve the PyCEGUI dirname
| Python | mit | ruleless/CEGUI,OpenTechEngine/CEGUI,ruleless/CEGUI,ruleless/CEGUI,ruleless/CEGUI,OpenTechEngine/CEGUI,OpenTechEngine/CEGUI,OpenTechEngine/CEGUI |
cd30723af9f82b7a91d1ad1e2a5b86f88d8f4b17 | harvester/post_processing/dedup_sourceresource.py | harvester/post_processing/dedup_sourceresource.py | # pass in a Couchdb doc, get back one with de-duplicated sourceResource values
def dedup_sourceresource(doc):
''' Look for duplicate values in the doc['sourceResource'] and
remove.
Values must be *exactly* the same
'''
for key, value in doc['sourceResource'].items():
if not isinstance(value, basestring):
new_list = []
for item in value:
if item not in new_list:
new_list.append(item)
doc['sourceResource'][key] = new_list
return doc
| # pass in a Couchdb doc, get back one with de-duplicated sourceResource values
def dedup_sourceresource(doc):
''' Look for duplicate values in the doc['sourceResource'] and
remove.
Values must be *exactly* the same
'''
for key, value in doc['sourceResource'].items():
if isinstance(value, list):
# can't use set() because of dict values (non-hashable)
new_list = []
for item in value:
if item not in new_list:
new_list.append(item)
doc['sourceResource'][key] = new_list
return doc
| Make sure dedup item is a list. | Make sure dedup item is a list.
| Python | bsd-3-clause | barbarahui/harvester,ucldc/harvester,ucldc/harvester,mredar/harvester,mredar/harvester,barbarahui/harvester |
b98bd25a8b25ca055ca92393f24b6a04382457a8 | forms.py | forms.py | from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email
class Login(FlaskForm):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
| from flask import flash
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email, Length
def flash_errors(form):
""" Universal interface to handle form error.
Handles form error with the help of flash message
"""
for field, errors in form.errors.items():
for error in errors:
flash(u'Error in the %s field - %s' % (
getattr(form, field).label.text,
error
))
class Login(FlaskForm):
username = StringField('Username',
validators=[DataRequired(), Length(max=16)])
password = PasswordField('Password',
validators=[DataRequired()])
| Add universal interface for validation error message | Add universal interface for validation error message
| Python | mit | openedoo/module_employee,openedoo/module_employee,openedoo/module_employee |
1af120a5ce7f2fc35aeb7e77a747b0e8382bba51 | api_tests/utils.py | api_tests/utils.py | from blinker import ANY
from urlparse import urlparse
from contextlib import contextmanager
from addons.osfstorage import settings as osfstorage_settings
def create_test_file(node, user, filename='test_file', create_guid=True):
osfstorage = node.get_addon('osfstorage')
root_node = osfstorage.get_root()
test_file = root_node.append_file(filename)
if create_guid:
test_file.get_guid(create=True)
test_file.create_version(user, {
'object': '06d80e',
'service': 'cloud',
osfstorage_settings.WATERBUTLER_RESOURCE: 'osf',
}, {
'size': 1337,
'contentType': 'img/png'
}).save()
return test_file
def urlparse_drop_netloc(url):
url = urlparse(url)
if url[4]:
return url[2] + '?' + url[4]
return url[2]
@contextmanager
def disconnected_from_listeners(signal):
"""Temporarily disconnect all listeners for a Blinker signal."""
listeners = list(signal.receivers_for(ANY))
for listener in listeners:
signal.disconnect(listener)
yield
for listener in listeners:
signal.connect(listener)
| from blinker import ANY
from urlparse import urlparse
from contextlib import contextmanager
from addons.osfstorage import settings as osfstorage_settings
def create_test_file(target, user, filename='test_file', create_guid=True):
osfstorage = target.get_addon('osfstorage')
root_node = osfstorage.get_root()
test_file = root_node.append_file(filename)
if create_guid:
test_file.get_guid(create=True)
test_file.create_version(user, {
'object': '06d80e',
'service': 'cloud',
osfstorage_settings.WATERBUTLER_RESOURCE: 'osf',
}, {
'size': 1337,
'contentType': 'img/png'
}).save()
return test_file
def urlparse_drop_netloc(url):
url = urlparse(url)
if url[4]:
return url[2] + '?' + url[4]
return url[2]
@contextmanager
def disconnected_from_listeners(signal):
"""Temporarily disconnect all listeners for a Blinker signal."""
listeners = list(signal.receivers_for(ANY))
for listener in listeners:
signal.disconnect(listener)
yield
for listener in listeners:
signal.connect(listener)
| Update api test util to create files to use target name instead | Update api test util to create files to use target name instead
| Python | apache-2.0 | mattclark/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,adlius/osf.io,pattisdr/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,mattclark/osf.io,mfraezz/osf.io,mfraezz/osf.io,cslzchen/osf.io,felliott/osf.io,adlius/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,saradbowman/osf.io,felliott/osf.io,Johnetordoff/osf.io,erinspace/osf.io,caseyrollins/osf.io,aaxelb/osf.io,erinspace/osf.io,brianjgeiger/osf.io,adlius/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,felliott/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,baylee-d/osf.io,mfraezz/osf.io,felliott/osf.io,caseyrollins/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,adlius/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,Johnetordoff/osf.io |
21dbb8af7412c04b768a9d68e1f8566786d5100c | mdot_rest/serializers.py | mdot_rest/serializers.py | from .models import Resource, ResourceLink, IntendedAudience
from rest_framework import serializers
class ResourceLinkSerializer(serializers.ModelSerializer):
class Meta:
model = ResourceLink
fields = ('link_type', 'url',)
class IntendedAudienceSerializer(serializers.ModelSerializer):
class Meta:
model = IntendedAudience
fields = ('audience',)
class ResourceSerializer(serializers.ModelSerializer):
resource_links = ResourceLinkSerializer(many=True, read_only=True)
intended_audiences = IntendedAudienceSerializer(many=True, read_only=True)
class Meta:
model = Resource
fields = (
'id',
'title',
'feature_desc',
'featured',
'accessible',
'responsive_web',
'resource_links',
'intended_audiences',
'campus_bothell',
'campus_tacoma',
'campus_seattle',
'created_date',
'last_modified',
)
| from .models import Resource, ResourceLink, IntendedAudience
from rest_framework import serializers
class ResourceLinkSerializer(serializers.ModelSerializer):
class Meta:
model = ResourceLink
fields = ('link_type', 'url',)
class IntendedAudienceSerializer(serializers.ModelSerializer):
class Meta:
model = IntendedAudience
fields = ('audience',)
class ResourceSerializer(serializers.ModelSerializer):
resource_links = ResourceLinkSerializer(many=True, read_only=True)
intended_audiences = IntendedAudienceSerializer(many=True, read_only=True)
class Meta:
model = Resource
fields = (
'id',
'title',
'feature_desc',
'image',
'featured',
'accessible',
'responsive_web',
'resource_links',
'intended_audiences',
'campus_bothell',
'campus_tacoma',
'campus_seattle',
'created_date',
'last_modified',
)
| Add image field to the resource serialization. | Add image field to the resource serialization.
| Python | apache-2.0 | uw-it-aca/mdot-rest,uw-it-aca/mdot-rest |
f7c9bbd5ac49254d564a56ba3713b55abcfa4079 | byceps/blueprints/news/views.py | byceps/blueprints/news/views.py | # -*- coding: utf-8 -*-
"""
byceps.blueprints.news.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import abort, g
from ...services.news import service as news_service
from ...util.framework import create_blueprint
from ...util.templating import templated
blueprint = create_blueprint('news', __name__)
ITEMS_PER_PAGE = 4
@blueprint.route('/', defaults={'page': 1})
@blueprint.route('/pages/<int:page>')
@templated
def index(page):
"""Show a page of news items."""
items = news_service.get_items_paginated(g.party.brand.id, page,
ITEMS_PER_PAGE)
return {
'items': items,
'page': page,
}
@blueprint.route('/<slug>')
@templated
def view(slug):
"""Show a single news item."""
item = news_service.find_item_by_slug(g.party.brand.id, slug)
if item is None:
abort(404)
return {
'item': item,
}
| # -*- coding: utf-8 -*-
"""
byceps.blueprints.news.views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import abort, current_app, g
from ...services.news import service as news_service
from ...util.framework import create_blueprint
from ...util.templating import templated
blueprint = create_blueprint('news', __name__)
@blueprint.route('/', defaults={'page': 1})
@blueprint.route('/pages/<int:page>')
@templated
def index(page):
"""Show a page of news items."""
items_per_page = _get_items_per_page_value()
items = news_service.get_items_paginated(g.party.brand.id, page,
items_per_page)
return {
'items': items,
'page': page,
}
@blueprint.route('/<slug>')
@templated
def view(slug):
"""Show a single news item."""
item = news_service.find_item_by_slug(g.party.brand.id, slug)
if item is None:
abort(404)
return {
'item': item,
}
def _get_items_per_page_value(default=4):
return int(current_app.config.get('NEWS_ITEMS_PER_PAGE', default))
| Allow configuration of the number of news items per page | Allow configuration of the number of news items per page
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps |
5352e164b38099cbc7fe4eba87c00bc1c1d30d44 | bluezero/eddystone.py | bluezero/eddystone.py | """
Level 1 file for creating Eddystone beacons
"""
from bluezero import tools
from bluezero import broadcaster
class EddystoneURL:
def __init__(self, url):
service_data = tools.url_to_advert(url, 0x10, 0x00)
url_beacon = broadcaster.Beacon()
url_beacon.add_service_data('FEAA', service_data)
url_beacon.start_beacon()
| """
Level 1 file for creating Eddystone beacons
"""
from bluezero import tools
from bluezero import broadcaster
class EddystoneURL:
def __init__(self, url, tx_power=0x08):
"""
The Eddystone-URL frame broadcasts a URL using a compressed encoding
format in order to fit more within the limited advertisement packet.
Example:
>>> from bluezero import eddystone
>>> eddystone.EddystoneURL('https://github.com/ukBaz')
:param url: String containing URL e.g. ('http://camjam.me')
:param tx_power:
"""
service_data = tools.url_to_advert(url, 0x10, tx_power)
if len(service_data) > 17:
raise Exception('URL too long')
url_beacon = broadcaster.Beacon()
url_beacon.add_service_data('FEAA', service_data)
url_beacon.start_beacon()
| Test for URL length error | Test for URL length error
| Python | mit | ukBaz/python-bluezero,ukBaz/python-bluezero |
8d229401ea69799638d8cd005bc4dc87bb4327a4 | src/mist/io/tests/MyRequestsClass.py | src/mist/io/tests/MyRequestsClass.py | import requests
class MyRequests(object):
"""
Simple class to make requests with or withour cookies etc.
This way we can have the same request methods both in io and core
"""
def __init__(self, uri, data=None, cookie=None, timeout=None):
self.headers = {'Cookie': cookie}
self.timeout = timeout
self.uri = uri
self.data = data
def post(self):
response = requests.post(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def get(self):
response = requests.get(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def put(self):
response = requests.put(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def delete(self):
response = requests.delete(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
| import requests
class MyRequests(object):
"""
Simple class to make requests with or withour cookies etc.
This way we can have the same request methods both in io and core
"""
def __init__(self, uri, data=None, cookie=None, timeout=None, csrf=None):
self.headers = {'Cookie': cookie, 'Csrf-Token': csrf}
self.timeout = timeout
self.uri = uri
self.data = data
def post(self):
response = requests.post(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def get(self):
response = requests.get(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def put(self):
response = requests.put(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
def delete(self):
response = requests.delete(self.uri, data=self.data, headers=self.headers, timeout=self.timeout)
return response
| Add csrf token in MyRequests class | Add csrf token in MyRequests class
| Python | agpl-3.0 | kelonye/mist.io,munkiat/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,afivos/mist.io,DimensionDataCBUSydney/mist.io,afivos/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,kelonye/mist.io,zBMNForks/mist.io,Lao-liu/mist.io,johnnyWalnut/mist.io,munkiat/mist.io,DimensionDataCBUSydney/mist.io,Lao-liu/mist.io,zBMNForks/mist.io,zBMNForks/mist.io,johnnyWalnut/mist.io,johnnyWalnut/mist.io,afivos/mist.io,munkiat/mist.io,munkiat/mist.io,kelonye/mist.io |
6dceb819f86fd469a4d817dec0156646a5f574cf | matchzoo/data_generator/callbacks/lambda_callback.py | matchzoo/data_generator/callbacks/lambda_callback.py | from matchzoo.data_generator.callbacks.callback import Callback
class LambdaCallback(Callback):
"""
LambdaCallback. Just a shorthand for creating a callback class.
See :class:`matchzoo.data_generator.callbacks.Callback` for more details.
Example:
>>> from matchzoo.data_generator.callbacks import LambdaCallback
>>> callback = LambdaCallback(on_batch_unpacked=print)
>>> callback.on_batch_unpacked('x', 'y')
x y
"""
def __init__(self, on_batch_data_pack=None, on_batch_unpacked=None):
"""Init."""
self._on_batch_unpacked = on_batch_unpacked
self._on_batch_data_pack = on_batch_data_pack
def on_batch_data_pack(self, data_pack):
"""`on_batch_data_pack`."""
if self._on_batch_data_pack:
self._on_batch_data_pack(data_pack)
def on_batch_unpacked(self, x, y):
"""`on_batch_unpacked`."""
if self._on_batch_unpacked:
self._on_batch_unpacked(x, y)
| from matchzoo.data_generator.callbacks.callback import Callback
class LambdaCallback(Callback):
"""
LambdaCallback. Just a shorthand for creating a callback class.
See :class:`matchzoo.data_generator.callbacks.Callback` for more details.
Example:
>>> import matchzoo as mz
>>> from matchzoo.data_generator.callbacks import LambdaCallback
>>> data = mz.datasets.toy.load_data()
>>> batch_func = lambda x: print(type(x))
>>> unpack_func = lambda x, y: print(type(x), type(y))
>>> callback = LambdaCallback(on_batch_data_pack=batch_func,
... on_batch_unpacked=unpack_func)
>>> data_gen = mz.DataGenerator(
... data, batch_size=len(data), callbacks=[callback])
>>> _ = data_gen[0]
<class 'matchzoo.data_pack.data_pack.DataPack'>
<class 'dict'> <class 'numpy.ndarray'>
"""
def __init__(self, on_batch_data_pack=None, on_batch_unpacked=None):
"""Init."""
self._on_batch_unpacked = on_batch_unpacked
self._on_batch_data_pack = on_batch_data_pack
def on_batch_data_pack(self, data_pack):
"""`on_batch_data_pack`."""
if self._on_batch_data_pack:
self._on_batch_data_pack(data_pack)
def on_batch_unpacked(self, x, y):
"""`on_batch_unpacked`."""
if self._on_batch_unpacked:
self._on_batch_unpacked(x, y)
| Update data generator lambda callback docs. | Update data generator lambda callback docs.
| Python | apache-2.0 | faneshion/MatchZoo,faneshion/MatchZoo |
73e4f2c333e7b4f02dbb0ec344a3a671ba97cac3 | library-examples/read-replace-export-excel.py | library-examples/read-replace-export-excel.py | """
Proto type that does the following:
input:Excel file in language A
output 1:Copy of input file, with original strings replaced with serial numbers
output 2:Single xlsx file that contains serial numbers and original texts from input file.
"""
import shutil
from openpyxl import load_workbook, Workbook
shutil.copyfile('sample-input-fortest.xlsx','sample-input-fortest-out.xlsx')
#point to the file to be read. Intuitive.
wb2 = load_workbook('sample-input-fortest.xlsx')
#convince your self that sheet names are retireved.
sheet_names = wb2.get_sheet_names()
print sheet_names
#work book is simply a list of sheets
sheet = wb2[sheet_names[0]]
print sheet
print "can iterate sheets, rows and columns intuitively"
string_list = list()
string_list.append(("sequence_number","original language"))
seq_no = 1
for sheet in wb2:
for row in sheet.rows:
for cell in row:
if None!=cell.value:
string_list.append((seq_no,cell.value))
seq_no+=1
wb_out = Workbook(write_only=True)
ws = wb_out.create_sheet()
for string in string_list:
ws.append(string)
wb_out.save('new_big_file.xlsx')
| """
Proto type that does the following:
input:Excel file in language A
output 1:Copy of input file, with original strings replaced with serial numbers
output 2:Single xlsx file that contains serial numbers and original texts from input file.
"""
import shutil
from openpyxl import load_workbook, Workbook
#point to the file to be read. Intuitive.
wb2 = load_workbook('sample-input-fortest.xlsx')
#convince your self that sheet names are retireved.
sheet_names = wb2.get_sheet_names()
print sheet_names
#work book is simply a list of sheets
sheet = wb2[sheet_names[0]]
print sheet
#go trhough the excel file, extract strings & replace with number.
string_list = list()
string_list.append(("sequence_number","original language"))
seq_no = 1
for sheet in wb2:
for row in sheet.rows:
for cell in row:
if None!=cell.value:
string_list.append((seq_no,cell.value))
cell.value=str(seq_no)
seq_no+=1
#save the file containing numbers that replaced the string.
wb2.save('sample-input-fortest-out.xlsx')
#save the extracted strings
wb_out = Workbook(write_only=True)
ws = wb_out.create_sheet()
for string in string_list:
ws.append(string)
wb_out.save('new_big_file.xlsx')
| Change so original input does not change. | Change so original input does not change.
| Python | apache-2.0 | iku000888/Excel_Translation_Helper |
23d4081392f84f2d5359f44ed4dde41611bb4cd2 | tests/race_deleting_keys_test.py | tests/race_deleting_keys_test.py | import nose.plugins.attrib
import time as _time
import subprocess
import sys
import redisdl
import unittest
import json
import os.path
from . import util
from . import big_data
@nose.plugins.attrib.attr('slow')
class RaceDeletingKeysTest(unittest.TestCase):
def setUp(self):
import redis
self.r = redis.Redis()
for key in self.r.keys('*'):
self.r.delete(key)
def test_delete_race(self):
bd = big_data.BigData(self.r)
count = bd.determine_key_count()
# data is already inserted
big_data_path = os.path.join(os.path.dirname(__file__), 'big_data.py')
p = subprocess.Popen(
[sys.executable, big_data_path, 'delete', str(count)],
stdout=subprocess.PIPE,
)
_time.sleep(1)
start = _time.time()
dump = redisdl.dumps()
finish = _time.time()
out, err = p.communicate()
delete_start, delete_finish = [int(time) for time in out.split(' ')]
assert delete_start < start
assert delete_finish > finish
| import nose.plugins.attrib
import time as _time
import subprocess
import sys
import redisdl
import unittest
import json
import os.path
from . import util
from . import big_data
@nose.plugins.attrib.attr('slow')
class RaceDeletingKeysTest(unittest.TestCase):
def setUp(self):
import redis
self.r = redis.Redis()
for key in self.r.keys('*'):
self.r.delete(key)
def test_delete_race(self):
bd = big_data.BigData(self.r)
count = bd.determine_key_count()
# data is already inserted
big_data_path = os.path.join(os.path.dirname(__file__), 'big_data.py')
p = subprocess.Popen(
[sys.executable, big_data_path, 'delete', str(count)],
stdout=subprocess.PIPE,
)
_time.sleep(1)
start = _time.time()
dump = redisdl.dumps()
finish = _time.time()
out, err = p.communicate()
delete_start, delete_finish = [int(time) for time in out.split(' ')]
assert delete_start < start
assert finish > start + 5
assert delete_finish > start + 5
| Replace finish order requirement with a duration requirement | Replace finish order requirement with a duration requirement
| Python | bsd-2-clause | p/redis-dump-load,hyunchel/redis-dump-load,p/redis-dump-load,hyunchel/redis-dump-load |
93903d065cd1ff8f3f0c715668f05c804c5561f9 | profile/linearsvc.py | profile/linearsvc.py | import cProfile
from sklearn.svm import LinearSVC
from sklearn.datasets import load_svmlight_file
from sklearn.metrics import accuracy_score
X, y = load_svmlight_file("data.txt")
svc = LinearSVC()
cProfile.runctx('svc.fit(X, y)', {'svc': svc, 'X': X, 'y': y}, {})
svc.fit(X, y)
results = svc.predict(X)
accuracy = accuracy_score(y, results)
print("Accuracy: {}".format(accuracy))
| import timeit
from sklearn.svm import LinearSVC
from sklearn.datasets import load_svmlight_file
from sklearn.metrics import accuracy_score
setup = """
from sklearn.svm import LinearSVC
from sklearn.datasets import load_svmlight_file
X, y = load_svmlight_file("data.txt")
svc = LinearSVC()
"""
time = timeit.timeit('svc.fit(X, y)', setup=setup, number=1)
print("Time: {}".format(time))
X, y = load_svmlight_file("data.txt")
svc = LinearSVC()
svc.fit(X, y)
results = svc.predict(X)
accuracy = accuracy_score(y, results)
print("Accuracy: {}".format(accuracy))
| Use timeit instead of cProfile | Use timeit instead of cProfile
| Python | mit | JuliaPackageMirrors/SoftConfidenceWeighted.jl,IshitaTakeshi/SoftConfidenceWeighted.jl |
3ecfdf41da3eb3b881c112254b913ff907424bd7 | Scripts/2-Upload.py | Scripts/2-Upload.py |
import os
import json
# Get Steam settings
steamData = open("steam.json")
steamConfig = json.load(steamData)
steamSDKDir = steamConfig["sdkDir"]
steamBuilder = steamConfig["builder"]
steamCommand = steamConfig["command"]
steamAppFile = steamConfig["appFile"]
steamUser = steamConfig["user"]
steamPassword = steamConfig["password"]
steamData.close()
# Generate paths
buildAppFile = os.path.join("..", steamAppFile)
buildRootDir = os.path.join(steamSDKDir, "tools", "ContentBuilder")
# Generate full command line
commandLine = os.path.join(steamBuilder, steamCommand)
commandLine += " +login " + steamUser + " " + steamPassword
commandLine += " +run_app_build " + buildAppFile
commandLine += " +quit"
# Call
currentPath = os.getcwd()
os.chdir(buildRootDir)
os.system(commandLine)
os.chdir(currentPath)
| #!/usr/bin/env python
import os
import json
# Get Steam settings
steamData = open("steam.json")
steamConfig = json.load(steamData)
steamSDKDir = steamConfig["sdkDir"]
steamBuilder = steamConfig["builder"]
steamCommand = steamConfig["command"]
steamAppFile = steamConfig["appFile"]
steamUser = steamConfig["user"]
steamPassword = steamConfig["password"]
steamData.close()
# Generate paths
buildAppFile = os.path.join("..", steamAppFile)
buildRootDir = os.path.join(steamSDKDir, "tools", "ContentBuilder")
# Generate full command line
commandLine = os.path.join(steamBuilder, steamCommand)
commandLine += " +login " + steamUser + " " + steamPassword
commandLine += " +run_app_build " + buildAppFile
commandLine += " +quit"
# Call
currentPath = os.getcwd()
os.chdir(buildRootDir)
os.system(commandLine)
os.chdir(currentPath)
| Make steam upload script works for Linux | Make steam upload script works for Linux
| Python | bsd-3-clause | arbonagw/HeliumRain,arbonagw/HeliumRain,arbonagw/HeliumRain,arbonagw/HeliumRain,arbonagw/HeliumRain |
92b7a5463e505f84862dd96e07c9caa5a97107a9 | client/test/server_tests.py | client/test/server_tests.py | from nose.tools import *
from mockito import *
import unittest
from source.server import *
from source.exception import *
from source.commands.system import *
class MyTestCase(unittest.TestCase):
def createCommandResponse(self, command, parameters = {}, timeout = None):
response = mock()
response.status_code = 200
json = { 'command': command, 'parameters': parameters }
if timeout is not None:
json['timeout'] = timeout
when(response).json().thenReturn({ 'command': command, 'timeout': timeout, 'parameters': parameters })
return response
def setResponse(self, response):
when(self.server._requests).get('').thenReturn(response)
def setUp(self):
self.server = Server('')
self.server._requests = mock()
def tearDown(self):
pass
def test_get(self):
self.setResponse(self.createCommandResponse('copy', parameters = {'src': 'source', 'dst': 'destination' }, timeout = 10))
response = self.server.get()
self.assertIsInstance(response, Copy)
self.assertEqual(response.parameters, {'src': 'source', 'dst': 'destination', })
self.assertIs(response.timeout, 10)
def test_get_command_not_found(self):
self.setResponse(self.createCommandResponse('Not found command'))
self.assertRaises(CommandNotFoundException, self.server.get) | from mockito import *
import unittest
from source.server import *
from source.exception import *
from source.commands.system import *
class ServerTestCase(unittest.TestCase):
def createCommandResponse(self, command, parameters = {}, timeout = None):
response = mock()
response.status_code = 200
json = { 'command': command, 'parameters': parameters }
if timeout is not None:
json['timeout'] = timeout
when(response).json().thenReturn({ 'command': command, 'timeout': timeout, 'parameters': parameters })
return response
def setResponse(self, response):
when(self.server._requests).get('').thenReturn(response)
def setUp(self):
self.server = Server('')
self.server._requests = mock()
def tearDown(self):
pass
def testGet(self):
self.setResponse(self.createCommandResponse('copy', parameters = {'src': 'source', 'dst': 'destination' }, timeout = 10))
response = self.server.get()
self.assertIsInstance(response, Copy)
self.assertEqual(response.parameters, {'src': 'source', 'dst': 'destination', })
self.assertIs(response.timeout, 10)
def testGetCommandNotFound(self):
self.setResponse(self.createCommandResponse('Not found command'))
self.assertRaises(CommandNotFoundException, self.server.get) | Change function names to camelCase | Change function names to camelCase
| Python | mit | CaminsTECH/owncloud-test |
52584725e462ab304bc2e976fa691f0d830e7efb | Speech/processor.py | Speech/processor.py | # Retrieve file from Facebook
import urllib, convert, re, os
# from speech_py import speech_to_text_offline as STT_o
# from speech_py import speech_to_text_google as STT
from speech_py import speech_to_text_ibm_rest as STT
def transcribe(audio_url):
if not os.path.isdir('./audio/retrieved_audio'):
os.makedirs('./audio/retrieved_audio')
reg_ex = '\w+.mp4'
file_name = re.search(reg_ex, audio_url).group(0)
urllib.urlretrieve(audio_url, './audio/retrieved_audio/{}'.format(file_name))
convert.convert('./audio/retrieved_audio/{}'.format(file_name))
# Converted in: ./converted/{name}.wav
return STT('./audio/converted/{}'.format(file_name[:-4]+".wav"))
| # Retrieve file from Facebook
import urllib, convert, re, os
# from speech_py import speech_to_text_google as STT
from speech_py import speech_to_text_ibm_rest as STT
def transcribe(audio_url):
if not os.path.isdir('./audio/retrieved_audio'):
os.makedirs('./audio/retrieved_audio')
reg_ex = '\w+.mp4'
file_name = re.search(reg_ex, audio_url).group(0)
urllib.urlretrieve(audio_url, './audio/retrieved_audio/{}'.format(file_name))
convert.convert('./audio/retrieved_audio/{}'.format(file_name))
# Converted in: ./converted/{name}.wav
return STT('./audio/converted/{}'.format(file_name[:-4]+".wav"))
| Modify ffmpeg path heroku 3 | Modify ffmpeg path heroku 3
| Python | mit | hungtraan/FacebookBot,hungtraan/FacebookBot,hungtraan/FacebookBot |
14e000acafe7c374294a7de6ffe295c9d56df68f | tests/test_postgresql_specific.py | tests/test_postgresql_specific.py | import pytest
from tests.utils import is_postgresql_env_with_json_field
@pytest.mark.skipif(not is_postgresql_env_with_json_field(),
reason="requires postgresql and Django 1.9+")
@pytest.mark.django_db
def test_dirty_json_field():
from tests.models import TestModelWithJSONField
tm = TestModelWithJSONField.objects.create(json_field={'data': 'dummy_data'})
assert tm.get_dirty_fields() == {}
tm.json_field = {'data': 'foo'}
assert tm.get_dirty_fields() == {'json_field': {'data': 'dummy_data'}}
| import pytest
from tests.utils import is_postgresql_env_with_json_field
@pytest.mark.skipif(not is_postgresql_env_with_json_field(),
reason="requires postgresql and Django 1.9+")
@pytest.mark.django_db
def test_dirty_json_field():
from tests.models import TestModelWithJSONField
tm = TestModelWithJSONField.objects.create(json_field={'data': [1, 2, 3]})
data = tm.json_field['data']
data.append(4)
assert tm.get_dirty_fields(verbose=True) == {
'json_field': {
'current': {'data': [1, 2, 3, 4]},
'saved': {'data': [1, 2, 3]}
}
}
| Update postgresql json_field to reflect deepcopy fix | Update postgresql json_field to reflect deepcopy fix
| Python | bsd-3-clause | jdotjdot/django-dirtyfields,romgar/django-dirtyfields,smn/django-dirtyfields |
db8e02661df65e1a50c5810968afef7ecd44db42 | braid/bazaar.py | braid/bazaar.py | import os
from fabric.api import run
from braid import package, fails
def install():
package.install('bzr')
def branch(branch, location):
if fails('[ -d {}/.bzr ]'.format(location)):
run('mkdir -p {}'.format(os.path.dirname(location)))
run('bzr branch {} {}'.format(branch, location))
else:
# FIXME (https://github.com/twisted-infra/braid/issues/5)
# We currently don't check that this the correct branch
run('bzr update {}'.format(location))
| import os
from fabric.api import run
from braid import package, fails
def install():
package.install('bzr')
def branch(branch, location):
if fails('[ -d {}/.bzr ]'.format(location)):
run('mkdir -p {}'.format(os.path.dirname(location)))
run('bzr branch {} {}'.format(branch, location))
else:
run('bzr pull --overwrite -d {} {}'.format(location, branch))
| Make bzr always pull from the specified remote. | Make bzr always pull from the specified remote.
Refs: #5.
| Python | mit | alex/braid,alex/braid |
3d5d6d093420294ed7b5fa834285d1d55da82d5d | pyroSAR/tests/test_snap_exe.py | pyroSAR/tests/test_snap_exe.py | import pytest
from contextlib import contextmanager
from pyroSAR._dev_config import ExamineExe
from pyroSAR.snap.auxil import ExamineSnap
@contextmanager
def not_raises(ExpectedException):
try:
yield
except ExpectedException:
raise AssertionError(
"Did raise exception {0} when it should not!".format(
repr(ExpectedException)
)
)
except Exception:
raise AssertionError(
"An unexpected exception {0} raised.".format(repr(Exception))
)
class TestExemineExe:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineExe.examine('some_exe_file.exe')
def test_warn_snap(self):
with pytest.warns(UserWarning):
ExamineExe.examine('snap')
# def test_not_exception(self):
# SNAP_EXECUTABLE = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']
# with not_raises(ValueError):
# ExamineExe.examine(SNAP_EXECUTABLE)
class TestExamineSnap:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineExe.examine('some_exe_file.exe')
# def test_not_exception(self):
# with not_raises(AssertionError):
# test_snap_exe = ExamineSnap() | from contextlib import contextmanager
import pytest
from pyroSAR._dev_config import ExamineExe
from pyroSAR.snap.auxil import ExamineSnap
@contextmanager
def not_raises(ExpectedException):
try:
yield
except ExpectedException:
raise AssertionError(
"Did raise exception {0} when it should not!".format(
repr(ExpectedException)
)
)
except Exception:
raise AssertionError(
"An unexpected exception {0} raised.".format(repr(Exception))
)
class TestExemineExe:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineExe.examine('some_exe_file.exe')
def test_not_exception(self):
SNAP_EXECUTABLE = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']
with pytest.warns(None) as record:
ExamineExe.examine(SNAP_EXECUTABLE)
assert len(record) == 0
class TestExamineSnap:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineSnap(snap_executable='some_exe_file.exe')
def test_not_exception(self):
with pytest.warns(None) as record:
ExamineSnap()
assert len(record) == 0
| Add unit test to determine if the classes ExamineExe and ExamineSnap will work properly. | Add unit test to determine if the classes ExamineExe and ExamineSnap will work properly.
Fixed a bug `assert len(record) == 1` in 'test_not_exception' method in class `TestExamineExe`.
| Python | mit | johntruckenbrodt/pyroSAR,johntruckenbrodt/pyroSAR |
b55c4c0536ca23484375d93f2ef011de0d5ce417 | app/app.py | app/app.py | from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello Docker + Nginx + Gunicorn + Flask!'
if __name__ == "__main__":
app.run(host="0.0.0.0", debug=True)
| from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello Docker + Nginx + Gunicorn + Flask!'
| Remove __name__ == __main__ becuase it'll never be used | Remove __name__ == __main__ becuase it'll never be used
| Python | mit | everett-toews/guestbook,rackerlabs/guestbook,everett-toews/guestbook,rackerlabs/guestbook |
ed326fba4f44552eeb206f3c5af9ad6f5e89ca44 | localeurl/models.py | localeurl/models.py | from django.conf import settings
from django.core import urlresolvers
from django.utils import translation
from localeurl import utils
def reverse(*args, **kwargs):
reverse_kwargs = kwargs.get('kwargs', {})
locale = utils.supported_language(reverse_kwargs.pop('locale',
translation.get_language()))
url = django_reverse(*args, **kwargs)
_, path = utils.strip_script_prefix(url)
return utils.locale_url(path, locale)
django_reverse = None
def patch_reverse():
"""
Monkey-patches the urlresolvers.reverse function. Will not patch twice.
"""
global django_reverse
if urlresolvers.reverse is not reverse:
django_reverse = urlresolvers.reverse
urlresolvers.reverse = reverse
if settings.USE_I18N:
patch_reverse()
| from django.conf import settings
from django.core import urlresolvers
from django.utils import translation
from localeurl import utils
def reverse(*args, **kwargs):
reverse_kwargs = kwargs.get('kwargs', {})
if reverse_kwargs!=None:
locale = utils.supported_language(reverse_kwargs.pop('locale',
translation.get_language()))
else:
locale = translation.get_language()
url = django_reverse(*args, **kwargs)
_, path = utils.strip_script_prefix(url)
return utils.locale_url(path, locale)
django_reverse = None
def patch_reverse():
"""
Monkey-patches the urlresolvers.reverse function. Will not patch twice.
"""
global django_reverse
if urlresolvers.reverse is not reverse:
django_reverse = urlresolvers.reverse
urlresolvers.reverse = reverse
if settings.USE_I18N:
patch_reverse()
| Handle situation when kwargs is None | Handle situation when kwargs is None
| Python | mit | eugena/django-localeurl |
4b35247fe384d4b2b206fa7650398511a493253c | setup.py | setup.py | from distutils.core import setup
import sys
import os
import re
PACKAGENAME = 'OpSimSummary'
packageDir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'opsimsummary')
versionFile = os.path.join(packageDir, 'version.py')
# Obtain the package version
with open(versionFile, 'r') as f:
s = f.read()
# Look up the string value assigned to __version__ in version.py using regexp
versionRegExp = re.compile("__VERSION__ = \"(.*?)\"")
# Assign to __version__
__version__ = versionRegExp.findall(s)[0]
print(__version__)
setup(# package information
name=PACKAGENAME,
version=__version__,
description='simple repo to study OpSim output summaries',
long_description=''' ''',
# What code to include as packages
packages=[PACKAGENAME],
packagedir={PACKAGENAME: 'opsimsummary'},
# What data to include as packages
include_package_data=True,
package_data={PACKAGENAME:['example_data/*.dat', 'example_data/*.simlib']}
)
| from distutils.core import setup
import sys
import os
import re
PACKAGENAME = 'OpSimSummary'
packageDir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
PACKAGENAME)
versionFile = os.path.join(packageDir, 'version.py')
# Obtain the package version
with open(versionFile, 'r') as f:
s = f.read()
# Look up the string value assigned to __version__ in version.py using regexp
versionRegExp = re.compile("__VERSION__ = \"(.*?)\"")
# Assign to __version__
__version__ = versionRegExp.findall(s)[0]
print(__version__)
setup(# package information
name=PACKAGENAME,
version=__version__,
description='simple repo to study OpSim output summaries',
long_description=''' ''',
# What code to include as packages
packages=[PACKAGENAME],
packagedir={PACKAGENAME: 'opsimsummary'},
# What data to include as packages
include_package_data=True,
package_data={PACKAGENAME:['example_data/*.dat', 'example_data/*.simlib']}
)
| Revert "Revert "Changed back due to problems, will fix later"" | Revert "Revert "Changed back due to problems, will fix later""
This reverts commit 5e92c0ef714dea823e1deeef21b5141d9e0111a0.
modified: setup.py
| Python | mit | rbiswas4/simlib |
83c0cb83a5eeaff693765c7d297b470adfdcec9e | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
version = __import__('stale').__version__
setup(
name="stale",
version=version,
description="Identifies (and optionally removes) stale Delicious links",
author="Jon Parise",
author_email="jon@indelible.org",
url="http://bitbucket.org/jparise/stale/",
scripts = ['stale.py'],
license = "MIT License",
classifiers = ['License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python'],
)
| #!/usr/bin/env python
from distutils.core import setup
version = __import__('stale').__version__
setup(
name="stale",
version=version,
description="Identifies (and optionally removes) stale Delicious links",
author="Jon Parise",
author_email="jon@indelible.org",
url="https://github.com/jparise/stale",
scripts = ['stale.py'],
license = "MIT License",
classifiers = ['License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python'],
)
| Use the GitHub URL instead of the BitBucket URL | Use the GitHub URL instead of the BitBucket URL
| Python | mit | jparise/stale |
2ef360762cf807806417fbd505319165716e4591 | setup.py | setup.py | #!/usr/bin/env python
# Copyright (c) 2014, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from auto_version import calculate_version, build_py_copy_version
def configuration(parent_package='', top_path=None):
import numpy
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
# if(os.environ.get('THIS_IS_TRAVIS') is not None):
# print("This appears to be Travis!")
# compile_args = ['-O3']
# else:
# compile_args = ['-ffast-math', '-O3']
compile_args = ['-O3']
config = Configuration('quaternion', parent_package, top_path)
config.add_extension('numpy_quaternion',
['quaternion.c', 'numpy_quaternion.c'],
depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'],
extra_compile_args=compile_args, )
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration,
version=calculate_version(),
cmdclass={'build_py': build_py_copy_version},)
| #!/usr/bin/env python
# Copyright (c) 2014, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from auto_version import calculate_version, build_py_copy_version
def configuration(parent_package='', top_path=None):
import numpy
from distutils.errors import DistutilsError
if numpy.__dict__.get('quaternion') is not None:
raise DistutilsError('The target NumPy already has a quaternion type')
from numpy.distutils.misc_util import Configuration
# if(os.environ.get('THIS_IS_TRAVIS') is not None):
# print("This appears to be Travis!")
# compile_args = ['-O3']
# else:
# compile_args = ['-ffast-math', '-O3']
compile_args = ['-ffast-math', '-O3']
config = Configuration('quaternion', parent_package, top_path)
config.add_extension('numpy_quaternion',
['quaternion.c', 'numpy_quaternion.c'],
depends=['quaternion.c', 'quaternion.h', 'numpy_quaternion.c'],
extra_compile_args=compile_args, )
return config
if __name__ == "__main__":
from numpy.distutils.core import setup
setup(configuration=configuration,
version=calculate_version(),
cmdclass={'build_py': build_py_copy_version},)
| Add fast-math back to compiler options, now that anaconda can handle it | Add fast-math back to compiler options, now that anaconda can handle it
Closes #13
See https://github.com/ContinuumIO/anaconda-issues/issues/182
| Python | mit | moble/quaternion,moble/quaternion |
9f485a55227406c3cfbfb3154ec8d0f2cad8ae67 | publisher/build_paper.py | publisher/build_paper.py | #!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = r'''
% These preamble commands are from build_paper.py
% PDF Standard Fonts
\usepackage{mathptmx}
\usepackage[scaled=.90]{helvet}
\usepackage{courier}
% Make verbatim environment smaller
\makeatletter
\g@addto@macro\@verbatim\footnotesize
\makeatother
\renewcommand{\quote}{}
'''
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
| #!/usr/bin/env python
import docutils.core as dc
from writer import writer
import os.path
import sys
import glob
preamble = r'''
% These preamble commands are from build_paper.py
% PDF Standard Fonts
\usepackage{mathptmx}
\usepackage[scaled=.90]{helvet}
\usepackage{courier}
% Make verbatim environment smaller
\makeatletter
\g@addto@macro\@verbatim\footnotesize
\makeatother
\renewcommand{\quote}{}
'''
settings = {'documentclass': 'IEEEtran',
'use_verbatim_when_possible': True,
'use_latex_citations': True,
'latex_preamble': preamble,
'documentoptions': 'letterpaper,compsoc,twoside'}
if len(sys.argv) != 2:
print "Usage: build_paper.py paper_directory"
sys.exit(-1)
path = sys.argv[1]
if not os.path.isdir(path):
print("Cannot open directory: %s" % path)
sys.exit(-1)
rst = glob.glob(os.path.join(path, '*.rst'))[0]
content = open(rst, 'r').read()
content = '''
.. role:: math(raw)
:format: latex
''' + content
tex = dc.publish_string(source=content, writer=writer,
settings_overrides=settings)
out = open('/tmp/paper.tex', 'w')
out.write(tex)
out.close()
| Use IEEE computer society layout to improve looks. | Use IEEE computer society layout to improve looks.
| Python | bsd-2-clause | Stewori/euroscipy_proceedings,helgee/euroscipy_proceedings,juhasch/euroscipy_proceedings,mwcraig/scipy_proceedings,sbenthall/scipy_proceedings,helgee/euroscipy_proceedings,SepidehAlassi/euroscipy_proceedings,SepidehAlassi/euroscipy_proceedings,chendaniely/scipy_proceedings,dotsdl/scipy_proceedings,mikaem/euroscipy_proceedings,mjklemm/euroscipy_proceedings,Stewori/euroscipy_proceedings,katyhuff/scipy_proceedings,Stewori/euroscipy_proceedings,katyhuff/scipy_proceedings,mikaem/euroscipy_proceedings,mikaem/euroscipy_proceedings,helgee/euroscipy_proceedings,michaelpacer/scipy_proceedings,katyhuff/scipy_proceedings,michaelpacer/scipy_proceedings,mwcraig/scipy_proceedings,dotsdl/scipy_proceedings,euroscipy/euroscipy_proceedings,juhasch/euroscipy_proceedings,springcoil/euroscipy_proceedings,euroscipy/euroscipy_proceedings,springcoil/euroscipy_proceedings,chendaniely/scipy_proceedings,springcoil/euroscipy_proceedings,juhasch/euroscipy_proceedings,euroscipy/euroscipy_proceedings,sbenthall/scipy_proceedings,michaelpacer/scipy_proceedings,SepidehAlassi/euroscipy_proceedings,dotsdl/scipy_proceedings,mjklemm/euroscipy_proceedings,mwcraig/scipy_proceedings,mjklemm/euroscipy_proceedings,sbenthall/scipy_proceedings,chendaniely/scipy_proceedings |
cb2c937fa16590a7431f450c0fc79cc68dd9984c | readthedocs/cdn/purge.py | readthedocs/cdn/purge.py | import logging
from django.conf import settings
log = logging.getLogger(__name__)
CDN_SERVICE = getattr(settings, 'CDN_SERVICE')
CDN_USERNAME = getattr(settings, 'CDN_USERNAME')
CDN_KEY = getattr(settings, 'CDN_KEY')
CDN_SECET = getattr(settings, 'CDN_SECET')
CDN_ID = getattr(settings, 'CDN_ID')
def purge(files):
log.error("CDN not configured, can't purge files")
if CDN_USERNAME and CDN_KEY and CDN_SECET and CDN_ID:
if CDN_SERVICE == 'maxcdn':
from maxcdn import MaxCDN as cdn_service
api = cdn_service(CDN_USERNAME, CDN_KEY, CDN_SECET)
def purge(files):
return api.purge(CDN_ID, files)
| import logging
from django.conf import settings
log = logging.getLogger(__name__)
CDN_SERVICE = getattr(settings, 'CDN_SERVICE')
CDN_USERNAME = getattr(settings, 'CDN_USERNAME')
CDN_KEY = getattr(settings, 'CDN_KEY')
CDN_SECET = getattr(settings, 'CDN_SECET')
CDN_ID = getattr(settings, 'CDN_ID')
def purge(files):
log.error("CDN not configured, can't purge files")
if CDN_USERNAME and CDN_KEY and CDN_SECET and CDN_ID:
if CDN_SERVICE == 'maxcdn':
from maxcdn import MaxCDN
api = MaxCDN(CDN_USERNAME, CDN_KEY, CDN_SECET)
def purge(files):
return api.purge(CDN_ID, files)
| Clean up bad logic to make it slightly less bad | Clean up bad logic to make it slightly less bad
| Python | mit | sid-kap/readthedocs.org,wanghaven/readthedocs.org,CedarLogic/readthedocs.org,mhils/readthedocs.org,sunnyzwh/readthedocs.org,titiushko/readthedocs.org,laplaceliu/readthedocs.org,hach-que/readthedocs.org,pombredanne/readthedocs.org,safwanrahman/readthedocs.org,wanghaven/readthedocs.org,michaelmcandrew/readthedocs.org,safwanrahman/readthedocs.org,fujita-shintaro/readthedocs.org,techtonik/readthedocs.org,stevepiercy/readthedocs.org,singingwolfboy/readthedocs.org,fujita-shintaro/readthedocs.org,attakei/readthedocs-oauth,techtonik/readthedocs.org,hach-que/readthedocs.org,kenwang76/readthedocs.org,clarkperkins/readthedocs.org,royalwang/readthedocs.org,atsuyim/readthedocs.org,clarkperkins/readthedocs.org,atsuyim/readthedocs.org,kenwang76/readthedocs.org,clarkperkins/readthedocs.org,wanghaven/readthedocs.org,soulshake/readthedocs.org,emawind84/readthedocs.org,attakei/readthedocs-oauth,michaelmcandrew/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,istresearch/readthedocs.org,mhils/readthedocs.org,titiushko/readthedocs.org,SteveViss/readthedocs.org,espdev/readthedocs.org,royalwang/readthedocs.org,rtfd/readthedocs.org,singingwolfboy/readthedocs.org,davidfischer/readthedocs.org,clarkperkins/readthedocs.org,titiushko/readthedocs.org,techtonik/readthedocs.org,attakei/readthedocs-oauth,michaelmcandrew/readthedocs.org,stevepiercy/readthedocs.org,GovReady/readthedocs.org,pombredanne/readthedocs.org,royalwang/readthedocs.org,wijerasa/readthedocs.org,laplaceliu/readthedocs.org,sid-kap/readthedocs.org,GovReady/readthedocs.org,espdev/readthedocs.org,kenshinthebattosai/readthedocs.org,istresearch/readthedocs.org,SteveViss/readthedocs.org,Tazer/readthedocs.org,VishvajitP/readthedocs.org,fujita-shintaro/readthedocs.org,tddv/readthedocs.org,stevepiercy/readthedocs.org,fujita-shintaro/readthedocs.org,royalwang/readthedocs.org,LukasBoersma/readthedocs.org,wijerasa/readthedocs.org,sid-kap/readthedocs.org,gjtorikian/readthedocs.org,emawind84/readthedoc
s.org,kenwang76/readthedocs.org,CedarLogic/readthedocs.org,wanghaven/readthedocs.org,tddv/readthedocs.org,atsuyim/readthedocs.org,CedarLogic/readthedocs.org,davidfischer/readthedocs.org,LukasBoersma/readthedocs.org,espdev/readthedocs.org,emawind84/readthedocs.org,CedarLogic/readthedocs.org,singingwolfboy/readthedocs.org,istresearch/readthedocs.org,emawind84/readthedocs.org,SteveViss/readthedocs.org,tddv/readthedocs.org,Tazer/readthedocs.org,attakei/readthedocs-oauth,kenwang76/readthedocs.org,safwanrahman/readthedocs.org,VishvajitP/readthedocs.org,stevepiercy/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,singingwolfboy/readthedocs.org,kenshinthebattosai/readthedocs.org,hach-que/readthedocs.org,gjtorikian/readthedocs.org,VishvajitP/readthedocs.org,sunnyzwh/readthedocs.org,wijerasa/readthedocs.org,mhils/readthedocs.org,gjtorikian/readthedocs.org,GovReady/readthedocs.org,VishvajitP/readthedocs.org,sunnyzwh/readthedocs.org,rtfd/readthedocs.org,laplaceliu/readthedocs.org,soulshake/readthedocs.org,rtfd/readthedocs.org,soulshake/readthedocs.org,LukasBoersma/readthedocs.org,Tazer/readthedocs.org,sid-kap/readthedocs.org,soulshake/readthedocs.org,hach-que/readthedocs.org,espdev/readthedocs.org,davidfischer/readthedocs.org,kenshinthebattosai/readthedocs.org,rtfd/readthedocs.org,GovReady/readthedocs.org,LukasBoersma/readthedocs.org,Tazer/readthedocs.org,espdev/readthedocs.org,gjtorikian/readthedocs.org,pombredanne/readthedocs.org,kenshinthebattosai/readthedocs.org,SteveViss/readthedocs.org,mhils/readthedocs.org,wijerasa/readthedocs.org,laplaceliu/readthedocs.org,techtonik/readthedocs.org,sunnyzwh/readthedocs.org,michaelmcandrew/readthedocs.org,atsuyim/readthedocs.org |
552afcd33d890d2798b52919c0b4c0d146b7d914 | make_ids.py | make_ids.py | #!/usr/bin/env python
import csv
import json
import os
import sys
def format_entities_as_list(entities):
for i, entity in enumerate(entities, 1):
yield (unicode(i), json.dumps(entity["terms"]))
def generate_entities(fobj):
termsets_seen = set()
for line in fobj:
entity = json.loads(line)
termset = tuple(entity["terms"])
if termset not in termsets_seen:
termsets_seen.add(termset)
yield entity
def load_entities_from_file(infile, outfile):
if os.path.exists(outfile):
raise RuntimeError("Output file %r already exists" % outfile)
with open(infile) as in_fobj:
with open(outfile, "wb") as out_fobj:
writer = csv.writer(out_fobj)
for row in format_entities_as_list(generate_entities(in_fobj)):
writer.writerow(row)
if __name__ == '__main__':
load_entities_from_file(sys.argv[1], sys.argv[2])
| #!/usr/bin/env python
import csv
import json
import os
import sys
def format_entities_as_list(entities):
"""Format entities read from an iterator as lists.
:param entities: An iterator yielding entities as dicts:
eg {"terms": ["Fred"]}
Yield a sequence of entites formatted as lists containing string values.
Also allocates identifier numbers. Sequences are formatted as json. eg:
["1", '["Fred"]']
The resulting sequence is ideal for conversion to CSV.
"""
for i, entity in enumerate(entities, 1):
yield (unicode(i), json.dumps(entity["terms"]))
def generate_entities(fobj):
"""Generate entities by reading from a file object.
:param fobj: File object to read from. Each line in the file should
represent an entity.
Yields a sequence of dicts representing entities, where the dicts will
contain at the least a "terms" object.
"""
termsets_seen = set()
for line in fobj:
entity = json.loads(line)
termset = tuple(entity["terms"])
if termset not in termsets_seen:
termsets_seen.add(termset)
yield entity
def convert_entities_from_file_to_csv(infile, outfile):
"""Convert entities from a file to CSV format.
:param infile: The file name to read entities from. Formatted as jsonlines
(http://jsonlines.org/) - one line per entity.
:param outfile: The file name to write entities to as CSV.
"""
if os.path.exists(outfile):
raise RuntimeError("Output file %r already exists" % outfile)
with open(infile) as in_fobj:
with open(outfile, "wb") as out_fobj:
writer = csv.writer(out_fobj)
for row in format_entities_as_list(generate_entities(in_fobj)):
writer.writerow(row)
if __name__ == '__main__':
convert_entities_from_file_to_csv(sys.argv[1], sys.argv[2])
| Add docstrings to all functions | Add docstrings to all functions
| Python | mit | alphagov/entity-manager |
4663fdb44628238997ecc5adbb0f0193c99efc6c | script/lib/config.py | script/lib/config.py | #!/usr/bin/env python
import platform
import sys
BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '26dd65a62e35aa98b25c10cbfc00f1a621fd4c4b'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
verbose_mode = False
def enable_verbose_mode():
print 'Running in verbose mode'
global verbose_mode
verbose_mode = True
def is_verbose_mode():
return verbose_mode
| #!/usr/bin/env python
import platform
import sys
BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'c01b10faf0d478e48f537210ec263fabd551578d'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
# Module-level flag; set by enable_verbose_mode() and read via is_verbose_mode().
verbose_mode = False


def enable_verbose_mode():
    # Python 2 print statement -- this script targets Python 2.
    print 'Running in verbose mode'
    global verbose_mode
    verbose_mode = True


def is_verbose_mode():
    # Accessor so callers always observe the current flag value.
    return verbose_mode
| Update libchromiumcontent to disable zygote process | Update libchromiumcontent to disable zygote process
| Python | mit | Jonekee/electron,Faiz7412/electron,cos2004/electron,mubassirhayat/electron,smczk/electron,fomojola/electron,Jonekee/electron,IonicaBizauKitchen/electron,howmuchcomputer/electron,Floato/electron,bruce/electron,joaomoreno/atom-shell,tomashanacek/electron,ervinb/electron,gbn972/electron,Zagorakiss/electron,rhencke/electron,tomashanacek/electron,biblerule/UMCTelnetHub,beni55/electron,astoilkov/electron,deed02392/electron,coderhaoxin/electron,dkfiresky/electron,bruce/electron,shaundunne/electron,oiledCode/electron,systembugtj/electron,brave/muon,lzpfmh/electron,coderhaoxin/electron,Jonekee/electron,miniak/electron,eriser/electron,shaundunne/electron,brenca/electron,howmuchcomputer/electron,cqqccqc/electron,bbondy/electron,RobertJGabriel/electron,pandoraui/electron,anko/electron,destan/electron,eriser/electron,simonfork/electron,kcrt/electron,egoist/electron,jiaz/electron,gamedevsam/electron,twolfson/electron,bwiggs/electron,leethomas/electron,iftekeriba/electron,shockone/electron,adamjgray/electron,edulan/electron,jaanus/electron,michaelchiche/electron,evgenyzinoviev/electron,howmuchcomputer/electron,xiruibing/electron,jannishuebl/electron,hokein/atom-shell,baiwyc119/electron,nicobot/electron,leftstick/electron,preco21/electron,MaxWhere/electron,destan/electron,michaelchiche/electron,jiaz/electron,LadyNaggaga/electron,bitemyapp/electron,aaron-goshine/electron,brave/muon,deed02392/electron,Evercoder/electron,vaginessa/electron,Jacobichou/electron,LadyNaggaga/electron,Andrey-Pavlov/electron,gabriel/electron,gbn972/electron,shennushi/electron,Rokt33r/electron,neutrous/electron,xiruibing/electron,electron/electron,soulteary/electron,setzer777/electron,jtburke/electron,smczk/electron,RIAEvangelist/electron,leethomas/electron,bwiggs/electron,yan-foto/electron,shennushi/electron,yan-foto/electron,jiaz/electron,bbondy/electron,posix4e/electron,ervinb/electron,Neron-X5/electron,michaelchiche/electron,meowlab/electron,aaron-goshine/electron,greyhwndz/electron,fomo
jola/electron,carsonmcdonald/electron,matiasinsaurralde/electron,MaxGraey/electron,vHanda/electron,noikiy/electron,synaptek/electron,felixrieseberg/electron,mattotodd/electron,medixdev/electron,JussMee15/electron,tinydew4/electron,adamjgray/electron,jacksondc/electron,leolujuyi/electron,rsvip/electron,voidbridge/electron,leethomas/electron,dahal/electron,trankmichael/electron,adamjgray/electron,aaron-goshine/electron,leolujuyi/electron,faizalpribadi/electron,jjz/electron,systembugtj/electron,astoilkov/electron,tinydew4/electron,trankmichael/electron,rreimann/electron,twolfson/electron,aichingm/electron,jsutcodes/electron,davazp/electron,pirafrank/electron,rhencke/electron,kcrt/electron,jaanus/electron,jlhbaseball15/electron,carsonmcdonald/electron,preco21/electron,RobertJGabriel/electron,gabrielPeart/electron,rreimann/electron,cqqccqc/electron,shaundunne/electron,gerhardberger/electron,dkfiresky/electron,yalexx/electron,joneit/electron,fffej/electron,ankitaggarwal011/electron,leolujuyi/electron,bobwol/electron,jlord/electron,dongjoon-hyun/electron,aliib/electron,brenca/electron,dahal/electron,shennushi/electron,Neron-X5/electron,michaelchiche/electron,saronwei/electron,Evercoder/electron,anko/electron,mattotodd/electron,nicholasess/electron,JussMee15/electron,synaptek/electron,chrisswk/electron,pirafrank/electron,Evercoder/electron,stevemao/electron,mhkeller/electron,mjaniszew/electron,egoist/electron,Neron-X5/electron,carsonmcdonald/electron,nekuz0r/electron,brave/muon,bpasero/electron,wolfflow/electron,gerhardberger/electron,icattlecoder/electron,kokdemo/electron,IonicaBizauKitchen/electron,eriser/electron,coderhaoxin/electron,bright-sparks/electron,sircharleswatson/electron,cqqccqc/electron,baiwyc119/electron,thompsonemerson/electron,hokein/atom-shell,Andrey-Pavlov/electron,leethomas/electron,trigrass2/electron,icattlecoder/electron,bbondy/electron,SufianHassan/electron,simonfork/electron,arusakov/electron,jjz/electron,jannishuebl/electron,DivyaKMenon/electron,ko
stia/electron,pirafrank/electron,tylergibson/electron,bbondy/electron,pirafrank/electron,rhencke/electron,bitemyapp/electron,kazupon/electron,jtburke/electron,digideskio/electron,takashi/electron,joneit/electron,simongregory/electron,anko/electron,astoilkov/electron,Andrey-Pavlov/electron,jsutcodes/electron,hokein/atom-shell,John-Lin/electron,Jonekee/electron,kokdemo/electron,shiftkey/electron,stevemao/electron,timruffles/electron,aecca/electron,MaxWhere/electron,medixdev/electron,nicobot/electron,minggo/electron,posix4e/electron,Ivshti/electron,tonyganch/electron,nicholasess/electron,cqqccqc/electron,aichingm/electron,jaanus/electron,preco21/electron,natgolov/electron,howmuchcomputer/electron,Evercoder/electron,pandoraui/electron,jhen0409/electron,sshiting/electron,minggo/electron,Andrey-Pavlov/electron,tonyganch/electron,John-Lin/electron,Faiz7412/electron,neutrous/electron,miniak/electron,evgenyzinoviev/electron,xiruibing/electron,dongjoon-hyun/electron,greyhwndz/electron,shaundunne/electron,nicholasess/electron,brenca/electron,jjz/electron,Faiz7412/electron,minggo/electron,leftstick/electron,trigrass2/electron,stevemao/electron,hokein/atom-shell,meowlab/electron,tomashanacek/electron,jiaz/electron,webmechanicx/electron,christian-bromann/electron,tincan24/electron,zhakui/electron,tylergibson/electron,thompsonemerson/electron,kcrt/electron,sircharleswatson/electron,setzer777/electron,ianscrivener/electron,aliib/electron,neutrous/electron,arturts/electron,darwin/electron,astoilkov/electron,leftstick/electron,GoooIce/electron,John-Lin/electron,jaanus/electron,bpasero/electron,fabien-d/electron,brave/muon,biblerule/UMCTelnetHub,farmisen/electron,ianscrivener/electron,evgenyzinoviev/electron,farmisen/electron,stevekinney/electron,saronwei/electron,arturts/electron,aaron-goshine/electron,voidbridge/electron,fabien-d/electron,roadev/electron,jonatasfreitasv/electron,rhencke/electron,RIAEvangelist/electron,ervinb/electron,the-ress/electron,wan-qy/electron,ianscrivener/el
ectron,posix4e/electron,natgolov/electron,kcrt/electron,electron/electron,wolfflow/electron,shaundunne/electron,timruffles/electron,mhkeller/electron,tincan24/electron,fritx/electron,chriskdon/electron,noikiy/electron,gerhardberger/electron,biblerule/UMCTelnetHub,joaomoreno/atom-shell,saronwei/electron,gabrielPeart/electron,egoist/electron,brave/electron,systembugtj/electron,bruce/electron,vaginessa/electron,jonatasfreitasv/electron,christian-bromann/electron,cos2004/electron,fffej/electron,anko/electron,aichingm/electron,rreimann/electron,lzpfmh/electron,jhen0409/electron,cos2004/electron,takashi/electron,rhencke/electron,tincan24/electron,brenca/electron,tomashanacek/electron,nekuz0r/electron,chrisswk/electron,brenca/electron,meowlab/electron,chriskdon/electron,gabrielPeart/electron,edulan/electron,yan-foto/electron,davazp/electron,eric-seekas/electron,Faiz7412/electron,benweissmann/electron,chriskdon/electron,robinvandernoord/electron,jacksondc/electron,wan-qy/electron,bobwol/electron,davazp/electron,Floato/electron,MaxGraey/electron,leethomas/electron,JesselJohn/electron,JesselJohn/electron,twolfson/electron,gabriel/electron,christian-bromann/electron,gabrielPeart/electron,gabriel/electron,GoooIce/electron,Rokt33r/electron,thompsonemerson/electron,simonfork/electron,natgolov/electron,webmechanicx/electron,electron/electron,Andrey-Pavlov/electron,coderhaoxin/electron,Gerhut/electron,aichingm/electron,IonicaBizauKitchen/electron,sky7sea/electron,felixrieseberg/electron,xfstudio/electron,etiktin/electron,Gerhut/electron,DivyaKMenon/electron,timruffles/electron,thompsonemerson/electron,gamedevsam/electron,shockone/electron,stevemao/electron,oiledCode/electron,Rokt33r/electron,trankmichael/electron,seanchas116/electron,bitemyapp/electron,Zagorakiss/electron,sircharleswatson/electron,abhishekgahlot/electron,adcentury/electron,brave/electron,subblue/electron,leolujuyi/electron,matiasinsaurralde/electron,LadyNaggaga/electron,bwiggs/electron,fabien-d/electron,neutrous/el
ectron,brave/electron,arusakov/electron,adcentury/electron,sky7sea/electron,pandoraui/electron,minggo/electron,dongjoon-hyun/electron,sky7sea/electron,tincan24/electron,fritx/electron,medixdev/electron,joneit/electron,ianscrivener/electron,mirrh/electron,yalexx/electron,tomashanacek/electron,soulteary/electron,sshiting/electron,leolujuyi/electron,tincan24/electron,baiwyc119/electron,saronwei/electron,coderhaoxin/electron,trankmichael/electron,yalexx/electron,pombredanne/electron,benweissmann/electron,bright-sparks/electron,chriskdon/electron,leftstick/electron,bobwol/electron,jonatasfreitasv/electron,Gerhut/electron,shiftkey/electron,pombredanne/electron,vipulroxx/electron,mrwizard82d1/electron,JesselJohn/electron,tonyganch/electron,mrwizard82d1/electron,jiaz/electron,thomsonreuters/electron,davazp/electron,renaesop/electron,fomojola/electron,preco21/electron,jonatasfreitasv/electron,kostia/electron,aliib/electron,darwin/electron,gbn972/electron,saronwei/electron,mjaniszew/electron,zhakui/electron,mubassirhayat/electron,John-Lin/electron,MaxWhere/electron,joneit/electron,aichingm/electron,bruce/electron,icattlecoder/electron,mhkeller/electron,Floato/electron,aliib/electron,trankmichael/electron,greyhwndz/electron,jlord/electron,bwiggs/electron,deed02392/electron,dongjoon-hyun/electron,fomojola/electron,egoist/electron,trankmichael/electron,shockone/electron,RIAEvangelist/electron,Floato/electron,jlhbaseball15/electron,kenmozi/electron,vaginessa/electron,GoooIce/electron,gerhardberger/electron,vHanda/electron,jlhbaseball15/electron,noikiy/electron,bpasero/electron,etiktin/electron,Ivshti/electron,tinydew4/electron,yalexx/electron,the-ress/electron,GoooIce/electron,subblue/electron,ankitaggarwal011/electron,adamjgray/electron,vipulroxx/electron,hokein/atom-shell,matiasinsaurralde/electron,xfstudio/electron,takashi/electron,brave/electron,ianscrivener/electron,smczk/electron,dahal/electron,shiftkey/electron,leolujuyi/electron,evgenyzinoviev/electron,shiftkey/electron,e
goist/electron,kenmozi/electron,systembugtj/electron,simongregory/electron,webmechanicx/electron,d-salas/electron,joneit/electron,fffej/electron,vipulroxx/electron,SufianHassan/electron,SufianHassan/electron,brenca/electron,aecca/electron,tylergibson/electron,leethomas/electron,egoist/electron,jannishuebl/electron,lzpfmh/electron,bobwol/electron,the-ress/electron,Neron-X5/electron,gabrielPeart/electron,ankitaggarwal011/electron,greyhwndz/electron,simongregory/electron,yan-foto/electron,dkfiresky/electron,rreimann/electron,faizalpribadi/electron,smczk/electron,nicholasess/electron,nekuz0r/electron,minggo/electron,sshiting/electron,beni55/electron,trigrass2/electron,mrwizard82d1/electron,Floato/electron,mattdesl/electron,pombredanne/electron,roadev/electron,arusakov/electron,MaxWhere/electron,micalan/electron,mattotodd/electron,jlhbaseball15/electron,bpasero/electron,baiwyc119/electron,chrisswk/electron,baiwyc119/electron,subblue/electron,vaginessa/electron,Zagorakiss/electron,electron/electron,MaxGraey/electron,robinvandernoord/electron,thingsinjars/electron,roadev/electron,joaomoreno/atom-shell,robinvandernoord/electron,arturts/electron,iftekeriba/electron,nicobot/electron,seanchas116/electron,destan/electron,adcentury/electron,synaptek/electron,micalan/electron,tylergibson/electron,kazupon/electron,Jonekee/electron,beni55/electron,jjz/electron,bobwol/electron,brave/electron,rajatsingla28/electron,kazupon/electron,cqqccqc/electron,felixrieseberg/electron,sircharleswatson/electron,nekuz0r/electron,IonicaBizauKitchen/electron,wan-qy/electron,tonyganch/electron,smczk/electron,dongjoon-hyun/electron,jsutcodes/electron,wan-qy/electron,thomsonreuters/electron,GoooIce/electron,stevekinney/electron,biblerule/UMCTelnetHub,jannishuebl/electron,xiruibing/electron,tylergibson/electron,jiaz/electron,joaomoreno/atom-shell,timruffles/electron,kazupon/electron,digideskio/electron,jlord/electron,digideskio/electron,Floato/electron,wan-qy/electron,gamedevsam/electron,mirrh/electron,b
wiggs/electron,bright-sparks/electron,IonicaBizauKitchen/electron,jhen0409/electron,sircharleswatson/electron,vHanda/electron,kostia/electron,oiledCode/electron,edulan/electron,arusakov/electron,JussMee15/electron,rajatsingla28/electron,jsutcodes/electron,bright-sparks/electron,faizalpribadi/electron,xfstudio/electron,leftstick/electron,eric-seekas/electron,biblerule/UMCTelnetHub,kenmozi/electron,gamedevsam/electron,carsonmcdonald/electron,MaxGraey/electron,mhkeller/electron,jjz/electron,rreimann/electron,IonicaBizauKitchen/electron,John-Lin/electron,ervinb/electron,meowlab/electron,Gerhut/electron,jtburke/electron,simongregory/electron,miniak/electron,the-ress/electron,astoilkov/electron,setzer777/electron,darwin/electron,aecca/electron,gamedevsam/electron,darwin/electron,takashi/electron,etiktin/electron,Neron-X5/electron,brave/muon,renaesop/electron,mattotodd/electron,jcblw/electron,trigrass2/electron,fireball-x/atom-shell,gabriel/electron,pandoraui/electron,medixdev/electron,lrlna/electron,abhishekgahlot/electron,bitemyapp/electron,iftekeriba/electron,jaanus/electron,eric-seekas/electron,posix4e/electron,xiruibing/electron,simongregory/electron,fireball-x/atom-shell,mrwizard82d1/electron,Jonekee/electron,bright-sparks/electron,thomsonreuters/electron,SufianHassan/electron,leftstick/electron,eric-seekas/electron,aichingm/electron,renaesop/electron,fritx/electron,deed02392/electron,voidbridge/electron,roadev/electron,noikiy/electron,thomsonreuters/electron,chrisswk/electron,pandoraui/electron,RIAEvangelist/electron,mjaniszew/electron,stevekinney/electron,digideskio/electron,takashi/electron,zhakui/electron,Zagorakiss/electron,xfstudio/electron,subblue/electron,aaron-goshine/electron,fffej/electron,pombredanne/electron,cos2004/electron,Gerhut/electron,thingsinjars/electron,gabrielPeart/electron,electron/electron,jtburke/electron,eric-seekas/electron,kokdemo/electron,shennushi/electron,dahal/electron,mrwizard82d1/electron,michaelchiche/electron,pirafrank/electron,ro
adev/electron,deed02392/electron,icattlecoder/electron,fritx/electron,the-ress/electron,stevemao/electron,kostia/electron,neutrous/electron,darwin/electron,Rokt33r/electron,preco21/electron,miniak/electron,soulteary/electron,howmuchcomputer/electron,LadyNaggaga/electron,webmechanicx/electron,RobertJGabriel/electron,d-salas/electron,Evercoder/electron,simonfork/electron,kokdemo/electron,vipulroxx/electron,carsonmcdonald/electron,d-salas/electron,bwiggs/electron,DivyaKMenon/electron,felixrieseberg/electron,lrlna/electron,edulan/electron,tincan24/electron,mjaniszew/electron,pombredanne/electron,zhakui/electron,fffej/electron,christian-bromann/electron,kazupon/electron,jaanus/electron,ianscrivener/electron,thomsonreuters/electron,sshiting/electron,electron/electron,MaxWhere/electron,robinvandernoord/electron,rreimann/electron,wan-qy/electron,voidbridge/electron,subblue/electron,sky7sea/electron,kenmozi/electron,brave/electron,setzer777/electron,tinydew4/electron,rsvip/electron,kenmozi/electron,synaptek/electron,matiasinsaurralde/electron,oiledCode/electron,voidbridge/electron,gabriel/electron,destan/electron,digideskio/electron,gerhardberger/electron,abhishekgahlot/electron,renaesop/electron,michaelchiche/electron,matiasinsaurralde/electron,rhencke/electron,bitemyapp/electron,kostia/electron,jlhbaseball15/electron,shiftkey/electron,mhkeller/electron,joaomoreno/atom-shell,dkfiresky/electron,gabriel/electron,pirafrank/electron,mattdesl/electron,shaundunne/electron,mubassirhayat/electron,yalexx/electron,cos2004/electron,jhen0409/electron,davazp/electron,fireball-x/atom-shell,GoooIce/electron,mirrh/electron,Faiz7412/electron,vipulroxx/electron,webmechanicx/electron,DivyaKMenon/electron,jsutcodes/electron,lzpfmh/electron,jlord/electron,baiwyc119/electron,mattdesl/electron,nekuz0r/electron,subblue/electron,vipulroxx/electron,nicobot/electron,eriser/electron,mirrh/electron,the-ress/electron,nicholasess/electron,BionicClick/electron,BionicClick/electron,wolfflow/electron,tylerg
ibson/electron,faizalpribadi/electron,pandoraui/electron,Evercoder/electron,RIAEvangelist/electron,adcentury/electron,thomsonreuters/electron,eric-seekas/electron,micalan/electron,eriser/electron,destan/electron,benweissmann/electron,rajatsingla28/electron,RobertJGabriel/electron,farmisen/electron,mhkeller/electron,davazp/electron,Jacobichou/electron,bitemyapp/electron,mirrh/electron,twolfson/electron,jacksondc/electron,oiledCode/electron,benweissmann/electron,roadev/electron,ankitaggarwal011/electron,jsutcodes/electron,kokdemo/electron,thingsinjars/electron,lrlna/electron,vaginessa/electron,farmisen/electron,adcentury/electron,icattlecoder/electron,neutrous/electron,JussMee15/electron,nekuz0r/electron,bpasero/electron,d-salas/electron,shiftkey/electron,brave/muon,soulteary/electron,webmechanicx/electron,Ivshti/electron,thompsonemerson/electron,LadyNaggaga/electron,wolfflow/electron,JesselJohn/electron,trigrass2/electron,jacksondc/electron,vaginessa/electron,vHanda/electron,smczk/electron,vHanda/electron,xiruibing/electron,mattdesl/electron,electron/electron,BionicClick/electron,BionicClick/electron,jacksondc/electron,ankitaggarwal011/electron,RobertJGabriel/electron,chrisswk/electron,adamjgray/electron,jcblw/electron,Ivshti/electron,Jacobichou/electron,JussMee15/electron,cqqccqc/electron,yan-foto/electron,jonatasfreitasv/electron,coderhaoxin/electron,rajatsingla28/electron,bruce/electron,faizalpribadi/electron,adamjgray/electron,simongregory/electron,iftekeriba/electron,kostia/electron,fomojola/electron,mjaniszew/electron,jcblw/electron,bbondy/electron,jlord/electron,abhishekgahlot/electron,BionicClick/electron,jjz/electron,benweissmann/electron,beni55/electron,posix4e/electron,zhakui/electron,jhen0409/electron,jhen0409/electron,aecca/electron,medixdev/electron,bobwol/electron,MaxWhere/electron,bruce/electron,ervinb/electron,meowlab/electron,LadyNaggaga/electron,MaxGraey/electron,pombredanne/electron,joaomoreno/atom-shell,fireball-x/atom-shell,JesselJohn/electron,e
riser/electron,gamedevsam/electron,rsvip/electron,arturts/electron,seanchas116/electron,Rokt33r/electron,Jacobichou/electron,Neron-X5/electron,voidbridge/electron,fritx/electron,jtburke/electron,dkfiresky/electron,farmisen/electron,Zagorakiss/electron,rajatsingla28/electron,kokdemo/electron,mirrh/electron,shockone/electron,SufianHassan/electron,noikiy/electron,synaptek/electron,zhakui/electron,anko/electron,aecca/electron,howmuchcomputer/electron,twolfson/electron,thingsinjars/electron,systembugtj/electron,robinvandernoord/electron,saronwei/electron,lrlna/electron,dongjoon-hyun/electron,Andrey-Pavlov/electron,timruffles/electron,lrlna/electron,tomashanacek/electron,greyhwndz/electron,natgolov/electron,minggo/electron,RobertJGabriel/electron,beni55/electron,etiktin/electron,oiledCode/electron,micalan/electron,gerhardberger/electron,sshiting/electron,tonyganch/electron,aecca/electron,gbn972/electron,DivyaKMenon/electron,xfstudio/electron,mjaniszew/electron,thingsinjars/electron,fffej/electron,sshiting/electron,lzpfmh/electron,robinvandernoord/electron,Ivshti/electron,Jacobichou/electron,gbn972/electron,medixdev/electron,kcrt/electron,thingsinjars/electron,dkfiresky/electron,adcentury/electron,anko/electron,icattlecoder/electron,kcrt/electron,tinydew4/electron,natgolov/electron,evgenyzinoviev/electron,benweissmann/electron,abhishekgahlot/electron,miniak/electron,jannishuebl/electron,JesselJohn/electron,destan/electron,Gerhut/electron,Jacobichou/electron,mattdesl/electron,miniak/electron,d-salas/electron,aliib/electron,twolfson/electron,christian-bromann/electron,dahal/electron,edulan/electron,bright-sparks/electron,trigrass2/electron,soulteary/electron,yalexx/electron,jannishuebl/electron,kazupon/electron,fomojola/electron,JussMee15/electron,rajatsingla28/electron,mattotodd/electron,bpasero/electron,chriskdon/electron,DivyaKMenon/electron,micalan/electron,stevemao/electron,mattotodd/electron,shennushi/electron,digideskio/electron,arusakov/electron,tinydew4/electron,ali
ib/electron,gerhardberger/electron,seanchas116/electron,farmisen/electron,natgolov/electron,SufianHassan/electron,jcblw/electron,simonfork/electron,mattdesl/electron,synaptek/electron,biblerule/UMCTelnetHub,nicobot/electron,iftekeriba/electron,deed02392/electron,arturts/electron,renaesop/electron,greyhwndz/electron,seanchas116/electron,jcblw/electron,joneit/electron,John-Lin/electron,setzer777/electron,the-ress/electron,nicobot/electron,aaron-goshine/electron,renaesop/electron,felixrieseberg/electron,ankitaggarwal011/electron,faizalpribadi/electron,Rokt33r/electron,chriskdon/electron,simonfork/electron,christian-bromann/electron,mubassirhayat/electron,soulteary/electron,fritx/electron,shockone/electron,iftekeriba/electron,jacksondc/electron,arturts/electron,stevekinney/electron,takashi/electron,stevekinney/electron,rsvip/electron,dahal/electron,noikiy/electron,cos2004/electron,abhishekgahlot/electron,bpasero/electron,micalan/electron,BionicClick/electron,fireball-x/atom-shell,lzpfmh/electron,matiasinsaurralde/electron,shennushi/electron,d-salas/electron,shockone/electron,tonyganch/electron,beni55/electron,Zagorakiss/electron,etiktin/electron,mubassirhayat/electron,arusakov/electron,wolfflow/electron,sky7sea/electron,posix4e/electron,mrwizard82d1/electron,thompsonemerson/electron,ervinb/electron,rsvip/electron,fabien-d/electron,sky7sea/electron,edulan/electron,stevekinney/electron,systembugtj/electron,jonatasfreitasv/electron,seanchas116/electron,xfstudio/electron,meowlab/electron,wolfflow/electron,jcblw/electron,setzer777/electron,kenmozi/electron,jtburke/electron,fabien-d/electron,lrlna/electron,carsonmcdonald/electron,etiktin/electron,preco21/electron,vHanda/electron,bbondy/electron,astoilkov/electron,felixrieseberg/electron,sircharleswatson/electron,jlhbaseball15/electron,gbn972/electron,yan-foto/electron,evgenyzinoviev/electron,nicholasess/electron,RIAEvangelist/electron |
6869d5edd706d95c8cadbd1945b29fdd3bfecd6b | blaze/datashape/unification.py | blaze/datashape/unification.py | """
Unification is a generalization of Numpy broadcasting.
In Numpy we take two arrays and broadcast them to yield similarly
shaped arrays.
In Blaze we take two arrays with more complex datashapes and
unify the types prescribed by more complicated pattern matching
on the types.
"""
from numpy import promote_types
from coretypes import Fixed, Range, TypeVar, Record, \
CType, Enum, top, dynamic
class Incommensurable(Exception):
    """Raised when two datashape parameters cannot be unified."""

    def __init__(self, space, dim):
        self.space = space
        self.dim = dim

    def __str__(self):
        return "No way of unifying ({0}) ({1})".format(self.space, self.dim)
def unify(a, b):
    """
    Unification of Datashapes.

    Combine two datashape parameters into the most specific parameter
    covering both, generalizing Numpy broadcasting.

    :param a: first datashape parameter
    :param b: second datashape parameter
    :raises Incommensurable: when no unification rule applies
    """
    ta = type(a)
    tb = type(b)

    # --
    # Unification over BlazeT has two zeros
    if ta == top or tb == top:
        return top
    if ta == dynamic or tb == dynamic:
        return top

    # --
    if (ta, tb) == (Fixed, Fixed):
        if a.val == b.val:
            return Fixed(a.val)
        else:
            return Enum(a.val, b.val)

    # --
    if (ta, tb) == (TypeVar, Fixed):
        return TypeVar('x0')
    if (ta, tb) == (Fixed, TypeVar):
        return TypeVar('x0')

    # --
    if (ta, tb) == (Record, Record):
        c = a.d.items() + b.d.items()
        return Record(**dict(c))

    # --
    if (ta, tb) == (Fixed, Range):
        return Range(min(a.val, b.lower), max(a.val, b.upper))
    if (ta, tb) == (Range, Fixed):
        # BUG FIX: Range has no ``val`` attribute (see the other arms, which
        # read ``lower``/``upper``); the upper bound must use ``a.upper``.
        return Range(min(a.lower, b.val), max(a.upper, b.val))
    if (ta, tb) == (Range, Range):
        # BUG FIX: the upper bound compared ``b.upper`` with itself; it must
        # consider both ranges.
        return Range(min(a.lower, b.lower), max(a.upper, b.upper))

    # --
    #if (ta,tb) == (Union, Union):
        #return Union(a.parameters + b.parameters)

    # --
    if (ta, tb) == (CType, CType):
        return CType.from_str(promote_types(a.name, b.name).name)

    raise Incommensurable(a, b)
| """
Unification is a generalization of Numpy broadcasting.
In Numpy we take two arrays and broadcast them to yield similarly
shaped arrays.
In Blaze we take two arrays with more complex datashapes and
unify the types prescribed by more complicated pattern matching
on the types.
"""
from numpy import promote_types
from blaze.datashape.coretypes import TypeVar
from blaze.expr.typeinference import infer
class Incommensurable(TypeError):
    """Raised when datashapes cannot be unified.

    Subclasses TypeError so callers can catch it as an ordinary type error.
    """
    pass
def unify(sig, concrete=True):
    """
    Unification of Datashapes.

    Run type inference over ``sig`` and return the resolved parameters when
    they are all concrete.

    NOTE(review): ``concrete`` is accepted but never read -- confirm whether
    it should gate the all-concrete check below.
    NOTE(review): when any TypeVar remains unresolved, control falls off the
    end and the function implicitly returns None -- verify callers expect
    that rather than an exception.
    """
    resolved = infer(sig)
    if all(not isinstance(a, TypeVar) for a in resolved):
        return resolved
| Remove very old type unifier, for robust one | Remove very old type unifier, for robust one
| Python | bsd-2-clause | seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core |
e00dc2a5725faeb3b11c6aac0d9ed0be0a55d33f | OIPA/iati/parser/schema_validators.py | OIPA/iati/parser/schema_validators.py | import os
import os.path
from lxml import etree
from common.util import findnth_occurence_in_string
def validate(iati_parser, xml_etree):
    """Validate *xml_etree* against the bundled IATI XSD for the parser's version.

    Every schema violation reported by lxml is forwarded to
    ``iati_parser.append_error`` with the element/attribute names extracted
    from the error message.
    """
    base = os.path.dirname(os.path.abspath(__file__))
    location = base + "/../schemas/" + iati_parser.VERSION \
        + "/iati-activities-schema.xsd"
    # Context manager closes the XSD file even if parsing raises.
    with open(location) as xsd_data:
        xmlschema_doc = etree.parse(xsd_data)
    xmlschema = etree.XMLSchema(xmlschema_doc)

    xml_errors = None
    try:
        xmlschema.assertValid(xml_etree)
    except etree.DocumentInvalid as e:
        # BUG FIX: binding the exception directly via ``as xml_errors`` made
        # the name unusable afterwards -- Python 3 deletes the ``as`` target
        # when the except clause exits, so ``if xml_errors:`` below raised
        # NameError. Copy the exception to a separate name instead.
        xml_errors = e

    if xml_errors:
        for error in xml_errors.error_log:
            # The offending element name sits between the first pair of
            # single quotes in lxml's message.
            element = error.message[
                (findnth_occurence_in_string(
                    error.message, '\'', 0
                ) + 1):findnth_occurence_in_string(
                    error.message, '\'', 1
                )
            ]

            # When an attribute is involved, its name is quoted second.
            attribute = '-'
            if 'attribute' in error.message:
                attribute = error.message[
                    (findnth_occurence_in_string(
                        error.message, '\'', 2
                    ) + 1):findnth_occurence_in_string(
                        error.message, '\'', 3
                    )
                ]

            iati_parser.append_error(
                'XsdValidationError',
                element,
                attribute,
                error.message.split(':')[0],
                error.line,
                error.message.split(':')[1],
                'unkown for XSD validation errors')  # sic: typo preserved; downstream may match on it
| import os
import os.path
from lxml import etree
from common.util import findnth_occurence_in_string
def validate(iati_parser, xml_etree):
    """Validate *xml_etree* against the bundled IATI XSD for the parser's version.

    Every schema violation reported by lxml is forwarded to
    ``iati_parser.append_error`` with the element/attribute names extracted
    from the error message.
    """
    base = os.path.dirname(os.path.abspath(__file__))
    location = base + "/../schemas/" + iati_parser.VERSION \
        + "/iati-activities-schema.xsd"
    # Context manager closes the XSD file even if parsing raises.
    with open(location) as xsd_data:
        xmlschema_doc = etree.parse(xsd_data)
    xmlschema = etree.XMLSchema(xmlschema_doc)

    xml_errors = None
    try:
        xmlschema.assertValid(xml_etree)
    except etree.DocumentInvalid as e:
        # Copy to a separate name: Python 3 deletes the ``as`` target when
        # the except clause exits. (Removed the redundant trailing ``pass``.)
        xml_errors = e

    if xml_errors:
        for error in xml_errors.error_log:
            # The offending element name sits between the first pair of
            # single quotes in lxml's message.
            element = error.message[
                (findnth_occurence_in_string(
                    error.message, '\'', 0
                ) + 1):findnth_occurence_in_string(
                    error.message, '\'', 1
                )
            ]

            # When an attribute is involved, its name is quoted second.
            attribute = '-'
            if 'attribute' in error.message:
                attribute = error.message[
                    (findnth_occurence_in_string(
                        error.message, '\'', 2
                    ) + 1):findnth_occurence_in_string(
                        error.message, '\'', 3
                    )
                ]

            iati_parser.append_error(
                'XsdValidationError',
                element,
                attribute,
                error.message.split(':')[0],
                error.line,
                error.message.split(':')[1],
                'unkown for XSD validation errors')  # sic: typo preserved; downstream may match on it
OIPA-612 / #589
| Python | agpl-3.0 | openaid-IATI/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA |
8c9739572aa679cb6d55cb31737bff6d304db2d1 | openstack/tests/functional/network/v2/test_extension.py | openstack/tests/functional/network/v2/test_extension.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):

    def test_list_and_find(self):
        """Every listed network extension exposes string name/alias fields."""
        found = list(self.conn.network.extensions())
        self.assertGreater(len(found), 0)
        for extension in found:
            self.assertIsInstance(extension.name, six.string_types)
            self.assertIsInstance(extension.alias, six.string_types)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from openstack.tests.functional import base
class TestExtension(base.BaseFunctionalTest):

    def test_list(self):
        """Every listed network extension exposes string name/alias fields."""
        found = list(self.conn.network.extensions())
        self.assertGreater(len(found), 0)
        for extension in found:
            self.assertIsInstance(extension.name, six.string_types)
            self.assertIsInstance(extension.alias, six.string_types)

    def test_find(self):
        """find_extension resolves the 'external-net' alias to its full name."""
        extension = self.conn.network.find_extension('external-net')
        self.assertEqual('Neutron external network', extension.name)
| Add a functional test for find_extension | Add a functional test for find_extension
Change-Id: I351a1c1529beb3cae799650e1e57364b3521d00c
| Python | apache-2.0 | briancurtin/python-openstacksdk,dtroyer/python-openstacksdk,openstack/python-openstacksdk,dudymas/python-openstacksdk,stackforge/python-openstacksdk,briancurtin/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,dudymas/python-openstacksdk,dtroyer/python-openstacksdk |
d44a338e704732b9e3e7cb935eb2c9b38d2cfa06 | api/drive.py | api/drive.py | # -*- encoding:utf8 -*-
import httplib2
from flask import Blueprint, redirect, request, Response, abort
from model.oauth import OAuth
from model.utils import Utils
# Blueprint grouping the Google Drive OAuth endpoints under the /drive prefix.
drive = Blueprint('drive', __name__, url_prefix='/drive')
@drive.route("/auth", methods=['GET'])
def hookauth():
    """Send the client to Google's OAuth2 consent page (step 1 of the flow)."""
    oauth_flow = OAuth().get_flow()
    if not oauth_flow:
        abort(500)
    return redirect(oauth_flow.step1_get_authorize_url())
@drive.route("/callback", methods=['GET'])
def callback():
    """OAuth2 redirect target: exchange the authorization code for credentials.

    Aborts with 400 when Google did not pass back a ``code`` query parameter.
    """
    try:
        code = request.args['code']
    except KeyError:
        # Narrowed from a bare ``except:``; only a missing query parameter is
        # expected here (werkzeug raises a KeyError subclass for it).
        abort(400)
    flow = OAuth().get_flow()
    credentials = flow.step2_exchange(code)
    http = httplib2.Http()
    # Bind the freshly exchanged credentials to the HTTP client.
    credentials.authorize(http)
    dic = {"response": "success"}
    return Response(Utils().dump_json(dic), mimetype='application/json')
| # -*- encoding:utf8 -*-
import httplib2
from flask import Blueprint, redirect, request, Response, abort
from model.cache import Cache
from model.oauth import OAuth
from model.utils import Utils
# Blueprint grouping the Google Drive OAuth/webhook endpoints under /drive.
drive = Blueprint('drive', __name__, url_prefix='/drive')
@drive.route("/auth", methods=['GET'])
def hookauth():
    """Send the client to Google's OAuth2 consent page (step 1 of the flow)."""
    oauth_flow = OAuth().get_flow()
    if not oauth_flow:
        abort(500)
    return redirect(oauth_flow.step1_get_authorize_url())
@drive.route("/callback", methods=['GET'])
def callback():
    """OAuth2 redirect target: exchange the authorization code for credentials.

    Aborts with 400 when Google did not pass back a ``code`` query parameter.
    """
    try:
        code = request.args['code']
    except KeyError:
        # Narrowed from a bare ``except:``; only a missing query parameter is
        # expected here (werkzeug raises a KeyError subclass for it).
        abort(400)
    flow = OAuth().get_flow()
    credentials = flow.step2_exchange(code)
    http = httplib2.Http()
    # Bind the freshly exchanged credentials to the HTTP client.
    credentials.authorize(http)
    dic = {"response": "success"}
    return Response(Utils().dump_json(dic), mimetype='application/json')
@drive.route("/webhook", methods=['POST'])
def webhook():
document_id = request.json.get('id')
if not document_id:
abort(400)
return
Cache().clear(document_id)
dic = {"response": "success", "document_id": document_id}
return Response(Utils().dump_json(dic), mimetype='application/json') | Introduce cache clear logic through GoogleDrive webhook endpoint. | Introduce cache clear logic through GoogleDrive webhook endpoint.
| Python | mit | supistar/Botnyan |
c1d6e066ea622cc3fa7cec33cb77aa12e43a6519 | avocado/exporters/_html.py | avocado/exporters/_html.py | from django.template import Context
from django.template.loader import get_template
from _base import BaseExporter
class HTMLExporter(BaseExporter):
    """Exporter that writes formatted rows to a buffer or renders a template."""

    preferred_formats = ('html', 'string')

    def write(self, iterable, buff=None, template=None):
        """Write formatted row items to *buff*, or render them via *template*.

        :param iterable: source rows, fed through ``self.read``
        :param buff: optional file-like object to receive the output
        :param template: template name or object used when ``buff`` is absent
        :raises Exception: when neither ``buff`` nor ``template`` is given
        """
        if not buff and not template:
            raise Exception('Either a file-like object or template must be supplied')

        generator = self.read(iterable)

        if buff:
            for row in generator:
                # BUG FIX: each row is itself an iterable of formatted
                # fragments; the old code passed the row object straight to
                # ``buff.write``. Write the individual items instead.
                for item in row:
                    buff.write(item)
            return buff

        context = Context({'rows': generator})
        if isinstance(template, basestring):
            template = get_template(template)
        return template.render(context)
| from django.template import Context
from django.template.loader import get_template
from _base import BaseExporter
class HTMLExporter(BaseExporter):
preferred_formats = ('html', 'string')
def write(self, iterable, buff=None, template=None):
if not buff and not template:
raise Exception('Either a file-like object or template must be supplied')
generator = self.read(iterable)
if buff:
for row in generator:
for item in row:
buff.write(item)
return buff
context = Context({'rows': generator})
if isinstance(template, basestring):
template = get_template(template)
return template.render(context)
| Fix missing row iteration in HTMLExporter | Fix missing row iteration in HTMLExporter | Python | bsd-2-clause | murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado |
323167f22c3176366cf2f90ce2ec314ee2c49c8f | moa/factory_registers.py | moa/factory_registers.py | from kivy.factory import Factory
r = Factory.register
r('StageTreeNode', module='moa.render.treerender')
r('StageSimpleDisplay', module='moa.render.stage_simple')
# --------------------- devices -----------------------------
r('Device', module='moa.device')
r('DigitalChannel', module='moa.device.digital')
r('DigitalPort', module='moa.device.digital')
r('ButtonChannel', module='moa.device.digital')
r('ButtonPort', module='moa.device.digital')
r('AnalogChannel', module='moa.device.analog')
r('AnalogPort', module='moa.device.analog')
r('NumericPropertyChannel', module='moa.device.analog')
r('NumericPropertyPort', module='moa.device.analog')
# ---------------------- stages --------------------------------
r('MoaStage', module='moa.stage')
r('Delay', module='moa.stage.delay')
r('GateStage', module='moa.stage.gate')
r('DigitalGateStage', module='moa.stage.gate')
r('AnalogGateStage', module='moa.stage.gate')
| from kivy.factory import Factory
r = Factory.register
r('StageTreeNode', module='moa.render.treerender')
r('StageSimpleDisplay', module='moa.render.stage_simple')
# --------------------- devices -----------------------------
r('Device', module='moa.device.__init__')
r('DigitalChannel', module='moa.device.digital')
r('DigitalPort', module='moa.device.digital')
r('ButtonChannel', module='moa.device.digital')
r('ButtonPort', module='moa.device.digital')
r('AnalogChannel', module='moa.device.analog')
r('AnalogPort', module='moa.device.analog')
r('NumericPropertyChannel', module='moa.device.analog')
r('NumericPropertyPort', module='moa.device.analog')
# ---------------------- stages --------------------------------
r('MoaStage', module='moa.stage.__init__')
r('Delay', module='moa.stage.delay')
r('GateStage', module='moa.stage.gate')
r('DigitalGateStage', module='moa.stage.gate')
r('AnalogGateStage', module='moa.stage.gate')
| Use __init__ for factory imports. | Use __init__ for factory imports.
| Python | mit | matham/moa |
a9be23f6e3b45b766b770b60e3a2a318e6fd7e71 | tests/script/test_no_silent_add_and_commit.py | tests/script/test_no_silent_add_and_commit.py | import pytest
pytestmark = pytest.mark.slow
version_file_content = """
major = 0
minor = 2
patch = 0
"""
config_file_content = """
__config_version__ = 1
GLOBALS = {
'serializer': '{{major}}.{{minor}}.{{patch}}',
}
FILES = ["VERSION"]
VERSION = ['major', 'minor', 'patch']
VCS = {
'name': 'git',
}
"""
def test_update_major(test_environment):
test_environment.ensure_file_is_present("VERSION", "0.2.0")
test_environment.ensure_file_is_present(
"punch_version.py",
version_file_content
)
test_environment.ensure_file_is_present(
"punch_config.py",
config_file_content
)
test_environment.output(["git", "init"])
test_environment.output(["git", "add", "punch_config.py"])
test_environment.output(["git", "commit", "-m", "some message"])
test_environment.ensure_file_is_present("untracked_file")
test_environment.call(["punch", "--part", "minor"])
out = test_environment.output(
["git", "ls-tree", "-r", "master", "--name-only"]
)
assert "untracked_file" not in out
| import pytest
pytestmark = pytest.mark.slow
version_file_content = """
major = 0
minor = 2
patch = 0
"""
config_file_content = """
__config_version__ = 1
GLOBALS = {
'serializer': '{{major}}.{{minor}}.{{patch}}',
}
FILES = ["VERSION"]
VERSION = ['major', 'minor', 'patch']
VCS = {
'name': 'git',
}
"""
def test_check_no_silent_addition_happens(test_environment):
test_environment.ensure_file_is_present("VERSION", "0.2.0")
test_environment.ensure_file_is_present(
"punch_version.py",
version_file_content
)
test_environment.ensure_file_is_present(
"punch_config.py",
config_file_content
)
test_environment.output(["git", "init"])
test_environment.output(["git", "add", "punch_config.py"])
test_environment.output(["git", "commit", "-m", "some message"])
test_environment.ensure_file_is_present("untracked_file")
test_environment.call(["punch", "--part", "minor"])
out = test_environment.output(
["git", "ls-tree", "-r", "master", "--name-only"]
)
assert "untracked_file" not in out
| Test name changed to reflect behaviour | Test name changed to reflect behaviour
| Python | isc | lgiordani/punch |
d98b891d882ca916984586631b5ba09c52652a74 | app/__init__.py | app/__init__.py | from flask import Flask
from flask.ext.bower import Bower
from flask.ext.pymongo import PyMongo
from config import Config
app = Flask(__name__)
app.config.from_object(Config)
# Register bower
Bower(app)
# Create mongodb client
mongo = PyMongo(app)
from .report.views import index, report | from flask import Flask
from flask_bower import Bower
from flask_pymongo import PyMongo
from config import Config
app = Flask(__name__)
app.config.from_object(Config)
# Register bower
Bower(app)
# Create mongodb client
mongo = PyMongo(app)
from .report.views import index, report | Resolve the deprecated flask ext imports | Resolve the deprecated flask ext imports
| Python | mit | mingrammer/pyreportcard,mingrammer/pyreportcard |
8ebec493b086525d23bbe4110c9d277c9b9b8301 | src/sentry/tsdb/dummy.py | src/sentry/tsdb/dummy.py | """
sentry.tsdb.dummy
~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from sentry.tsdb.base import BaseTSDB
class DummyTSDB(BaseTSDB):
"""
A no-op time-series storage.
"""
def incr(self, model, key, timestamp=None, count=1):
pass
def get_range(self, model, keys, start, end, rollup=None):
return dict((k, []) for k in keys)
| """
sentry.tsdb.dummy
~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from sentry.tsdb.base import BaseTSDB
class DummyTSDB(BaseTSDB):
"""
A no-op time-series storage.
"""
def incr(self, model, key, timestamp=None, count=1):
pass
def get_range(self, model, keys, start, end, rollup=None):
return dict((k, []) for k in keys)
def record(self, model, key, values, timestamp=None):
pass
def get_distinct_counts_series(self, model, keys, start, end=None, rollup=None):
return {k: [] for k in keys}
def get_distinct_counts_totals(self, model, keys, start, end=None, rollup=None):
return {k: 0 for k in keys}
| Add support for DummyTSDB backend. | Add support for DummyTSDB backend.
| Python | bsd-3-clause | daevaorn/sentry,gencer/sentry,mvaled/sentry,BuildingLink/sentry,daevaorn/sentry,beeftornado/sentry,jean/sentry,JackDanger/sentry,JamesMura/sentry,zenefits/sentry,jean/sentry,jean/sentry,ifduyue/sentry,mvaled/sentry,gencer/sentry,BayanGroup/sentry,imankulov/sentry,nicholasserra/sentry,JamesMura/sentry,beeftornado/sentry,fotinakis/sentry,alexm92/sentry,mvaled/sentry,mitsuhiko/sentry,alexm92/sentry,looker/sentry,ifduyue/sentry,BayanGroup/sentry,zenefits/sentry,BuildingLink/sentry,zenefits/sentry,gencer/sentry,looker/sentry,BuildingLink/sentry,imankulov/sentry,mvaled/sentry,nicholasserra/sentry,JamesMura/sentry,looker/sentry,JackDanger/sentry,ifduyue/sentry,JamesMura/sentry,mitsuhiko/sentry,looker/sentry,BayanGroup/sentry,imankulov/sentry,zenefits/sentry,jean/sentry,fotinakis/sentry,gencer/sentry,jean/sentry,zenefits/sentry,BuildingLink/sentry,fotinakis/sentry,fotinakis/sentry,daevaorn/sentry,JackDanger/sentry,daevaorn/sentry,looker/sentry,nicholasserra/sentry,BuildingLink/sentry,mvaled/sentry,ifduyue/sentry,alexm92/sentry,ifduyue/sentry,gencer/sentry,JamesMura/sentry,mvaled/sentry,beeftornado/sentry |
0498778db28fd2e2272b48fb84a99eece7b662ff | autocorrect.py | autocorrect.py | # Open list of correcly-spelled words.
wordFile = open("words.txt")
threshold = 8
listOfWords = input().split()
index = 0
def lev(a, b):
if min(len(a), len(b)) == 0:
return max(len(a), len(b))
else:
return min(lev(a[:-1], b) + 1, lev(a, b[:-1]) + 1,
lev(a[:-1], b[:-1]) + int(not a == b))
for x in listOfWords:
replacement = (x, threshold + 1)
for word in wordFile:
x = x.lower()
word = word[:-1].lower()
if x == word:
replacement = (x, 0)
break # Some words may actually be spelled correctly!
d = lev(x, word)
if (d < threshold) and (replacement[1] > d):
replacement = (word, d)
listOfWords[index] = replacement[0]
index += 1
print(*listOfWords)
| # Open list of correcly-spelled words.
wordFile = open("words.txt")
threshold = 8
listOfWords = input().split()
index = 0
# Compute Levenshtein distance
def lev(a, b):
if min(len(a), len(b)) == 0:
return max(len(a), len(b))
elif len(a) == len(b):
# Use Hamming Distance (special case)
return sum(x != y for x, y in zip(a, b))
else:
return min(lev(a[:-1], b) + 1, lev(a, b[:-1]) + 1,
lev(a[:-1], b[:-1]) + int(not a[-1] == b[-1]))
for x in listOfWords:
replacement = (x, threshold + 1)
for word in wordFile:
x = x.lower()
word = word[:-1].lower()
if x == word:
replacement = (x, 0)
break # Some words may actually be spelled correctly!
d = lev(x, word)
if (d < threshold) and (replacement[1] > d):
replacement = (word, d)
listOfWords[index] = replacement[0]
index += 1
wordFile.seek(0)
print(*listOfWords)
| Use Hamming distance for efficiency | Use Hamming distance for efficiency
Hamming distance is faster when strings are of same length (Hamming is a
special case of Levenshtein).
| Python | mit | jmanuel1/spellingbee |
76c44154ca1bc2eeb4e24cc820338c36960b1b5c | caniuse/test/test_caniuse.py | caniuse/test/test_caniuse.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
from caniuse.main import check
def test_package_name_has_been_used():
assert 'Sorry' in check('requests')
assert 'Sorry' in check('flask')
assert 'Sorry' in check('pip')
def test_package_name_has_not_been_used():
assert 'Congratulation' in check('this_package_name_has_not_been_used')
assert 'Congratulation' in check('you_will_never_use_this_package_name')
assert 'Congratulation' in check('I_suck_and_my_tests_are_order_dependent')
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
from click.testing import CliRunner
from caniuse.main import check
from caniuse.cli import cli
class TestAPI():
def test_package_name_has_been_used(self):
assert 'Sorry' in check('requests')
assert 'Sorry' in check('flask')
assert 'Sorry' in check('pip')
def test_package_name_has_not_been_used(self):
assert 'Congratulation' in check('this_package_name_has_not_been_used')
assert 'Congratulation' in \
check('you_will_never_use_this_package_name')
assert 'Congratulation' in \
check('I_suck_and_my_tests_are_order_dependent')
class TestCLI():
def test_package_name_has_been_used(self):
runner = CliRunner()
result_one = runner.invoke(cli, ['requests'])
assert 'Sorry' in result_one.output
result_two = runner.invoke(cli, ['flask'])
assert 'Sorry' in result_two.output
result_three = runner.invoke(cli, ['pip'])
assert 'Sorry' in result_three.output
def test_package_name_has_not_been_used(self):
runner = CliRunner()
result_one = runner.invoke(
cli, ['this_package_name_has_not_been_used'])
assert 'Congratulation' in result_one.output
result_two = runner.invoke(
cli, ['you_will_never_use_this_package_name'])
assert 'Congratulation' in result_two.output
result_three = runner.invoke(
cli, ['I_suck_and_my_tests_are_order_dependent'])
assert 'Congratulation' in result_three.output
| Add tests for cli.py to improve code coverage | Add tests for cli.py to improve code coverage
| Python | mit | lord63/caniuse |
429bd22a98895252dfb993d770c9b3060fef0fe3 | tests/runalldoctests.py | tests/runalldoctests.py | import doctest
import glob
import pkg_resources
try:
pkg_resources.require('OWSLib')
except (ImportError, pkg_resources.DistributionNotFound):
pass
testfiles = glob.glob('*.txt')
for file in testfiles:
doctest.testfile(file)
| import doctest
import getopt
import glob
import sys
import pkg_resources
try:
pkg_resources.require('OWSLib')
except (ImportError, pkg_resources.DistributionNotFound):
pass
def run(pattern):
if pattern is None:
testfiles = glob.glob('*.txt')
else:
testfiles = glob.glob(pattern)
for file in testfiles:
doctest.testfile(file)
if __name__ == "__main__":
try:
opts, args = getopt.getopt(sys.argv[1:], "t:v")
except getopt.GetoptError:
print "Usage: python runalldoctests.py [-t GLOB_PATTERN]"
sys.exit(2)
pattern = None
for o, a in opts:
if o == '-t':
pattern = a
run(pattern)
| Add option to pick single test file from the runner | Add option to pick single test file from the runner
| Python | bsd-3-clause | datagovuk/OWSLib,kwilcox/OWSLib,QuLogic/OWSLib,KeyproOy/OWSLib,tomkralidis/OWSLib,menegon/OWSLib,datagovuk/OWSLib,datagovuk/OWSLib,dblodgett-usgs/OWSLib,ocefpaf/OWSLib,mbertrand/OWSLib,gfusca/OWSLib,jaygoldfinch/OWSLib,daf/OWSLib,JuergenWeichand/OWSLib,bird-house/OWSLib,geographika/OWSLib,kalxas/OWSLib,Jenselme/OWSLib,robmcmullen/OWSLib,geopython/OWSLib,jachym/OWSLib,daf/OWSLib,daf/OWSLib,b-cube/OWSLib,jaygoldfinch/OWSLib |
459546a9cedb8e9cf3bee67edb4a76d37874f03b | tests/test_athletics.py | tests/test_athletics.py | from nose.tools import ok_, eq_
from pennathletics.athletes import get_roster, get_player
class TestAthletics():
def test_roster(self):
ok_(get_roster("m-baskbl", 2015) != [])
def test_player_empty(self):
ok_(get_player("m-baskbl", 2014) != [])
def test_player_number(self):
eq_(get_player("m-baskbl", 2013, jersey=1)[0].height, "6'2\"")
| from nose.tools import ok_, eq_
from pennathletics.athletes import get_roster, get_player
class TestAthletics():
def test_roster(self):
ok_(get_roster("m-baskbl", 2015) != [])
def test_player_empty(self):
ok_(get_player("m-baskbl", 2014) != [])
def test_player_number(self):
eq_(get_player("m-baskbl", 2013, jersey=1)[0].height, "6'2\"")
def test_player_hometown(self):
player = get_player("m-baskbl", 2012, homeTown="Belfast, Ireland")[0]
eq_(player.weight, '210 lbs')
def test_player_softball(self):
# 19 players on the 2013 softball team
eq_(len(get_roster("w-softbl", 2013)), 19)
| Add a few more tests for variety | Add a few more tests for variety
| Python | mit | pennlabs/pennathletics |
921225181fc1d0242d61226c7b10663ddba1a1a2 | indra/tests/test_rlimsp.py | indra/tests/test_rlimsp.py | from indra.sources import rlimsp
def test_simple_usage():
stmts = rlimsp.process_pmc('PMC3717945')
| from indra.sources import rlimsp
def test_simple_usage():
rp = rlimsp.process_pmc('PMC3717945')
stmts = rp.statements
assert len(stmts) == 6, len(stmts)
def test_ungrounded_usage():
rp = rlimsp.process_pmc('PMC3717945', with_grounding=False)
assert len(rp.statements)
| Update test and add test for ungrounded endpoint. | Update test and add test for ungrounded endpoint.
| Python | bsd-2-clause | johnbachman/belpy,johnbachman/belpy,sorgerlab/indra,sorgerlab/belpy,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/belpy,sorgerlab/indra,bgyori/indra,bgyori/indra,pvtodorov/indra |
c461c57a90804558a30f3980b2608497a43c06a7 | nipy/testing/__init__.py | nipy/testing/__init__.py | """The testing directory contains a small set of imaging files to be used
for doctests only. More thorough tests and example data will be stored in
a nipy-data-suite to be created later and downloaded separately.
Examples
--------
>>> from nipy.testing import funcfile
>>> from nipy.io.api import load_image
>>> img = load_image(funcfile)
>>> img.shape
(17, 21, 3, 20)
"""
import os
#__all__ = ['funcfile', 'anatfile']
# Discover directory path
filepath = os.path.abspath(__file__)
basedir = os.path.dirname(filepath)
funcfile = os.path.join(basedir, 'functional.nii.gz')
anatfile = os.path.join(basedir, 'anatomical.nii.gz')
from numpy.testing import *
import decorators as dec
from nose.tools import assert_true, assert_false
| """The testing directory contains a small set of imaging files to be
used for doctests only. More thorough tests and example data will be
stored in a nipy data packages that you can download separately - see
:mod:`nipy.utils.data`
.. note:
We use the ``nose`` testing framework for tests.
Nose is a dependency for the tests, but should not be a dependency
for running the algorithms in the NIPY library. This file should
import without nose being present on the python path.
Examples
--------
>>> from nipy.testing import funcfile
>>> from nipy.io.api import load_image
>>> img = load_image(funcfile)
>>> img.shape
(17, 21, 3, 20)
"""
import os
#__all__ = ['funcfile', 'anatfile']
# Discover directory path
filepath = os.path.abspath(__file__)
basedir = os.path.dirname(filepath)
funcfile = os.path.join(basedir, 'functional.nii.gz')
anatfile = os.path.join(basedir, 'anatomical.nii.gz')
from numpy.testing import *
import decorators as dec
# Allow failed import of nose if not now running tests
try:
from nose.tools import assert_true, assert_false
except ImportError:
pass
| Allow failed nose import without breaking nipy import | Allow failed nose import without breaking nipy import | Python | bsd-3-clause | bthirion/nipy,nipy/nipy-labs,alexis-roche/register,arokem/nipy,alexis-roche/niseg,alexis-roche/nireg,alexis-roche/niseg,alexis-roche/nipy,alexis-roche/register,nipy/nipy-labs,nipy/nireg,nipy/nireg,alexis-roche/register,alexis-roche/nipy,bthirion/nipy,arokem/nipy,alexis-roche/nireg,bthirion/nipy,arokem/nipy,bthirion/nipy,arokem/nipy,alexis-roche/nipy,alexis-roche/nipy |
04fbd65f90a3ce821fed76377ce7858ae0dd56ee | masters/master.chromium.webrtc/master_builders_cfg.py | masters/master.chromium.webrtc/master_builders_cfg.py | # Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.changes.filter import ChangeFilter
from buildbot.schedulers.basic import SingleBranchScheduler
from master.factory import annotator_factory
m_annotator = annotator_factory.AnnotatorFactory()
def Update(c):
c['schedulers'].append(
SingleBranchScheduler(name='chromium_scheduler',
change_filter=ChangeFilter(project='chromium',
branch='master'),
treeStableTimer=60,
builderNames=[
'Win Builder',
'Mac Builder',
'Linux Builder',
]),
)
specs = [
{'name': 'Win Builder', 'category': 'win'},
{'name': 'WinXP Tester', 'category': 'win'},
{'name': 'Win7 Tester', 'category': 'win'},
{'name': 'Win8 Tester', 'category': 'win'},
{'name': 'Win10 Tester', 'category': 'win'},
{'name': 'Mac Builder', 'category': 'mac'},
{'name': 'Mac Tester', 'category': 'mac'},
{'name': 'Linux Builder', 'recipe': 'chromium', 'category': 'linux'},
{'name': 'Linux Tester', 'recipe': 'chromium', 'category': 'linux'},
]
c['builders'].extend([
{
'name': spec['name'],
'factory': m_annotator.BaseFactory(spec.get('recipe',
'webrtc/chromium')),
'category': spec['category'],
'notify_on_missing': True,
} for spec in specs
])
| # Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from buildbot.changes.filter import ChangeFilter
from buildbot.schedulers.basic import SingleBranchScheduler
from master.factory import annotator_factory
m_annotator = annotator_factory.AnnotatorFactory()
def Update(c):
c['schedulers'].append(
SingleBranchScheduler(name='chromium_scheduler',
change_filter=ChangeFilter(project='chromium',
branch='master'),
treeStableTimer=60,
builderNames=[
'Win Builder',
'Mac Builder',
'Linux Builder',
]),
)
specs = [
{'name': 'Win Builder', 'category': 'win'},
{'name': 'WinXP Tester', 'category': 'win'},
{'name': 'Win7 Tester', 'category': 'win'},
{'name': 'Win8 Tester', 'category': 'win'},
{'name': 'Win10 Tester', 'category': 'win'},
{'name': 'Mac Builder', 'category': 'mac'},
{'name': 'Mac Tester', 'category': 'mac'},
{'name': 'Linux Builder', 'category': 'linux'},
{'name': 'Linux Tester', 'category': 'linux'},
]
c['builders'].extend([
{
'name': spec['name'],
'factory': m_annotator.BaseFactory('chromium'),
'category': spec['category'],
'notify_on_missing': True,
} for spec in specs
])
| Switch remaining chromium.webrtc builders to chromium recipe. | WebRTC: Switch remaining chromium.webrtc builders to chromium recipe.
Linux was switched in https://codereview.chromium.org/1508933002/
This switches the rest over to the chromium recipe.
BUG=538259
TBR=phajdan.jr@chromium.org
Review URL: https://codereview.chromium.org/1510853002 .
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@297886 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build |
45e86667311f4c9b79d90a3f86e71ffc072b1219 | oneflow/landing/admin.py | oneflow/landing/admin.py | # -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from .models import LandingContent
TRUNCATE_LENGTH = 50
content_fields_names = tuple(('content_' + code)
for code, lang
in settings.LANGUAGES)
content_fields_displays = tuple((field + '_display')
for field in content_fields_names)
class LandingContentAdmin(admin.ModelAdmin):
list_display = ('name', ) + content_fields_displays
#list_display_links = ('name')
#list_filter = (HasTranslationFilter(lang)
# for lang, lang_name in settings.LANGUAGES)
ordering = ('name',)
search_fields = ('name', ) + content_fields_names
def truncated(cls, field_name):
def wrapped(self, obj):
value = getattr(obj, field_name)
return value[:TRUNCATE_LENGTH] + (value[TRUNCATE_LENGTH:] and u'…')
wrapped.short_description = cls._meta.get_field_by_name(
field_name)[0].verbose_name
wrapped.admin_order_field = field_name
return wrapped
for attr, attr_name in zip(content_fields_names,
content_fields_displays):
setattr(LandingContentAdmin, attr_name,
truncated(LandingContent, attr))
admin.site.register(LandingContent, LandingContentAdmin)
| # -*- coding: utf-8 -*-
from django.contrib import admin
from django.conf import settings
from .models import LandingContent
from sparks.django.admin import truncate_field
content_fields_names = tuple(('content_' + code)
for code, lang
in settings.LANGUAGES)
content_fields_displays = tuple((field + '_display')
for field in content_fields_names)
class LandingContentAdmin(admin.ModelAdmin):
#
#list_display_links = ('name')
#list_filter = (HasTranslationFilter(lang)
# for lang, lang_name in settings.LANGUAGES)
#
list_display = ('name', ) + content_fields_displays
ordering = ('name',)
search_fields = ('name', ) + content_fields_names
for attr, attr_name in zip(content_fields_names,
content_fields_displays):
setattr(LandingContentAdmin, attr_name,
truncate_field(LandingContent, attr))
admin.site.register(LandingContent, LandingContentAdmin)
| Move the `truncate_field` pseudo-decorator to sparks (which just released 1.17). | Move the `truncate_field` pseudo-decorator to sparks (which just released 1.17). | Python | agpl-3.0 | WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow |
b7e657134c21b62e78453b11f0745e0048e346bf | examples/simple_distribution.py | examples/simple_distribution.py | import sys
import time
from random import shuffle
from vania.fair_distributor import FairDistributor
def main():
# User input for the number of targets and objects.
users = ['user1', 'user2']
tasks = ['task1', 'task2']
preferences = [
[1, 2],
[2, 1],
]
# Run solver
start_time = time.time()
distributor = FairDistributor(users, tasks, preferences)
output = distributor.distribute(output='problem.lp')
elapsed_time = time.time() - start_time
# Output
print(output)
if __name__ == '__main__':
main()
| import sys
import time
from random import shuffle
from vania.fair_distributor import FairDistributor
def main():
# User input for the number of targets and objects.
users = ['user1', 'user2']
tasks = ['task1', 'task2']
preferences = [
[1, 2],
[2, 1],
]
# Run solver
distributor = FairDistributor(users, tasks, preferences)
output = distributor.distribute(output='problem.lp')
# Output
print(output)
if __name__ == '__main__':
main()
| Remove time metrics from the simple example | Remove time metrics from the simple example
| Python | mit | Hackathonners/vania |
a6e2c0fc837b17321e2979cb12ba2d0e69603eac | orderedmodel/__init__.py | orderedmodel/__init__.py | __all__ = ['OrderedModel', 'OrderedModelAdmin']
from models import OrderedModel
from admin import OrderedModelAdmin
| from .models import OrderedModel
from .admin import OrderedModelAdmin
__all__ = ['OrderedModel', 'OrderedModelAdmin']
try:
from django.conf import settings
except ImportError:
pass
else:
if 'mptt' in settings.INSTALLED_APPS:
from .mptt_models import OrderableMPTTModel
from .mptt_admin import OrderedMPTTModelAdmin
__all__ += ['OrderableMPTTModel', 'OrderedMPTTModelAdmin']
| Make it easy importing of OrderableMPTTModel and OrderedMPTTModelAdmin in from orderedmodel module | Make it easy importing of OrderableMPTTModel and OrderedMPTTModelAdmin in from orderedmodel module
| Python | bsd-3-clause | MagicSolutions/django-orderedmodel,MagicSolutions/django-orderedmodel |
163cfea2a0c5e7d96dd870aa540c95a2ffa139f9 | appstats/filters.py | appstats/filters.py | # encoding: utf-8
import json
def json_filter(value):
return json.dumps(value)
def count_filter(value):
if value is None:
return ""
count = float(value)
base = 1000
prefixes = [
('K'),
('M'),
('G'),
('T'),
('P'),
('E'),
('Z'),
('Y')
]
if count < base:
return '%.1f' % count
else:
for i, prefix in enumerate(prefixes):
unit = base ** (i + 2)
if count < unit:
return '%.1f %s' % ((base * count / unit), prefix)
return '%.1f %s' % ((base * count / unit), prefix)
def time_filter(value):
if value is None:
return ""
time = float(value)
# Transform secs into ms
time = value * 1000
if time < 1000:
return '%.1f ms' % time
else:
time /= 1000
if time < 60:
return '%.1f s' % time
else:
time /= 60
if time < 60:
return '%.1f m' % time
else:
time /= 60
if time < 24:
return '%.1f h' % time
else:
time /= 24
return'%.1f d' % time
def default_filter(value):
if value is None:
return ""
return value
| # encoding: utf-8
import json
def json_filter(value):
return json.dumps(value)
def count_filter(value):
if value is None:
return ""
count = float(value)
base = 1000
prefixes = [
('K'),
('M'),
('G'),
('T'),
('P'),
('E'),
('Z'),
('Y')
]
if count < base:
return '%.1f' % count
else:
for i, prefix in enumerate(prefixes):
unit = base ** (i + 2)
if count < unit:
return '%.1f %s' % ((base * count / unit), prefix)
return '%.1f %s' % ((base * count / unit), prefix)
def time_filter(value):
if value is None:
return ""
# Transform secs into ms
time = float(value) * 1000
if time < 1000:
return '%.1f ms' % time
else:
time /= 1000
if time < 60:
return '%.1f s' % time
else:
time /= 60
if time < 60:
return '%.1f m' % time
else:
time /= 60
if time < 24:
return '%.1f h' % time
else:
time /= 24
return'%.1f d' % time
def default_filter(value):
if value is None:
return ""
return value
| Join two lines in one | Join two lines in one
| Python | mit | uvNikita/appstats,uvNikita/appstats,uvNikita/appstats |
fc94d60066692e6e8dc496bb854039bb66af3311 | scout.py | scout.py |
# Python does not require explicit interfaces,
# but I believe that code which does is more
# maintainable. Thus I include this explicit
# interface for Problems.
class Problem:
def getStartState(self):
return None
def getEndState(self):
return None
def isValidState(self, state):
return False
def getSuccessors(self, state):
return []
def getStringRepr(self, state):
return "BadProblem"
def search(problem):
print "Searching..."
if (__name__ == '__main__'):
problem = Problem();
search(problem)
|
# Python does not require explicit interfaces,
# but I believe that code which does is more
# maintainable. Thus I include this explicit
# interface for Problems.
class Problem:
def getStartState(self):
return None
def getEndState(self):
return None
def isValidState(self, state):
return False
def getSuccessors(self, state):
return []
def getStringRepr(self, state):
return "BadProblem"
class SquareProblem(Problem):
def __init__(self, size):
self.size = size
def getStartState(self):
return (0, 0)
def getEndState(self):
return (self.size, self.size)
def isValidState(self, state):
return 0 <= state[0] <= self.size and
0 <= state[1] <= self.size
def getSuccessors(self, state):
return [(state[0]+dx, state[1]+dy) for (dx, dy) in [(1, 0), (0, 1), (-1, 0), (0, -1)]]
def getStringRepr(self, state):
return "(%d, %d)" % state
def search(problem):
print "Searching..."
if (__name__ == '__main__'):
problem = SquareProblem(2);
search(problem)
| Add a simple problem for testing | Add a simple problem for testing
| Python | mit | SpexGuy/Scout |
7caf008f5442baff92cd820d3fd3a059293a3e5d | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name='icalendar',
version='0.10',
description='iCalendar support module',
package_dir = {'': 'src'},
packages=['icalendar'],
)
| #!/usr/bin/env python
from distutils.core import setup
f = open('version.txt', 'r')
version = f.read().strip()
f.close()
setup(name='icalendar',
version=version,
description='iCalendar support module',
package_dir = {'': 'src'},
packages=['icalendar'],
)
| Tweak so that version information is picked up from version.txt. | Tweak so that version information is picked up from version.txt.
git-svn-id: aa2e0347f72f9208cad9c7a63777f32311fef72e@11576 fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
| Python | lgpl-2.1 | greut/iCalendar,ryba-xek/iCalendar,offby1/icalendar |
0574705dcbc473805aee35b482a41bdef060b0c9 | setup.py | setup.py | from distutils.core import setup
import py2pack
with open('README') as f:
README = f.read()
setup(
name = py2pack.__name__,
version = py2pack.__version__,
license = "GPLv2",
description = py2pack.__doc__,
long_description = README,
author = py2pack.__author__.rsplit(' ', 1)[0],
author_email = py2pack.__author__.rsplit(' ', 1)[1][1:-1],
url = 'http://github.com/saschpe/py2pack',
scripts = ['scripts/py2pack'],
packages = ['py2pack'],
package_data = {'py2pack': ['templates/*']},
#data_files = [('doc/py2pack', ['AUTHORS', 'LICENSE', 'README'])],
requires = ['argparse', 'Jinja2'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Pre-processors',
],
)
| from distutils.core import setup
import py2pack
setup(
name = py2pack.__name__,
version = py2pack.__version__,
license = "GPLv2",
description = py2pack.__doc__,
long_description = open('README').read(),
author = py2pack.__author__.rsplit(' ', 1)[0],
author_email = py2pack.__author__.rsplit(' ', 1)[1][1:-1],
url = 'http://github.com/saschpe/py2pack',
scripts = ['scripts/py2pack'],
packages = ['py2pack'],
package_data = {'py2pack': ['templates/*']},
#data_files = [('doc/py2pack', ['AUTHORS', 'LICENSE', 'README'])],
requires = ['argparse', 'Jinja2'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Pre-processors',
],
)
| Load README file traditionally, with-statement is not supported by older Python releases. | Load README file traditionally, with-statement is not supported by older
Python releases.
| Python | apache-2.0 | saschpe/py2pack,toabctl/py2pack |
6bece40a1a0c8977c6211234e5aa4e64ad5b01a2 | linguine/ops/StanfordCoreNLP.py | linguine/ops/StanfordCoreNLP.py | #!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
print(str(res));
for sentence in res["sentences"]:
words = []
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'},
corenlp_jars=[os.path.join(os.path.dirname(__file__), '../../lib/*')])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
| #!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
words = []
for sentence in res["sentences"]:
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'},
corenlp_jars=[os.path.join(os.path.dirname(__file__), '../../lib/*')])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
| Return entire corpus from corenlp analysis | Return entire corpus from corenlp analysis
| Python | mit | Pastafarians/linguine-python,rigatoni/linguine-python |
468a66c0945ce9e78fb5da8a6a628ce581949759 | livinglots_usercontent/views.py | livinglots_usercontent/views.py | from django.views.generic import CreateView
from braces.views import FormValidMessageMixin
from livinglots_genericviews import AddGenericMixin
class AddContentView(FormValidMessageMixin, AddGenericMixin, CreateView):
def _get_content_name(self):
return self.form_class._meta.model._meta.object_name
def get_form_valid_message(self):
return '%s added successfully.' % self._get_content_name()
def get_success_url(self):
return self.get_content_object().get_absolute_url()
def get_template_names(self):
return [
'livinglots/usercontent/add_%s.html' % self._get_content_name().lower(),
]
def form_valid(self, form):
"""
Save the content and notify participants who are following the target
lot.
"""
self.object = form.save()
# NB: Notifications are sent to followers using a descendant of
# NotifyParticipantsOnCreationForm
return super(AddContentView, self).form_valid(form)
| from django.views.generic import CreateView
from braces.views import FormValidMessageMixin
from livinglots_genericviews import AddGenericMixin
class AddContentView(FormValidMessageMixin, AddGenericMixin, CreateView):
def _get_content_name(self):
return self.form_class._meta.model._meta.object_name
def get_form_valid_message(self):
return '%s added successfully.' % self._get_content_name()
def get_initial(self):
initial = super(AddContentView, self).get_initial()
user = self.request.user
# If user has name, set that for them
try:
initial['added_by_name'] = user.first_name or user.username
except AttributeError:
pass
return initial
def get_success_url(self):
return self.get_content_object().get_absolute_url()
def get_template_names(self):
return [
'livinglots/usercontent/add_%s.html' % self._get_content_name().lower(),
]
def form_valid(self, form):
"""
Save the content and notify participants who are following the target
lot.
"""
self.object = form.save()
# NB: Notifications are sent to followers using a descendant of
# NotifyParticipantsOnCreationForm
return super(AddContentView, self).form_valid(form)
| Set added_by_name if we can | Set added_by_name if we can
| Python | agpl-3.0 | 596acres/django-livinglots-usercontent,596acres/django-livinglots-usercontent |
c470da4fcf5bec84c255aa4514f6fd764781eb1a | setup.py | setup.py | from distutils.core import setup
ext_files = ["pyreBloom/bloom.c"]
kwargs = {}
try:
from Cython.Distutils import build_ext
from Cython.Distutils import Extension
print "Building from Cython"
ext_files.append("pyreBloom/pyreBloom.pyx")
kwargs['cmdclass'] = {'build_ext': build_ext}
except ImportError:
from distutils.core import Extension
ext_files.append("pyreBloom/pyreBloom.c")
print "Building from C"
ext_modules = [Extension("pyreBloom", ext_files, libraries=['hiredis'])]
setup(
name = 'pyreBloom',
version = "1.0.1",
author = "Dan Lecocq",
author_email = "dan@seomoz.org",
license = "MIT License",
ext_modules = ext_modules,
classifiers = [
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: C',
'Programming Language :: Cython',
'Topic :: Software Development :: Libraries :: Python Modules',
],
**kwargs
)
| from distutils.core import setup
ext_files = ["pyreBloom/bloom.c"]
kwargs = {}
try:
from Cython.Distutils import build_ext
from Cython.Distutils import Extension
print "Building from Cython"
ext_files.append("pyreBloom/pyreBloom.pyx")
kwargs['cmdclass'] = {'build_ext': build_ext}
except ImportError:
from distutils.core import Extension
ext_files.append("pyreBloom/pyreBloom.c")
print "Building from C"
ext_modules = [Extension("pyreBloom", ext_files, libraries=['hiredis'],
library_dirs=['/usr/local/lib'],
include_dirs=['/usr/local/include'])]
setup(
name = 'pyreBloom',
version = "1.0.1",
author = "Dan Lecocq",
author_email = "dan@seomoz.org",
license = "MIT License",
ext_modules = ext_modules,
classifiers = [
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: C',
'Programming Language :: Cython',
'Topic :: Software Development :: Libraries :: Python Modules',
],
**kwargs
)
| Fix build with newer dependencies. | Fix build with newer dependencies.
| Python | mit | seomoz/pyreBloom,seomoz/pyreBloom,seomoz/pyreBloom |
1d5175beedeed2a2ae335a41380280a2ed39901b | lambda/control/commands.py | lambda/control/commands.py | from __future__ import print_function
import shlex
from traceback import format_exception
from obj import Obj
import click
from click.testing import CliRunner
runner = CliRunner()
@click.group(name='')
@click.argument('user', required=True)
@click.pass_context
def command(ctx, user, **kwargs):
ctx.obj = Obj(user=user)
@command.command()
@click.pass_context
def about(ctx, **kwargs):
click.echo('This is the about command.')
@command.command()
@click.pass_context
def echo(ctx, **kwargs):
click.echo('This is the echo command. You are {}.'.format(ctx.obj.user))
def run(user, cmd):
result = runner.invoke(command, [user,] + shlex.split(cmd))
print('run result: {}'.format(result))
if not result.output:
print('Exception: {}\nTraceback:\n {}'.format(result.exception, ''.join(format_exception(*result.exc_info))))
return 'Internal error.'
return result.output
# Import files with subcommands here--we don't use them directly, but we need
# to make sure they're loaded, since that's when they add their commands to
# our command object.
import list_commands
| from __future__ import print_function
import shlex
from traceback import format_exception
from obj import Obj
import click
from click.testing import CliRunner
runner = CliRunner()
@click.group(name='')
@click.argument('user', required=True)
@click.pass_context
def command(ctx, user, **kwargs):
ctx.obj = Obj(user=user)
@command.command()
@click.pass_context
def about(ctx, **kwargs):
click.echo('This is the about command.')
@command.command()
@click.argument('stuff', nargs=-1, required=False)
@click.pass_context
def echo(ctx, stuff, **kwargs):
click.echo('This is the echo command. You are {}.'.format(ctx.obj.user))
if stuff:
click.echo(' '.join(stuff))
else:
click.echo('[no parameters]')
def run(user, cmd):
result = runner.invoke(command, [user,] + shlex.split(cmd))
print('run result: {}'.format(result))
if not result.output:
print('Exception: {}\nTraceback:\n {}'.format(result.exception, ''.join(format_exception(*result.exc_info))))
return 'Internal error.'
return result.output
# Import files with subcommands here--we don't use them directly, but we need
# to make sure they're loaded, since that's when they add their commands to
# our command object.
import list_commands
| Make the echo command actually echo all its parameters. | Make the echo command actually echo all its parameters.
| Python | mit | ilg/LambdaMLM |
f80c11efb4bcbca6d20cdbbc1a552ebb04aa8302 | api/config/settings/production.py | api/config/settings/production.py | import os
import dj_database_url
from .base import *
# BASE_NAME and BASE_DOMAIN are intentionally unset
# They are only needed to seed data in staging and local
BASE_URL = "https://voterengagement.com"
###############################################################################
# Core
SECRET_KEY = os.environ['SECRET_KEY']
ALLOWED_HOSTS = [
'127.0.0.1',
'localhost',
# TODO: Prevent access from herokuapp.com when domain is registered
# '.voterengagement.com',
'.herokuapp.com',
]
###############################################################################
# Static files
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
###############################################################################
# Database
DATABASES = {}
DATABASES['default'] = dj_database_url.config()
| import os
import dj_database_url
from .base import *
# BASE_NAME and BASE_DOMAIN are intentionally unset
# They are only needed to seed data in staging and local
BASE_URL = "https://voterengagement.com"
###############################################################################
# Core
SECRET_KEY = os.environ['SECRET_KEY']
ALLOWED_HOSTS = [
'127.0.0.1',
'localhost',
'.citizenlabs.org',
]
###############################################################################
# Static files
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
###############################################################################
# Database
DATABASES = {}
DATABASES['default'] = dj_database_url.config()
| Allow citizenlabs.org as a host | Allow citizenlabs.org as a host
| Python | mit | citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement,citizenlabsgr/voter-engagement |
a8218a1c20ea48a3392ef9e6d898a73eb9642d9c | ui/repository/browse.py | ui/repository/browse.py | from django.shortcuts import render_to_response
from django.template import RequestContext
from registry.models import ResourceCollection
from django.http import HttpResponse, HttpResponseBadRequest
import json
def browse(req):
# Find all the collections that do not have parents
top = ResourceCollection.objects.filter(parents__isnull=True)
# Find the closure (all children) for each top-level collection
result = [ col.jsonClosure(req.user) for col in top ]
collections = json.dumps(result)
return render_to_response('repository/browse.jade', {'collections': collections}, context_instance=RequestContext(req))
def populateCollection(req, collectionId):
# Find this collection
try:
col = ResourceCollection.objects.get(collection_id=collectionId)
except ResourceCollection.DoesNotExist:
return HttpResponseBadRequest('There is no collection with the given ID: %s' % collectionId)
# Find the children for this collection
result = col.jsonClosure(req.user)
collections = json.dumps(result)
return HttpResponse(collections, content_type="application/json")
| from django.shortcuts import render_to_response
from django.template import RequestContext
from registry.models import ResourceCollection
from django.http import HttpResponse, HttpResponseBadRequest
import json
def browse(req):
# Find all the collections that do not have parents
top = ResourceCollection.objects.filter(parents="Top")
# Find the closure (all children) for each top-level collection
result = [ col.jsonClosure(req.user) for col in top ]
collections = json.dumps(result)
return render_to_response('repository/browse.jade', {'collections': collections}, context_instance=RequestContext(req))
def populateCollection(req, collectionId):
# Find this collection
try:
col = ResourceCollection.objects.get(collection_id=collectionId)
except ResourceCollection.DoesNotExist:
return HttpResponseBadRequest('There is no collection with the given ID: %s' % collectionId)
# Find the children for this collection
result = col.jsonClosure(req.user)
collections = json.dumps(result)
return HttpResponse(collections, content_type="application/json")
| Adjust parent filter on collections. Now top-level collections should specific 'top' as their parent instead of being null. This helps get rid of the problem of collections ending up being top-level when removed from their old parent | Adjust parent filter on collections. Now top-level collections should specific 'top' as their parent instead of being null. This helps get rid of the problem of collections ending up being top-level when removed from their old parent
| Python | bsd-3-clause | usgin/nrrc-repository,usgin/nrrc-repository,usgin/metadata-repository,usgin/metadata-repository |
142022516f310aeb58f3560031b2266f39a0f2e5 | erpnext_ebay/tasks.py | erpnext_ebay/tasks.py | # -*- coding: utf-8 -*-
"""Scheduled tasks to be run by erpnext_ebay"""
from frappe.utils.background_jobs import enqueue
def all():
pass
def hourly():
enqueue('erpnext_ebay.sync_orders.sync',
queue='long', job_name='Sync eBay Orders')
def daily():
enqueue('erpnext_ebay.ebay_active_listings.update_ebay_data',
queue='long', job_name='Update eBay Data',
multiple_error_sites=['UK'])
enqueue('erpnext_ebay.ebay_categories.category_sync',
queue='long', job_name='eBay Category Sync')
def weekly():
pass
def monthly():
pass
| # -*- coding: utf-8 -*-
"""Scheduled tasks to be run by erpnext_ebay"""
from frappe.utils.background_jobs import enqueue
def all():
pass
def hourly():
pass
def daily():
enqueue('erpnext_ebay.ebay_categories.category_sync',
queue='long', job_name='eBay Category Sync')
def weekly():
pass
def monthly():
pass
| Remove sync_orders and update_ebay_listings from hooks scheduler | fix(hooks): Remove sync_orders and update_ebay_listings from hooks scheduler
| Python | mit | bglazier/erpnext_ebay,bglazier/erpnext_ebay |
4eba105663ba8d0323559b095055b3f89521ea07 | demo/ubergui.py | demo/ubergui.py | #!/usr/bin/env python
import sys
import Pyro
import Tkinter, tkMessageBox
from VisionEgg.PyroApps.UberClientGUI import client_list, AppWindow
# You can add your own controllers and GUIs to client_list
try:
app_window = AppWindow(client_list=client_list)
except Pyro.errors.ProtocolError, x:
if str(x) == 'connection failed': # Can't find UberServer running on network
try:
tkMessageBox.showerror("Can't find UberServer","Can't find UberServer running on Pyro network.")
sys.exit(1)
except:
raise # Can't find UberServer running on network
else:
raise
except Pyro.errors.PyroError, x:
if str(x) in ["Name Server not responding","connection failed"]:
try:
tkMessageBox.showerror("Can't find Pyro Name Server","Can't find Pyro Name Server on network.")
sys.exit(1)
except:
raise # Can't find Pyro Name Server on network
else:
raise
app_window.winfo_toplevel().wm_iconbitmap()
app_window.pack(expand=1,fill=Tkinter.BOTH)
app_window.winfo_toplevel().title("Vision Egg")
app_window.winfo_toplevel().minsize(1,1)
app_window.mainloop()
| #!/usr/bin/env python
import sys
import Pyro
import Tkinter, tkMessageBox
from VisionEgg.PyroApps.UberClientGUI import client_list, AppWindow
# You can add your own controllers and GUIs to client_list
try:
app_window = AppWindow(client_list=client_list)
except Pyro.errors.PyroError, x:
uber_server_error = 0
if isinstance(x, Pyro.errors.ProtocolError) and str(x) == 'connection failed': # Can't find UberServer running on network
uber_server_error = 1
if isinstance(x, Pyro.errors.NamingError) and str(x) == 'name not found': # Can't find UberServer running on network
uber_server_error = 1
if uber_server_error:
tkMessageBox.showerror("Can't find UberServer","Can't find UberServer running on Pyro network.")
sys.exit(1)
elif str(x) in ["Name Server not responding","connection failed"]:
try:
tkMessageBox.showerror("Can't find Pyro Name Server","Can't find Pyro Name Server on network.")
sys.exit(1)
except:
raise # Can't find Pyro Name Server on network
else:
raise
app_window.winfo_toplevel().wm_iconbitmap()
app_window.pack(expand=1,fill=Tkinter.BOTH)
app_window.winfo_toplevel().title("Vision Egg")
app_window.winfo_toplevel().minsize(1,1)
app_window.mainloop()
| Update errors for other versions of Pyro | Minor: Update errors for other versions of Pyro
git-svn-id: 033d166fe8e629f6cbcd3c0e2b9ad0cffc79b88b@775 3a63a0ee-37fe-0310-a504-e92b6e0a3ba7
| Python | lgpl-2.1 | visionegg/visionegg,visionegg/visionegg,visionegg/visionegg,visionegg/visionegg,visionegg/visionegg |
83b060b573bee654708e5fbb41c9e3b2913e4d9c | generatechangedfilelist.py | generatechangedfilelist.py | import sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
md5dir = os.path.abspath(sys.argv[1])
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(md5dir,"temp","server.md5")
postlist = os.path.join(md5dir,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
with open(list_file, 'w') as fh:
fh.write(difflist)
if __name__ == '__main__':
main()
| import sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
mcp_root = os.path.abspath(sys.argv[1])
sys.path.append(os.path.join(mcp_root,"runtime"))
from filehandling.srgshandler import parse_srg
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(mcp_root,"temp","server.md5")
postlist = os.path.join(mcp_root,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
srg_data = parse_srg(os.path.join(mcp_root,"temp","server_rg.srg")
classes=dict()
for row in srg_data['CL']:
classes[row['deobf_name']] = row['obf_name']
with open(list_file, 'w') as fh:
for diff in difflist:
(clazz,md5)=diff.strip().split()
if clazz in classes:
clazz=classes[clazz]
fh.write("%s\n" %(clazz))
if __name__ == '__main__':
main()
| Tweak file list script to print obf names | Tweak file list script to print obf names
| Python | lgpl-2.1 | MinecraftForge/FML,aerospark/FML,aerospark/FML,aerospark/FML |
139e6acc19040d89f304875c533513c9651f2906 | budget_proj/budget_app/filters.py | budget_proj/budget_app/filters.py | from django.db.models import CharField
from django_filters import rest_framework as filters
from . import models
class DefaultFilterMeta:
"""
Set our default Filter configurations to DRY up the FilterSet Meta classes.
"""
# Let us filter by all fields except id
exclude = ('id',)
# We prefer case insensitive matching on CharFields
filter_overrides = {
CharField: {
'filter_class': filters.CharFilter,
'extra': lambda f: {
'lookup_expr': 'iexact',
},
},
}
class OcrbFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.OCRB
class KpmFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.KPM
class BudgetHistoryFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.BudgetHistory
class LookupCodeFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.LookupCode
| from django.db.models import CharField
from django_filters import rest_framework as filters
from . import models
class CustomFilterBase(filters.FilterSet):
"""
Extends Filterset to populate help_text from the associated model field.
Works with swagger but not the builtin docs.
"""
@classmethod
def filter_for_field(cls, f, name, lookup_expr):
result = super().filter_for_field(f, name, lookup_expr)
if 'help_text' not in result.extra:
result.extra['help_text'] = f.help_text
return result
class DefaultFilterMeta:
"""
Defaults for:
- enable filtering by all model fields except `id`
- ignoring upper/lowercase when on CharFields
"""
# Let us filter by all fields except id
exclude = ('id',)
# We prefer case insensitive matching on CharFields
filter_overrides = {
CharField: {
'filter_class': filters.CharFilter,
'extra': lambda f: {
'lookup_expr': 'iexact',
},
},
}
class OcrbFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.OCRB
class KpmFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.KPM
class BudgetHistoryFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.BudgetHistory
class LookupCodeFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.LookupCode
| Upgrade Filters fields to use docs from model fields | Upgrade Filters fields to use docs from model fields
| Python | mit | jimtyhurst/team-budget,hackoregon/team-budget,hackoregon/team-budget,hackoregon/team-budget,jimtyhurst/team-budget,jimtyhurst/team-budget |
891ca8ee117f462a1648e954b756f1d29a5f527c | tests/test_errors.py | tests/test_errors.py | """Tests for errors.py"""
import aiohttp
def test_bad_status_line1():
err = aiohttp.BadStatusLine(b'')
assert str(err) == "b''"
def test_bad_status_line2():
err = aiohttp.BadStatusLine('Test')
assert str(err) == 'Test'
| """Tests for errors.py"""
import aiohttp
def test_bad_status_line1():
err = aiohttp.BadStatusLine(b'')
assert str(err) == "b''"
def test_bad_status_line2():
err = aiohttp.BadStatusLine('Test')
assert str(err) == 'Test'
def test_fingerprint_mismatch():
err = aiohttp.FingerprintMismatch('exp', 'got', 'host', 8888)
expected = '<FingerprintMismatch expected=exp got=got host=host port=8888>'
assert expected == repr(err)
| Add a test for FingerprintMismatch repr | Add a test for FingerprintMismatch repr
| Python | apache-2.0 | jettify/aiohttp,esaezgil/aiohttp,z2v/aiohttp,arthurdarcet/aiohttp,pfreixes/aiohttp,z2v/aiohttp,mind1master/aiohttp,KeepSafe/aiohttp,mind1master/aiohttp,juliatem/aiohttp,hellysmile/aiohttp,esaezgil/aiohttp,esaezgil/aiohttp,arthurdarcet/aiohttp,panda73111/aiohttp,pfreixes/aiohttp,z2v/aiohttp,alex-eri/aiohttp-1,singulared/aiohttp,moden-py/aiohttp,singulared/aiohttp,AraHaanOrg/aiohttp,KeepSafe/aiohttp,arthurdarcet/aiohttp,hellysmile/aiohttp,alex-eri/aiohttp-1,singulared/aiohttp,jettify/aiohttp,panda73111/aiohttp,alex-eri/aiohttp-1,moden-py/aiohttp,playpauseandstop/aiohttp,jettify/aiohttp,KeepSafe/aiohttp,rutsky/aiohttp,juliatem/aiohttp,AraHaanOrg/aiohttp,mind1master/aiohttp,rutsky/aiohttp,panda73111/aiohttp,Eyepea/aiohttp,moden-py/aiohttp,rutsky/aiohttp |
4bcc0aae53def04e16e87499b1321256ff35a7c1 | pyconll/__init__.py | pyconll/__init__.py | """
A library whose purpose is to provide a low level layer between the CoNLL format
and python code.
"""
__all__ = ['exception', 'load', 'tree', 'unit', 'util']
from .load import load_from_string, load_from_file, load_from_url, \
iter_from_string, iter_from_file, iter_from_url
| """
A library whose purpose is to provide a low level layer between the CoNLL format
and python code.
"""
__all__ = ['conllable', 'exception', 'load', 'tree', 'unit', 'util']
from .load import load_from_string, load_from_file, load_from_url, \
iter_from_string, iter_from_file, iter_from_url
| Add conllable to all list. | Add conllable to all list.
| Python | mit | pyconll/pyconll,pyconll/pyconll |
e056dc3581785fe34123189cccd9901e1e9afe71 | pylatex/__init__.py | pylatex/__init__.py | # flake8: noqa
"""
A library for creating Latex files.
.. :copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .document import Document
from .math import Math, VectorName, Matrix
from .package import Package
from .section import Section, Subsection, Subsubsection
from .table import Table, MultiColumn, MultiRow, Tabular
from .pgfplots import TikZ, Axis, Plot
from .graphics import Figure, SubFigure, MatplotlibFigure
from .lists import Enumerate, Itemize, Description
from .quantities import Quantity
from .base_classes import Command
| # flake8: noqa
"""
A library for creating Latex files.
.. :copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .document import Document
from .math import Math, VectorName, Matrix
from .package import Package
from .section import Section, Subsection, Subsubsection
from .table import Table, MultiColumn, MultiRow, Tabular, Tabu, LongTable, \
LongTabu
from .pgfplots import TikZ, Axis, Plot
from .graphics import Figure, SubFigure, MatplotlibFigure
from .lists import Enumerate, Itemize, Description
from .quantities import Quantity
from .base_classes import Command
| Add Tabu, LongTable and LongTabu global import | Add Tabu, LongTable and LongTabu global import
| Python | mit | sebastianhaas/PyLaTeX,sebastianhaas/PyLaTeX,votti/PyLaTeX,ovaskevich/PyLaTeX,JelteF/PyLaTeX,bjodah/PyLaTeX,votti/PyLaTeX,jendas1/PyLaTeX,bjodah/PyLaTeX,jendas1/PyLaTeX,JelteF/PyLaTeX,ovaskevich/PyLaTeX |
117e4f59720de9d13ddb4eaa439915addb616f1d | tests/cli/test_pinout.py | tests/cli/test_pinout.py | from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
str = type('')
import pytest
import gpiozero.cli.pinout as pinout
def test_args_incorrect():
with pytest.raises(SystemExit) as ex:
pinout.parse_args(['--nonexistentarg'])
assert ex.value.code == 2
def test_args_color():
args = pinout.parse_args([])
assert args.color is None
args = pinout.parse_args(['--color'])
assert args.color is True
args = pinout.parse_args(['--monochrome'])
assert args.color is False
def test_args_revision():
args = pinout.parse_args(['--revision', '000d'])
assert args.revision == '000d'
def test_help(capsys):
with pytest.raises(SystemExit) as ex:
pinout.parse_args(['--help'])
out, err = capsys.readouterr()
assert 'GPIO pinout' in out
assert ex.value.code == 0
| from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
str = type('')
import pytest
from gpiozero.cli import pinout
def test_args_incorrect():
with pytest.raises(SystemExit) as ex:
pinout.parse_args(['--nonexistentarg'])
assert ex.value.code == 2
def test_args_color():
args = pinout.parse_args([])
assert args.color is None
args = pinout.parse_args(['--color'])
assert args.color is True
args = pinout.parse_args(['--monochrome'])
assert args.color is False
def test_args_revision():
args = pinout.parse_args(['--revision', '000d'])
assert args.revision == '000d'
def test_help(capsys):
with pytest.raises(SystemExit) as ex:
pinout.parse_args(['--help'])
out, err = capsys.readouterr()
assert 'GPIO pinout' in out
assert ex.value.code == 0
| Use from to import rather than rename | Use from to import rather than rename
| Python | bsd-3-clause | waveform80/gpio-zero,MrHarcombe/python-gpiozero,RPi-Distro/python-gpiozero |
d814c9c131f2c2957173302f7c4c1cbf2b719b45 | check_rfc_header.py | check_rfc_header.py | #!/usr/bin/env python
# -*- encoding: utf-8
import os
from travistooling import ROOT
def get_rfc_readmes(repo):
rfcs_dir = os.path.join(repo, 'docs', 'rfcs')
for root, _, filenames in os.walk(rfcs_dir):
for f in filenames:
if f == 'README.md':
yield os.path.join(root, f)
print('*** Checking RFC headers')
for f in get_rfc_readmes(ROOT):
print('*** Checking header for %s' % os.path.relpath(f, start=ROOT))
filename = os.path.basename(os.path.dirname(f))
number, name = filename.split('-', 1)
contents = open(f).read()
header = contents.splitlines()[:3]
assert header[0].startswith('# RFC %03d: ' % int(number))
assert header[1] == ''
print(f, name)
print(header)
| #!/usr/bin/env python
# -*- encoding: utf-8
import datetime as dt
import os
from travistooling import git, ROOT
def get_rfc_readmes(repo):
rfcs_dir = os.path.join(repo, 'docs', 'rfcs')
for root, _, filenames in os.walk(rfcs_dir):
for f in filenames:
if f == 'README.md':
yield os.path.join(root, f)
if __name__ == '__main__':
print('*** Checking RFC headers')
for f in get_rfc_readmes(ROOT):
print('*** Checking header for %s' % os.path.relpath(f, start=ROOT))
filename = os.path.basename(os.path.dirname(f))
number, name = filename.split('-', 1)
contents = open(f).read()
header = contents.splitlines()[:3]
update_timestamp = git('log', '-1', '--format=%ct', f)
last_updated = dt.datetime.fromtimestamp(int(update_timestamp))
assert header[0].startswith('# RFC %03d: ' % int(number))
assert header[1] == ''
expected_date_str = '**Last updated: %s.**' % last_updated.strftime('%d %B %Y')
assert header[2] == expected_date_str, (header[2], expected_date_str)
| Check update dates in the RFC headers | Check update dates in the RFC headers
| Python | mit | wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api |
d29410b39af1165ba520e7ecad7e6e9c36a7fd2f | test/test_basic.py | test/test_basic.py | #!/usr/bin/env python3
#coding=UTF-8
import os
import sys
#installed
import pytest
#local
sys.path.append(os.path.split(os.path.split(__file__)[0])[0])
import searchcolor
from api_keys import GoogleKeyLocker as Key
Key = Key()
def test_google_average():
result = searchcolor.google_average('Death', 10, Key.api(), Key.cse())
assert result.get('name') == 'Death'
assert result.get('red') >= 0 and result.get('red') <= 255
assert result.get('green') >= 0 and result.get('green') <= 255
assert result.get('blue') >= 0 and result.get('blue') <= 255
| #!/usr/bin/env python3
#coding=UTF-8
import os
import sys
#installed
import pytest
#local
sys.path.append(os.path.split(os.path.split(__file__)[0])[0])
import searchcolor
from api_keys import GoogleKeyLocker
from api_keys import BingKeyLocker
from api_keys import MSCSKeyLocker
GKL = GoogleKeyLocker()
BKL = BingKeyLocker()
MSCSKL = MSCSKeyLocker()
def test_google_average():
    """google_average should report the query name plus per-channel
    averages, each constrained to the valid 0-255 byte range."""
    result = searchcolor.google_average('Death', 10, GKL.api(), GKL.cse(), max_threads=8)
    assert result.get('name') == 'Death'
    # Chained comparisons are the idiomatic form of `x >= 0 and x <= 255`.
    assert 0 <= result.get('red') <= 255
    assert 0 <= result.get('green') <= 255
    assert 0 <= result.get('blue') <= 255
def test_bing_average():
    """bing_average should report the query name plus per-channel
    averages, each constrained to the valid 0-255 byte range."""
    result = searchcolor.bing_average('Death', 10, BKL.api(), max_threads=8)
    assert result.get('name') == 'Death'
    # Chained comparisons are the idiomatic form of `x >= 0 and x <= 255`.
    assert 0 <= result.get('red') <= 255
    assert 0 <= result.get('green') <= 255
    assert 0 <= result.get('blue') <= 255
def test_mscs_average():
    """mscs_average should report the query name plus per-channel
    averages, each constrained to the valid 0-255 byte range."""
    result = searchcolor.mscs_average('Death', 10, MSCSKL.api(), max_threads=8)
    assert result.get('name') == 'Death'
    # Chained comparisons are the idiomatic form of `x >= 0 and x <= 255`.
    assert 0 <= result.get('red') <= 255
    assert 0 <= result.get('green') <= 255
    assert 0 <= result.get('blue') <= 255
| Add tests for bing and mscs | Add tests for bing and mscs
| Python | mit | Tathorack/searchcolor,Tathorack/searchcolor |
01f3aaf8c0b2351ea41b854142263f2d52c03239 | comics/comics/perrybiblefellowship.py | comics/comics/perrybiblefellowship.py | from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
    # Static metadata describing this comic to the comics framework.
    name = "The Perry Bible Fellowship"
    language = "en"
    url = "http://www.pbfcomics.com/"
    start_date = "2001-01-01"
    rights = "Nicholas Gurewitch"
class Crawler(CrawlerBase):
    history_capable_date = "2019-06-12"
    time_zone = "US/Eastern"

    def crawl(self, pub_date):
        """Return the comic image(s) published on *pub_date*.

        Walks the site feed for the given date, loads the entry's page,
        and collects every image inside the ``div#comic`` container.
        """
        feed = self.parse_feed("http://www.pbfcomics.com/feed/feed.xml")
        for entry in feed.for_date(pub_date):
            page = self.parse_page(entry.link)
            images = page.root.xpath("//div[@id='comic']/img")
            crawler_images = []
            for image in images:
                title = entry.title
                crawler_images.append(CrawlerImage(image.get("src"), title))
            # BUG FIX: crawler_images was built but never returned, so
            # crawl() always yielded None.  Return the images collected
            # for the first matching feed entry.
            return crawler_images
| from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
    # Static metadata describing this comic to the comics framework.
    name = "The Perry Bible Fellowship"
    language = "en"
    url = "http://www.pbfcomics.com/"
    start_date = "2001-01-01"
    rights = "Nicholas Gurewitch"
class Crawler(CrawlerBase):
    history_capable_date = "2019-06-12"
    time_zone = "US/Eastern"

    def crawl(self, pub_date):
        """Fetch the comic image(s) for *pub_date* from the site feed."""
        feed = self.parse_feed("http://www.pbfcomics.com/feed/feed.xml")
        for entry in feed.for_date(pub_date):
            page = self.parse_page(entry.link)
            # Every <img> inside the comic container on the entry's page.
            sources = page.src("div#comic img", allow_multiple=True)
            return [CrawlerImage(src, entry.title) for src in sources]
| Use CSS selector instead of xpath for "The Perry Bible Fellowship" | Use CSS selector instead of xpath for "The Perry Bible Fellowship"
| Python | agpl-3.0 | jodal/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,jodal/comics,datagutten/comics |
cde63b076027345486e4e836a02811962ad5bcaa | tests/test_completion.py | tests/test_completion.py | import os
import subprocess
import sys
from pathlib import Path
import typer
from typer.testing import CliRunner
from first_steps import tutorial001 as mod
# CLI test harness: wrap the tutorial module's main() in a Typer app.
runner = CliRunner()
app = typer.Typer()
app.command()(mod.main)
def test_show_completion():
    """`--show-completion` should print the bash completion snippet."""
    cmd = f"{sys.executable} -m coverage run {mod.__file__} --show-completion"
    completed = subprocess.run(
        ["bash", "-c", cmd],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        encoding="utf-8",
        env={**os.environ, "SHELL": "/bin/bash"},
    )
    assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in completed.stdout
def test_install_completion():
    """`--install-completion` should append the completion hook to
    ~/.bash_completion; the original file contents are restored after."""
    bash_completion_path: Path = Path.home() / ".bash_completion"
    # BUG FIX: read_text() raised FileNotFoundError on machines without an
    # existing ~/.bash_completion; snapshot an empty string in that case.
    text = ""
    if bash_completion_path.is_file():
        text = bash_completion_path.read_text()
    result = subprocess.run(
        [
            "bash",
            "-c",
            f"{sys.executable} -m coverage run {mod.__file__} --install-completion",
        ],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        encoding="utf-8",
        env={**os.environ, "SHELL": "/bin/bash"},
    )
    new_text = bash_completion_path.read_text()
    assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in new_text
    # Restore the user's original completion file.
    bash_completion_path.write_text(text)
| import os
import subprocess
import sys
from pathlib import Path
import typer
from typer.testing import CliRunner
from first_steps import tutorial001 as mod
# CLI test harness: wrap the tutorial module's main() in a Typer app.
runner = CliRunner()
app = typer.Typer()
app.command()(mod.main)
def test_show_completion():
    """`--show-completion` should print the bash completion snippet."""
    cmd = f"{sys.executable} -m coverage run {mod.__file__} --show-completion"
    completed = subprocess.run(
        ["bash", "-c", cmd],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        encoding="utf-8",
        env={**os.environ, "SHELL": "/bin/bash"},
    )
    assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in completed.stdout
def test_install_completion():
    """`--install-completion` appends the hook to ~/.bash_completion;
    the user's original file contents are restored afterwards."""
    completion_file: Path = Path.home() / ".bash_completion"
    # Snapshot the current contents (empty when the file does not exist yet).
    original = completion_file.read_text() if completion_file.is_file() else ""
    cmd = f"{sys.executable} -m coverage run {mod.__file__} --install-completion"
    subprocess.run(
        ["bash", "-c", cmd],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        encoding="utf-8",
        env={**os.environ, "SHELL": "/bin/bash"},
    )
    assert "_TUTORIAL001.PY_COMPLETE=complete-bash" in completion_file.read_text()
    # Put the user's original completion file back.
    completion_file.write_text(original)
| Fix test completion, check for bash completion file before running | :bug: Fix test completion, check for bash completion file before running
| Python | mit | tiangolo/typer,tiangolo/typer |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.