commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
1648e071fe69ba159261f27e4b2d0e2b977d6d83
|
zou/app/models/working_file.py
|
zou/app/models/working_file.py
|
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class WorkingFile(db.Model, BaseMixin, SerializerMixin):
shotgun_id = db.Column(db.Integer())
name = db.Column(db.String(250))
description = db.Column(db.String(200))
comment = db.Column(db.Text())
revision = db.Column(db.Integer())
size = db.Column(db.Integer())
checksum = db.Column(db.Integer())
task_id = db.Column(UUIDType(binary=False), db.ForeignKey("task.id"))
entity_id = db.Column(UUIDType(binary=False), db.ForeignKey("entity.id"))
person_id = \
db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"entity_id",
"revision",
name="working_file_uc"
),
)
def __repr__(self):
return "<WorkingFile %s>" % self.id
|
from sqlalchemy.orm import relationship
from sqlalchemy_utils import UUIDType
from zou.app import db
from zou.app.models.serializer import SerializerMixin
from zou.app.models.base import BaseMixin
class WorkingFile(db.Model, BaseMixin, SerializerMixin):
shotgun_id = db.Column(db.Integer())
name = db.Column(db.String(250))
description = db.Column(db.String(200))
comment = db.Column(db.Text())
revision = db.Column(db.Integer())
size = db.Column(db.Integer())
checksum = db.Column(db.Integer())
path = db.Column(db.String(400))
task_id = db.Column(UUIDType(binary=False), db.ForeignKey("task.id"))
entity_id = db.Column(UUIDType(binary=False), db.ForeignKey("entity.id"))
person_id = \
db.Column(UUIDType(binary=False), db.ForeignKey("person.id"))
software_id = \
db.Column(UUIDType(binary=False), db.ForeignKey("software.id"))
outputs = relationship(
"OutputFile",
back_populates="source_file"
)
__table_args__ = (
db.UniqueConstraint(
"name",
"task_id",
"entity_id",
"revision",
name="working_file_uc"
),
)
def __repr__(self):
return "<WorkingFile %s>" % self.id
|
Add fields to working file model
|
Add fields to working file model
* Software
* List of output files generated
* Path used to store the working file
|
Python
|
agpl-3.0
|
cgwire/zou
|
afb195b1ca647d776f29fbc1d68a495190caec59
|
astropy/time/setup_package.py
|
astropy/time/setup_package.py
|
import os
import numpy
from distutils.extension import Extension
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
time_ext = Extension(
name="astropy.time.sofa_time",
sources=[os.path.join(TIMEROOT, "sofa_time.pyx"), "cextern/sofa/sofa.c"],
include_dirs=[numpy.get_include(), 'cextern/sofa'],
language="c",)
return [time_ext]
|
import os
from distutils.extension import Extension
TIMEROOT = os.path.relpath(os.path.dirname(__file__))
def get_extensions():
time_ext = Extension(
name="astropy.time.sofa_time",
sources=[os.path.join(TIMEROOT, "sofa_time.pyx"), "cextern/sofa/sofa.c"],
include_dirs=['numpy', 'cextern/sofa'],
language="c",)
return [time_ext]
|
Fix remaining include_dirs that imported numpy ('numpy' gets replaced at build-time). This is necessary for egg_info to work.
|
Fix remaining include_dirs that imported numpy ('numpy' gets replaced at build-time). This is necessary for egg_info to work.
|
Python
|
bsd-3-clause
|
kelle/astropy,AustereCuriosity/astropy,joergdietrich/astropy,stargaser/astropy,astropy/astropy,bsipocz/astropy,bsipocz/astropy,kelle/astropy,larrybradley/astropy,StuartLittlefair/astropy,DougBurke/astropy,mhvk/astropy,stargaser/astropy,aleksandr-bakanov/astropy,tbabej/astropy,dhomeier/astropy,lpsinger/astropy,DougBurke/astropy,astropy/astropy,funbaker/astropy,pllim/astropy,pllim/astropy,tbabej/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,MSeifert04/astropy,pllim/astropy,DougBurke/astropy,DougBurke/astropy,AustereCuriosity/astropy,lpsinger/astropy,mhvk/astropy,funbaker/astropy,bsipocz/astropy,astropy/astropy,funbaker/astropy,lpsinger/astropy,saimn/astropy,saimn/astropy,astropy/astropy,AustereCuriosity/astropy,MSeifert04/astropy,joergdietrich/astropy,dhomeier/astropy,MSeifert04/astropy,larrybradley/astropy,kelle/astropy,saimn/astropy,mhvk/astropy,tbabej/astropy,kelle/astropy,kelle/astropy,joergdietrich/astropy,astropy/astropy,joergdietrich/astropy,dhomeier/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,stargaser/astropy,saimn/astropy,mhvk/astropy,funbaker/astropy,stargaser/astropy,dhomeier/astropy,pllim/astropy,AustereCuriosity/astropy,pllim/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,mhvk/astropy,StuartLittlefair/astropy,saimn/astropy,joergdietrich/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,larrybradley/astropy,lpsinger/astropy,tbabej/astropy,AustereCuriosity/astropy,MSeifert04/astropy,lpsinger/astropy,tbabej/astropy
|
55d22f95301c4c96c42e30fa037df5bc957dc7b4
|
incunafein/module/page/extensions/prepared_date.py
|
incunafein/module/page/extensions/prepared_date.py
|
from django.db import models
def register(cls, admin_cls):
cls.add_to_class('prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
|
from django.db import models
def get_prepared_date(cls):
return cls.prepared_date or cls.parent.prepared_date
def register(cls, admin_cls):
cls.add_to_class('prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
cls.add_to_class('get_prepared_date', get_prepared_date)
|
Add a get prepared date method
|
Add a get prepared date method
Child pages won't necessarily have a prepared date and it makes sense to
use the parent date to avoid repetition.
|
Python
|
bsd-2-clause
|
incuna/incuna-feincms,incuna/incuna-feincms,incuna/incuna-feincms
|
0fdb33dc0da1aa953e91e71b0e0cfa75fca3d639
|
skylines/views/__init__.py
|
skylines/views/__init__.py
|
from flask import redirect
from skylines import app
import skylines.views.i18n
import skylines.views.login
import skylines.views.search
from skylines.views.about import about_blueprint
from skylines.views.api import api_blueprint
from skylines.views.flights import flights_blueprint
from skylines.views.notifications import notifications_blueprint
from skylines.views.ranking import ranking_blueprint
from skylines.views.statistics import statistics_blueprint
from skylines.views.upload import upload_blueprint
from skylines.views.users import users_blueprint
app.register_blueprint(about_blueprint, url_prefix='/about')
app.register_blueprint(api_blueprint, url_prefix='/api')
app.register_blueprint(flights_blueprint, url_prefix='/flights')
app.register_blueprint(notifications_blueprint, url_prefix='/notifications')
app.register_blueprint(ranking_blueprint, url_prefix='/ranking')
app.register_blueprint(statistics_blueprint, url_prefix='/statistics')
app.register_blueprint(upload_blueprint, url_prefix='/flights/upload')
app.register_blueprint(users_blueprint, url_prefix='/users')
@app.route('/')
def index():
return redirect('/flights/latest')
|
from flask import redirect, url_for
from skylines import app
import skylines.views.i18n
import skylines.views.login
import skylines.views.search
from skylines.views.about import about_blueprint
from skylines.views.api import api_blueprint
from skylines.views.flights import flights_blueprint
from skylines.views.notifications import notifications_blueprint
from skylines.views.ranking import ranking_blueprint
from skylines.views.statistics import statistics_blueprint
from skylines.views.upload import upload_blueprint
from skylines.views.users import users_blueprint
app.register_blueprint(about_blueprint, url_prefix='/about')
app.register_blueprint(api_blueprint, url_prefix='/api')
app.register_blueprint(flights_blueprint, url_prefix='/flights')
app.register_blueprint(notifications_blueprint, url_prefix='/notifications')
app.register_blueprint(ranking_blueprint, url_prefix='/ranking')
app.register_blueprint(statistics_blueprint, url_prefix='/statistics')
app.register_blueprint(upload_blueprint, url_prefix='/flights/upload')
app.register_blueprint(users_blueprint, url_prefix='/users')
@app.route('/')
def index():
return redirect(url_for('flights.latest'))
|
Use url_for for base redirection
|
views: Use url_for for base redirection
|
Python
|
agpl-3.0
|
shadowoneau/skylines,Turbo87/skylines,snip/skylines,shadowoneau/skylines,Harry-R/skylines,TobiasLohner/SkyLines,RBE-Avionik/skylines,RBE-Avionik/skylines,kerel-fs/skylines,Turbo87/skylines,snip/skylines,kerel-fs/skylines,skylines-project/skylines,RBE-Avionik/skylines,TobiasLohner/SkyLines,skylines-project/skylines,RBE-Avionik/skylines,skylines-project/skylines,Harry-R/skylines,Turbo87/skylines,Harry-R/skylines,Turbo87/skylines,kerel-fs/skylines,shadowoneau/skylines,skylines-project/skylines,shadowoneau/skylines,TobiasLohner/SkyLines,snip/skylines,Harry-R/skylines
|
cc3ab3af17e30e7dd9991d68f01eaa4535b64e6b
|
djangae/models.py
|
djangae/models.py
|
from django.db import models
class CounterShard(models.Model):
count = models.PositiveIntegerField()
|
from django.db import models
class CounterShard(models.Model):
count = models.PositiveIntegerField()
#Apply our django patches
from .patches import *
|
Patch update_contenttypes so that it's less likely to fail due to eventual consistency
|
Patch update_contenttypes so that it's less likely to fail due to eventual consistency
|
Python
|
bsd-3-clause
|
nealedj/djangae,martinogden/djangae,grzes/djangae,stucox/djangae,asendecka/djangae,trik/djangae,trik/djangae,wangjun/djangae,armirusco/djangae,b-cannon/my_djae,jscissr/djangae,grzes/djangae,wangjun/djangae,chargrizzle/djangae,chargrizzle/djangae,leekchan/djangae,kirberich/djangae,martinogden/djangae,pablorecio/djangae,nealedj/djangae,armirusco/djangae,asendecka/djangae,nealedj/djangae,jscissr/djangae,stucox/djangae,potatolondon/djangae,leekchan/djangae,grzes/djangae,jscissr/djangae,asendecka/djangae,kirberich/djangae,leekchan/djangae,SiPiggles/djangae,kirberich/djangae,martinogden/djangae,armirusco/djangae,SiPiggles/djangae,chargrizzle/djangae,pablorecio/djangae,stucox/djangae,SiPiggles/djangae,trik/djangae,potatolondon/djangae,wangjun/djangae,pablorecio/djangae
|
776c3b0df6136606b8b7474418fd5d078457bd0a
|
test/persistence_test.py
|
test/persistence_test.py
|
from os.path import exists, join
import shutil
import tempfile
import time
from lwr.managers.queued import QueueManager
from lwr.managers.stateful import StatefulManagerProxy
from lwr.tools.authorization import get_authorizer
from .test_utils import TestDependencyManager
from galaxy.util.bunch import Bunch
def test_persistence():
"""
Tests persistence of a managers jobs.
"""
staging_directory = tempfile.mkdtemp()
try:
app = Bunch(staging_directory=staging_directory,
persistence_directory=staging_directory,
authorizer=get_authorizer(None),
dependency_manager=TestDependencyManager(),
)
assert not exists(join(staging_directory, "queued_jobs"))
queue1 = StatefulManagerProxy(QueueManager('test', app, num_concurrent_jobs=0))
job_id = queue1.setup_job('4', 'tool1', '1.0.0')
touch_file = join(staging_directory, 'ran')
queue1.launch(job_id, 'touch %s' % touch_file)
time.sleep(.4)
assert (not(exists(touch_file)))
queue1.shutdown()
queue2 = StatefulManagerProxy(QueueManager('test', app, num_concurrent_jobs=1))
time.sleep(1)
assert exists(touch_file)
finally:
shutil.rmtree(staging_directory)
try:
queue2.shutdown()
except:
pass
|
from os.path import exists, join
import shutil
import tempfile
import time
from lwr.managers.queued import QueueManager
from lwr.managers.stateful import StatefulManagerProxy
from lwr.tools.authorization import get_authorizer
from .test_utils import TestDependencyManager
from galaxy.util.bunch import Bunch
from galaxy.jobs.metrics import NULL_JOB_INSTRUMENTER
def test_persistence():
"""
Tests persistence of a managers jobs.
"""
staging_directory = tempfile.mkdtemp()
try:
app = Bunch(staging_directory=staging_directory,
persistence_directory=staging_directory,
authorizer=get_authorizer(None),
dependency_manager=TestDependencyManager(),
job_metrics=Bunch(default_job_instrumenter=NULL_JOB_INSTRUMENTER),
)
assert not exists(join(staging_directory, "queued_jobs"))
queue1 = StatefulManagerProxy(QueueManager('test', app, num_concurrent_jobs=0))
job_id = queue1.setup_job('4', 'tool1', '1.0.0')
touch_file = join(staging_directory, 'ran')
queue1.launch(job_id, 'touch %s' % touch_file)
time.sleep(.4)
assert (not(exists(touch_file)))
queue1.shutdown()
queue2 = StatefulManagerProxy(QueueManager('test', app, num_concurrent_jobs=1))
time.sleep(1)
assert exists(touch_file)
finally:
shutil.rmtree(staging_directory)
try:
queue2.shutdown()
except:
pass
|
Fix another failing unit test (from metrics work).
|
Fix another failing unit test (from metrics work).
|
Python
|
apache-2.0
|
jmchilton/lwr,natefoo/pulsar,natefoo/pulsar,jmchilton/pulsar,galaxyproject/pulsar,jmchilton/pulsar,ssorgatem/pulsar,galaxyproject/pulsar,ssorgatem/pulsar,jmchilton/lwr
|
3ee7d716f0eb3202ccf7ca213747eb903f9bb471
|
__init__.py
|
__init__.py
|
from .Averager import Averager
from .Config import Config
from .RateTicker import RateTicker
from .Ring import Ring
from .SortedList import SortedList
from .String import string2time, time2string
from .Timer import Timer
from .UserInput import user_input
|
from .Averager import Averager
from .Config import Config
from .RateTicker import RateTicker
from .Ring import Ring
from .SortedList import SortedList
from .String import string2time, time2string, time2levels, time2dir, time2fname
from .Timer import Timer
from .UserInput import user_input
|
Add missing names to module namespace.
|
Add missing names to module namespace.
|
Python
|
mit
|
vmlaker/coils
|
c05fc3ae4d6ac0ed459150acf2c19fd892c2ea9f
|
bumblebee/modules/caffeine.py
|
bumblebee/modules/caffeine.py
|
#pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xdg-screensaver
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text="")
)
self._active = False
self.interval(1)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def state(self, widget):
if self._active:
return "activated"
return "deactivated"
def _toggle(self, event):
self._active = not self._active
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
else:
bumblebee.util.execute("notify-send \"Out of coffee\"")
def update(self, widgets):
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
#pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xdg-screensaver
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text="")
)
self._active = False
self.interval(1)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def state(self, widget):
if self._active:
return "activated"
return "deactivated"
def _toggle(self, event):
self._active = not self._active
try:
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
else:
bumblebee.util.execute("notify-send \"Out of coffee\"")
except:
self._active = not self._active
def update(self, widgets):
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
Add some basic error handling in case the executables don't exist
|
Add some basic error handling in case the executables don't exist
|
Python
|
mit
|
tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status
|
ffadde617db8ac3d0d5362b4a521dd4e9839710f
|
order/order_2_login_system_by_https.py
|
order/order_2_login_system_by_https.py
|
import json
import requests
""" Order 2: Login system by https
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'http://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict playload must be use json.dumps() to turn to str.
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
|
import json
import requests
""" Order 2: Login system by https
This is the code which use curl to login system
```
curl -k https://192.168.105.88/axapi/v3/auth -H "Content-type:application/json" -d '{
"credentials": {
"username": "admin",
"password": "a10"
}
}'
```
"""
class LoginSystemByHttps(object):
login_url = 'http://192.168.105.88/axapi/v3/auth'
def login(self):
"""
Note: the dict playload must be use json.dumps() to turn to str.
:return: Result string data
"""
payload = {'credentials': {'username': "admin", 'password': "a10"}}
headers = {'content-type': 'application/json', 'Connection': 'keep-alive'}
response = requests.post(self.login_url, data=json.dumps(payload), verify=False, headers=headers)
print(response.text)
return response.text
# login = LoginSystemByHttps()
# login.login()
|
Order 2: Login system by https
|
[Order] Order 2: Login system by https
|
Python
|
mit
|
flyingSprite/spinelle
|
646a248d59f835264729b48a0116d51089f6113e
|
oscar/templatetags/currency_filters.py
|
oscar/templatetags/currency_filters.py
|
from decimal import Decimal as D, InvalidOperation
from django import template
from django.conf import settings
from babel.numbers import format_currency
register = template.Library()
@register.filter(name='currency')
def currency(value):
"""
Format decimal value as currency
"""
try:
value = D(value)
except (TypeError, InvalidOperation):
return u""
# Using Babel's currency formatting
# http://packages.python.org/Babel/api/babel.numbers-module.html#format_currency
kwargs = {
'currency': settings.OSCAR_DEFAULT_CURRENCY,
'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None)}
locale = getattr(settings, 'OSCAR_CURRENCY_LOCALE', None)
if locale:
kwargs['locale'] = locale
return format_currency(value, **kwargs)
|
from decimal import Decimal as D, InvalidOperation
from django import template
from django.conf import settings
from babel.numbers import format_currency
register = template.Library()
@register.filter(name='currency')
def currency(value):
"""
Format decimal value as currency
"""
try:
value = D(value)
except (TypeError, InvalidOperation):
return u""
# Using Babel's currency formatting
# http://babel.pocoo.org/docs/api/numbers/#babel.numbers.format_currency
kwargs = {
'currency': settings.OSCAR_DEFAULT_CURRENCY,
'format': getattr(settings, 'OSCAR_CURRENCY_FORMAT', None)}
locale = getattr(settings, 'OSCAR_CURRENCY_LOCALE', None)
if locale:
kwargs['locale'] = locale
return format_currency(value, **kwargs)
|
Replace broken babel documentation link
|
Replace broken babel documentation link
According to Babel's PyPI package page, http://babel.pocoo.org/docs/ is
the official documentation website.
|
Python
|
bsd-3-clause
|
lijoantony/django-oscar,faratro/django-oscar,michaelkuty/django-oscar,MatthewWilkes/django-oscar,django-oscar/django-oscar,dongguangming/django-oscar,taedori81/django-oscar,pasqualguerrero/django-oscar,marcoantoniooliveira/labweb,faratro/django-oscar,Jannes123/django-oscar,binarydud/django-oscar,Jannes123/django-oscar,solarissmoke/django-oscar,faratro/django-oscar,pdonadeo/django-oscar,ademuk/django-oscar,vovanbo/django-oscar,michaelkuty/django-oscar,okfish/django-oscar,thechampanurag/django-oscar,sasha0/django-oscar,rocopartners/django-oscar,binarydud/django-oscar,pdonadeo/django-oscar,elliotthill/django-oscar,john-parton/django-oscar,adamend/django-oscar,ahmetdaglarbas/e-commerce,mexeniz/django-oscar,Jannes123/django-oscar,monikasulik/django-oscar,rocopartners/django-oscar,josesanch/django-oscar,QLGu/django-oscar,Bogh/django-oscar,dongguangming/django-oscar,spartonia/django-oscar,bschuon/django-oscar,vovanbo/django-oscar,marcoantoniooliveira/labweb,WadeYuChen/django-oscar,manevant/django-oscar,anentropic/django-oscar,django-oscar/django-oscar,thechampanurag/django-oscar,Jannes123/django-oscar,solarissmoke/django-oscar,marcoantoniooliveira/labweb,jinnykoo/wuyisj,monikasulik/django-oscar,Bogh/django-oscar,bnprk/django-oscar,lijoantony/django-oscar,adamend/django-oscar,okfish/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj,kapt/django-oscar,jinnykoo/wuyisj.com,pasqualguerrero/django-oscar,MatthewWilkes/django-oscar,anentropic/django-oscar,DrOctogon/unwash_ecom,josesanch/django-oscar,WadeYuChen/django-oscar,makielab/django-oscar,WillisXChen/django-oscar,manevant/django-oscar,faratro/django-oscar,john-parton/django-oscar,itbabu/django-oscar,spartonia/django-oscar,kapari/django-oscar,WillisXChen/django-oscar,ademuk/django-oscar,thechampanurag/django-oscar,kapari/django-oscar,saadatqadri/django-oscar,machtfit/django-oscar,makielab/django-oscar,sonofatailor/django-oscar,jinnykoo/christmas,michaelkuty/django-oscar,makielab/django-oscar,okfish/django-oscar,manevant/django-oscar,DrOctogon/unwash_ecom,taedori81/django-oscar,Idematica/django-oscar,Idematica/django-oscar,nickpack/django-oscar,jinnykoo/christmas,ahmetdaglarbas/e-commerce,mexeniz/django-oscar,vovanbo/django-oscar,jinnykoo/wuyisj.com,okfish/django-oscar,mexeniz/django-oscar,jinnykoo/wuyisj.com,machtfit/django-oscar,itbabu/django-oscar,bschuon/django-oscar,dongguangming/django-oscar,MatthewWilkes/django-oscar,anentropic/django-oscar,machtfit/django-oscar,Idematica/django-oscar,michaelkuty/django-oscar,jinnykoo/wuyisj.com,sasha0/django-oscar,amirrpp/django-oscar,monikasulik/django-oscar,bnprk/django-oscar,elliotthill/django-oscar,spartonia/django-oscar,lijoantony/django-oscar,jinnykoo/wuyisj,nickpack/django-oscar,sonofatailor/django-oscar,nfletton/django-oscar,jmt4/django-oscar,ka7eh/django-oscar,WadeYuChen/django-oscar,WillisXChen/django-oscar,eddiep1101/django-oscar,rocopartners/django-oscar,saadatqadri/django-oscar,binarydud/django-oscar,jlmadurga/django-oscar,django-oscar/django-oscar,itbabu/django-oscar,ademuk/django-oscar,jmt4/django-oscar,sasha0/django-oscar,nfletton/django-oscar,DrOctogon/unwash_ecom,taedori81/django-oscar,Bogh/django-oscar,ahmetdaglarbas/e-commerce,anentropic/django-oscar,binarydud/django-oscar,WadeYuChen/django-oscar,jlmadurga/django-oscar,makielab/django-oscar,marcoantoniooliveira/labweb,nfletton/django-oscar,manevant/django-oscar,nickpack/django-oscar,lijoantony/django-oscar,taedori81/django-oscar,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,kapari/django-oscar,ka7eh/django-oscar,saadatqadri/django-oscar,bnprk/django-oscar,solarissmoke/django-oscar,john-parton/django-oscar,solarissmoke/django-oscar,QLGu/django-oscar,kapt/django-oscar,john-parton/django-oscar,WillisXChen/django-oscar,jlmadurga/django-oscar,elliotthill/django-oscar,pdonadeo/django-oscar,pasqualguerrero/django-oscar,amirrpp/django-oscar,nickpack/django-oscar,bschuon/django-oscar,kapari/django-oscar,sasha0/django-oscar,MatthewWilkes/django-oscar,Bogh/django-oscar,nfletton/django-oscar,pasqualguerrero/django-oscar,dongguangming/django-oscar,amirrpp/django-oscar,saadatqadri/django-oscar,josesanch/django-oscar,QLGu/django-oscar,monikasulik/django-oscar,ademuk/django-oscar,spartonia/django-oscar,jlmadurga/django-oscar,jinnykoo/christmas,ka7eh/django-oscar,rocopartners/django-oscar,sonofatailor/django-oscar,eddiep1101/django-oscar,QLGu/django-oscar,jmt4/django-oscar,adamend/django-oscar,adamend/django-oscar,eddiep1101/django-oscar,bnprk/django-oscar,jinnykoo/wuyisj,WillisXChen/django-oscar,amirrpp/django-oscar,eddiep1101/django-oscar,vovanbo/django-oscar,bschuon/django-oscar,kapt/django-oscar,mexeniz/django-oscar,WillisXChen/django-oscar,ka7eh/django-oscar,thechampanurag/django-oscar,pdonadeo/django-oscar,jmt4/django-oscar,django-oscar/django-oscar
|
315b581b9b0438389c7f4eb651d2893b805a2369
|
translit.py
|
translit.py
|
class Transliterator(object):
def __init__(self, mapping, invert=False):
self.mapping = [
(v, k) if invert else (k, v)
for k, v in mapping.items()
]
self._rules = sorted(
self.mapping,
key=lambda item: len(item[0]),
reverse=True,
)
@property
def rules(self):
for r in self._rules:
yield r
# Handle the case when one source upper char is represented by
# several latin chars, all uppercase. i.e. "CH" instead of "Ch"
k, v = r
if len(k) > 1 and k[0].isupper():
yield (k.upper(), v.upper())
def convert(self, input_string):
"""Transliterate input string."""
for (source_char, translit_char) in self.rules:
input_string = input_string.replace(source_char, translit_char)
return input_string
|
class Transliterator(object):
def __init__(self, mapping, invert=False):
self.mapping = [
(v, k) if invert else (k, v)
for k, v in mapping.items()
]
self._rules = sorted(
self.mapping,
key=lambda item: len(item[0]),
reverse=True,
)
@property
def rules(self):
for r in self._rules:
k, v = r
if len(k) == 0:
continue # for case when char is removed and mapping inverted
yield r
# Handle the case when one source upper char is represented by
# several latin chars, all uppercase. i.e. "CH" instead of "Ch"
if len(k) > 1 and k[0].isupper():
yield (k.upper(), v.upper())
def convert(self, input_string):
"""Transliterate input string."""
for (source_char, translit_char) in self.rules:
input_string = input_string.replace(source_char, translit_char)
return input_string
|
Handle case when char is mapped to empty (removed) and table is inverted
|
Handle case when char is mapped to empty (removed) and table is inverted
|
Python
|
mit
|
malexer/SublimeTranslit
|
6f8f449316a71dd284d2661d206d88d35c01ea54
|
TrevorNet/tests/test_idx.py
|
TrevorNet/tests/test_idx.py
|
from .. import idx
import os
def test__find_depth():
yield check__find_depth, 9, 0
yield check__find_depth, [1, 2], 1
yield check__find_depth, [[1, 2], [3, 6, 2]], 2
yield check__find_depth, [[[1,2], [2]]], 3
def check__find_depth(lst, i):
assert idx._find_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
_somebytes = '\x00\x00\x0C\x02' + '\x01\x02\x03\x04'
_testfolder = os.path.dirname(os.path.realpath(__file__))
_somepath = os.path.join(_testfolder, 'test_idx_file')
def test_list_to_idx():
idx.list_to_idx(_somelist, _somepath, 'i')
with open(_somepath, 'rb') as f:
data = f.read()
os.remove(_somepath)
assert data == _somebytes
def test_idx_to_list():
with open(_somepath, 'wb') as f:
f.write(_somebytes)
lst = idx.idx_to_list(_somepath)
os.remove(_somepath)
assert lst == _somelist
|
from .. import idx
import os
def test__count_dimensions():
yield check__count_dimensions, 9, 0
yield check__count_dimensions, [1, 2], 1
yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2
yield check__count_dimensions, [[[1,2], [2]]], 3
def check__count_dimensions(lst, i):
assert idx._count_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
_somebytes = b'\x00\x00\x0C\x02' + b'\x01\x02\x03\x04'
def test_list_to_idx():
data = idx.list_to_idx(_somelist, 'i')
assert data == _somebytes
def test_idx_to_list():
lst = idx.idx_to_list(_somebytes)
assert lst == _somelist
|
Update for python 3 and new idx design
|
Update for python 3 and new idx design
idx no longer writes to files, it only processes bytes
|
Python
|
mit
|
tmerr/trevornet
|
564075cbb66c6e79a6225d7f678aea804075b966
|
api/urls.py
|
api/urls.py
|
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fbxnano.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url('^status$', TemplateView.as_view(template_name='api/status.html'), name='status'),
)
|
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
from .views import StatusView
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fbxnano.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url('^status$', StatusView.as_view(), name='status'),
)
|
Switch from generic TemplateView to new StatusView
|
Switch from generic TemplateView to new StatusView
|
Python
|
mit
|
Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters,Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano,Kromey/fbxnano,Kromey/fbxnano
|
e615e2ebf3f364ba093c48d6fb0c988f0b97bc13
|
nyuki/workflow/tasks/__init__.py
|
nyuki/workflow/tasks/__init__.py
|
from .factory import FactoryTask
from .report import ReportTask
from .sleep import SleepTask
# Generic schema to reference a task ID
TASKID_SCHEMA = {
'type': 'string',
'description': 'task_id'
}
|
from .factory import FactoryTask
from .report import ReportTask
from .sleep import SleepTask
# Generic schema to reference a task ID
TASKID_SCHEMA = {
'type': 'string',
'description': 'task_id',
'maxLength': 128
}
|
Add maxlength to taskid schema
|
Add maxlength to taskid schema
|
Python
|
apache-2.0
|
gdraynz/nyuki,optiflows/nyuki,gdraynz/nyuki,optiflows/nyuki
|
fe4ce6dfa26c60747b6024fa9f6d991aa3b95614
|
scripts/codegen_driverwrappers/generate_driver_wrappers.py
|
scripts/codegen_driverwrappers/generate_driver_wrappers.py
|
#!/usr/bin/env python3
import sys
import json
import os
import jinja2
def render(tpl_path):
path, filename = os.path.split(tpl_path)
return jinja2.Environment(
loader=jinja2.FileSystemLoader(path or './')
).get_template(filename).render()
n = len(sys.argv)
if ( n != 3 ):
sys.exit("The template file name and output file name are expected as arguments")
# set template file name, output file name
driver_wrapper_template_filename = sys.argv[1]
driver_wrapper_output_filename = sys.argv[2]
# render the template
result = render(driver_wrapper_template_filename)
# write output to file
outFile = open(driver_wrapper_output_filename,"w")
outFile.write(result)
outFile.close()
|
#!/usr/bin/env python3
import sys
import json
import os
import jinja2
def render(tpl_path):
path, filename = os.path.split(tpl_path)
return jinja2.Environment(
loader=jinja2.FileSystemLoader(path or './'),
keep_trailing_newline=True,
).get_template(filename).render()
n = len(sys.argv)
if ( n != 3 ):
sys.exit("The template file name and output file name are expected as arguments")
# set template file name, output file name
driver_wrapper_template_filename = sys.argv[1]
driver_wrapper_output_filename = sys.argv[2]
# render the template
result = render(driver_wrapper_template_filename)
# write output to file
outFile = open(driver_wrapper_output_filename,"w")
outFile.write(result)
outFile.close()
|
Fix trailing newline getting dropped
|
Fix trailing newline getting dropped
Signed-off-by: Gilles Peskine <f805f64266d288fc5467baa7be6cd0ff366f477b@arm.com>
|
Python
|
apache-2.0
|
Mbed-TLS/mbedtls,NXPmicro/mbedtls,NXPmicro/mbedtls,Mbed-TLS/mbedtls,NXPmicro/mbedtls,NXPmicro/mbedtls,ARMmbed/mbedtls,Mbed-TLS/mbedtls,ARMmbed/mbedtls,ARMmbed/mbedtls,Mbed-TLS/mbedtls,ARMmbed/mbedtls
|
c264e4b19505bfb0ccebc1551c7b82e96b6a2882
|
amqpy/tests/test_version.py
|
amqpy/tests/test_version.py
|
class TestVersion:
def test_version_is_consistent(self):
from .. import VERSION
with open('README.rst') as f:
readme = f.read().split('\n')
version_list = readme[3].split(':')[2].strip().split('.')
version_list = [int(i) for i in version_list]
readme_version = tuple(version_list)
assert VERSION == readme_version
|
import re
def get_field(doc: str, name: str):
match = re.search(':{}: (.*)$'.format(name), doc, re.IGNORECASE | re.MULTILINE)
if match:
return match.group(1).strip()
class TestVersion:
def test_version_is_consistent(self):
from .. import VERSION
with open('README.rst') as f:
readme = f.read()
version = get_field(readme, 'version')
version = version.split('.')
version = [int(i) for i in version]
version = tuple(version)
assert VERSION == version
|
Clean up test for version number
|
Clean up test for version number
A new function is implemented to cleanly extract the version field from the
README.rst field list.
|
Python
|
mit
|
veegee/amqpy,gst/amqpy
|
a7830d85c6966732e46da63903c04234d8d16c39
|
admin/nodes/serializers.py
|
admin/nodes/serializers.py
|
import json
from website.util.permissions import reduce_permissions
from admin.users.serializers import serialize_simple_node
def serialize_node(node):
embargo = node.embargo
if embargo is not None:
embargo = node.embargo.end_date
return {
'id': node._id,
'title': node.title,
'public': node.is_public,
'parent': node.parent_id,
'root': node.root._id,
'is_registration': node.is_registration,
'date_created': node.date_created,
'withdrawn': node.is_retracted,
'embargo': embargo,
'contributors': [serialize_simple_user_and_node_permissions(node, user) for user in node.contributors],
'children': map(serialize_simple_node, node.nodes),
'deleted': node.is_deleted,
'pending_registration': node.is_pending_registration,
'creator': node.creator._id,
'spam_status': node.spam_status,
'spam_pro_tip': node.spam_pro_tip,
'spam_data': json.dumps(node.spam_data, indent=4),
'is_public': node.is_public,
}
def serialize_simple_user_and_node_permissions(node, user):
return {
'id': user._id,
'name': user.fullname,
'permission': reduce_permissions(node.get_permissions(user))
}
|
import json
from website.util.permissions import reduce_permissions
from admin.users.serializers import serialize_simple_node
def serialize_node(node):
embargo = node.embargo
if embargo is not None:
embargo = node.embargo.end_date
return {
'id': node._id,
'title': node.title,
'public': node.is_public,
'parent': node.parent_id,
'root': node.root._id,
'is_registration': node.is_registration,
'date_created': node.date_created,
'withdrawn': node.is_retracted,
'embargo': embargo,
'contributors': [serialize_simple_user_and_node_permissions(node, user) for user in node.contributors],
'children': map(serialize_simple_node, node.nodes),
'deleted': node.is_deleted,
'pending_registration': node.is_pending_registration,
'registered_date': node.registered_date,
'creator': node.creator._id,
'spam_status': node.spam_status,
'spam_pro_tip': node.spam_pro_tip,
'spam_data': json.dumps(node.spam_data, indent=4),
'is_public': node.is_public,
}
def serialize_simple_user_and_node_permissions(node, user):
return {
'id': user._id,
'name': user.fullname,
'permission': reduce_permissions(node.get_permissions(user))
}
|
Add date_registered to node serializer
|
Add date_registered to node serializer
[#OSF-7230]
|
Python
|
apache-2.0
|
mattclark/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,mattclark/osf.io,caseyrollins/osf.io,chennan47/osf.io,adlius/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,chennan47/osf.io,hmoco/osf.io,caneruguz/osf.io,mfraezz/osf.io,caneruguz/osf.io,cslzchen/osf.io,sloria/osf.io,caneruguz/osf.io,felliott/osf.io,Nesiehr/osf.io,icereval/osf.io,mattclark/osf.io,binoculars/osf.io,aaxelb/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,icereval/osf.io,sloria/osf.io,chennan47/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,chrisseto/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,erinspace/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,aaxelb/osf.io,binoculars/osf.io,crcresearch/osf.io,felliott/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,TomBaxter/osf.io,baylee-d/osf.io,caneruguz/osf.io,caseyrollins/osf.io,adlius/osf.io,chrisseto/osf.io,binoculars/osf.io,sloria/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,laurenrevere/osf.io,felliott/osf.io,mfraezz/osf.io,cslzchen/osf.io,hmoco/osf.io,Nesiehr/osf.io,mfraezz/osf.io,crcresearch/osf.io,aaxelb/osf.io,chrisseto/osf.io,crcresearch/osf.io,cwisecarver/osf.io,cslzchen/osf.io,icereval/osf.io,felliott/osf.io,adlius/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,erinspace/osf.io,saradbowman/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,chrisseto/osf.io,laurenrevere/osf.io,pattisdr/osf.io
|
f625cac0a49bafc96403f5b34c2e138f8d2cfbea
|
dev/lint.py
|
dev/lint.py
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
from flake8.engine import get_style_guide
cur_dir = os.path.dirname(__file__)
config_file = os.path.join(cur_dir, '..', 'tox.ini')
def run():
"""
Runs flake8 lint
:return:
A bool - if flake8 did not find any errors
"""
print('Running flake8')
flake8_style = get_style_guide(config_file=config_file)
paths = []
for root, _, filenames in os.walk('asn1crypto'):
for filename in filenames:
if not filename.endswith('.py'):
continue
paths.append(os.path.join(root, filename))
report = flake8_style.check_files(paths)
success = report.total_errors == 0
if success:
print('OK')
return success
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import os
import flake8
if flake8.__version_info__ < (3,):
from flake8.engine import get_style_guide
else:
from flake8.api.legacy import get_style_guide
cur_dir = os.path.dirname(__file__)
config_file = os.path.join(cur_dir, '..', 'tox.ini')
def run():
"""
Runs flake8 lint
:return:
A bool - if flake8 did not find any errors
"""
print('Running flake8')
flake8_style = get_style_guide(config_file=config_file)
paths = []
for root, _, filenames in os.walk('asn1crypto'):
for filename in filenames:
if not filename.endswith('.py'):
continue
paths.append(os.path.join(root, filename))
report = flake8_style.check_files(paths)
success = report.total_errors == 0
if success:
print('OK')
return success
|
Add support for flake8 3.0
|
Add support for flake8 3.0
|
Python
|
mit
|
wbond/asn1crypto
|
573718a17e5e2d3fe23b1c8cd128a9b46d6076e6
|
example-theme.py
|
example-theme.py
|
# Supported 16 color values:
# 'h0' (color number 0) through 'h15' (color number 15)
# or
# 'default' (use the terminal's default foreground),
# 'black', 'dark red', 'dark green', 'brown', 'dark blue',
# 'dark magenta', 'dark cyan', 'light gray', 'dark gray',
# 'light red', 'light green', 'yellow', 'light blue',
# 'light magenta', 'light cyan', 'white'
#
# Supported 256 color values:
# 'h0' (color number 0) through 'h255' (color number 255)
#
# 256 color chart: http://en.wikipedia.org/wiki/File:Xterm_color_chart.png
#
# "setting_name": (foreground_color, background_color),
palette.update({
"source": (add_setting("black", "underline"), "dark green"),
"comment": ("h250", "default")
})
|
# Supported 16 color values:
# 'h0' (color number 0) through 'h15' (color number 15)
# or
# 'default' (use the terminal's default foreground),
# 'black', 'dark red', 'dark green', 'brown', 'dark blue',
# 'dark magenta', 'dark cyan', 'light gray', 'dark gray',
# 'light red', 'light green', 'yellow', 'light blue',
# 'light magenta', 'light cyan', 'white'
#
# Supported 256 color values:
# 'h0' (color number 0) through 'h255' (color number 255)
#
# 256 color chart: http://en.wikipedia.org/wiki/File:Xterm_color_chart.png
#
# "setting_name": (foreground_color, background_color),
# See this URL to see what keys there are:
# https://github.com/inducer/pudb/blob/master/pudb/theme.py
palette.update({
"source": (add_setting("black", "underline"), "dark green"),
"comment": ("h250", "default")
})
|
Add link to defined colors to example theme
|
Add link to defined colors to example theme
|
Python
|
mit
|
amigrave/pudb,albfan/pudb,amigrave/pudb,albfan/pudb
|
b50b7143185131a81e84f0659ff6405317f7d36f
|
resolwe/flow/execution_engines/base.py
|
resolwe/flow/execution_engines/base.py
|
"""Workflow execution engines."""
from resolwe.flow.engine import BaseEngine
class BaseExecutionEngine(BaseEngine):
"""A workflow execution engine."""
def evaluate(self, data):
"""Return the code needed to compute a given Data object."""
raise NotImplementedError
def get_expression_engine(self, name):
"""Return an expression engine by its name."""
return self.manager.get_expression_engine(name)
def get_output_schema(self, process):
"""Return any additional output schema for the process."""
return []
def discover_process(self, path):
"""Perform process discovery in given path.
This method will be called during process registration and
should return a list of dictionaries with discovered process
schemas.
"""
return []
def prepare_runtime(self, runtime_dir, data):
"""Prepare runtime directory.
This method should return a dictionary of volume maps, where
keys are files or directories relative the the runtime directory
and values are paths under which these should be made available
to the executing program. All volumes will be read-only.
"""
|
"""Workflow execution engines."""
from resolwe.flow.engine import BaseEngine
class BaseExecutionEngine(BaseEngine):
"""A workflow execution engine."""
def evaluate(self, data):
"""Return the code needed to compute a given Data object."""
raise NotImplementedError
def get_expression_engine(self, name):
"""Return an expression engine by its name."""
return self.manager.get_expression_engine(name)
def get_output_schema(self, process):
"""Return any additional output schema for the process."""
return []
def discover_process(self, path):
"""Perform process discovery in given path.
This method will be called during process registration and
should return a list of dictionaries with discovered process
schemas.
"""
return []
def prepare_runtime(self, runtime_dir, data):
"""Prepare runtime directory.
This method should return a dictionary of volume maps, where
keys are files or directories relative the the runtime directory
and values are paths under which these should be made available
to the executing program. All volumes will be read-only.
"""
return {}
|
Return empty dictionary instead of None
|
Return empty dictionary instead of None
|
Python
|
apache-2.0
|
genialis/resolwe,genialis/resolwe
|
b62f52a30404901ff3ffa7af90a3f1bdd7d05401
|
project/hhlcallback/utils.py
|
project/hhlcallback/utils.py
|
# -*- coding: utf-8 -*-
import environ
env = environ.Env()
HOLVI_CNC = False
def get_holvi_singleton():
global HOLVI_CNC
if HOLVI_CNC:
return HOLVI_CNC
holvi_pool = env('HOLVI_POOL', default=None)
holvi_key = env('HOLVI_APIKEY', default=None)
if not holvi_pool or not holvi_key:
return False
import holviapi
HOLVI_CNC = holviapi.Connection(holvi_pool, holvi_key)
return HOLVI_CNC
|
# -*- coding: utf-8 -*-
import holviapi.utils
def get_nordea_payment_reference(member_id, number):
base = member_id + 1000
return holviapi.utils.int2fin_reference(int("%s%s" % (base, number)))
|
Remove copy-pasted code, add helper for making legacy reference number for payments
|
Remove copy-pasted code, add helper for making legacy reference number for payments
|
Python
|
mit
|
HelsinkiHacklab/asylum,HelsinkiHacklab/asylum,HelsinkiHacklab/asylum,HelsinkiHacklab/asylum
|
6f30aed2b5f157bb22c8761a92464302ec5d8911
|
DebianChangesBot/utils/__init__.py
|
DebianChangesBot/utils/__init__.py
|
# -*- coding: utf-8 -*-
import email.quoprimime
def quoted_printable(val):
try:
if type(val) is str:
return email.quoprimime.header_decode(val)
else:
return unicode(email.quoprimime.header_decode(str(val)), 'utf-8')
except Exception, e:
# We ignore errors here. Most of these originate from a spam
# report adding a synopsis of a message with broken encodings.
pass
return val
from parse_mail import parse_mail
|
# -*- coding: utf-8 -*-
import email
import re
def header_decode(s):
def unquote_match(match):
s = match.group(0)
return chr(int(s[1:3], 16))
s = s.replace('_', ' ')
return re.sub(r'=\w{2}', unquote_match, s)
def quoted_printable(val):
try:
if type(val) is str:
save = header_decode(val)
val = ' '.join([chunk.decode(encoding or 'ascii', 'replace') for chunk, encoding in
email.Header.decode_header(val)])
if len(val) > len(save):
val = unicode(save, 'utf-8', 'replace')
else:
return unicode(email.quoprimime.header_decode(str(val)), 'utf-8', 'replace')
except Exception, e:
# We ignore errors here. Most of these originate from a spam
# report adding a synopsis of a message with broken encodings.
pass
return val
from parse_mail import parse_mail
|
Update header_decode to handle bare and non-bare quoted-printable chars
|
Update header_decode to handle bare and non-bare quoted-printable chars
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
|
Python
|
agpl-3.0
|
xtaran/debian-devel-changes-bot,xtaran/debian-devel-changes-bot,lamby/debian-devel-changes-bot,lamby/debian-devel-changes-bot,sebastinas/debian-devel-changes-bot,lamby/debian-devel-changes-bot
|
b5b17c5152e969ed4e629a5df8dd296cde164f9b
|
polymer_states/__init__.py
|
polymer_states/__init__.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Link states
UP, DOWN = (0, 1), (0, -1)
LEFT, RIGHT = (-1, 0), (1, 0)
SLACK = (0, 0)
|
Add link states to polymer_states
|
Add link states to polymer_states
|
Python
|
mpl-2.0
|
szabba/applied-sims
|
656c0a9b91ee6f6f3f9811b16ab75dc8003402ad
|
altair/examples/line_chart_with_generator.py
|
altair/examples/line_chart_with_generator.py
|
"""
Line Chart with Sequence Generator
----------------------------------
This examples shows how to create multiple lines using the sequence generator.
"""
# category: line charts
import altair as alt
source = alt.sequence(start=0, stop=12.7, step=0.1, as_='x')
alt.Chart(source).mark_line().transform_calculate(
sin='sin(datum.x)'
).transform_calculate(
cos='cos(datum.x)'
).transform_fold(
['sin', 'cos']
).encode(
x='x:Q',
y='value:Q',
color='key:N'
)
|
"""
Line Chart with Sequence Generator
----------------------------------
This examples shows how to create multiple lines using the sequence generator.
"""
# category: line charts
import altair as alt
source = alt.sequence(start=0, stop=12.7, step=0.1, as_='x')
alt.Chart(source).mark_line().transform_calculate(
sin='sin(datum.x)',
cos='cos(datum.x)'
).transform_fold(
['sin', 'cos']
).encode(
x='x:Q',
y='value:Q',
color='key:N'
)
|
Modify generator example to use single calculation transform
|
DOC: Modify generator example to use single calculation transform
|
Python
|
bsd-3-clause
|
jakevdp/altair,altair-viz/altair
|
7319ac2eb5d31b14c731371a82102c90d8ec3979
|
tests/test_reflection_views.py
|
tests/test_reflection_views.py
|
from sqlalchemy import MetaData, Table, inspect
from sqlalchemy.schema import CreateTable
from rs_sqla_test_utils.utils import clean, compile_query
def table_to_ddl(engine, table):
return str(CreateTable(table)
.compile(engine))
def test_view_reflection(redshift_engine):
table_ddl = "CREATE TABLE my_table (col1 INTEGER, col2 INTEGER)"
view_query = "SELECT my_table.col1, my_table.col2 FROM my_table"
view_ddl = "CREATE VIEW my_view AS %s" % view_query
conn = redshift_engine.connect()
conn.execute(table_ddl)
conn.execute(view_ddl)
insp = inspect(redshift_engine)
view_definition = insp.get_view_definition('my_view')
assert(clean(compile_query(view_definition)) == clean(view_query))
view = Table('my_view', MetaData(),
autoload=True, autoload_with=redshift_engine)
assert(len(view.columns) == 2)
|
from sqlalchemy import MetaData, Table, inspect
from sqlalchemy.schema import CreateTable
from rs_sqla_test_utils.utils import clean, compile_query
def table_to_ddl(engine, table):
return str(CreateTable(table)
.compile(engine))
def test_view_reflection(redshift_engine):
table_ddl = "CREATE TABLE my_table (col1 INTEGER, col2 INTEGER)"
view_query = "SELECT my_table.col1, my_table.col2 FROM my_table"
view_ddl = "CREATE VIEW my_view AS %s" % view_query
conn = redshift_engine.connect()
conn.execute(table_ddl)
conn.execute(view_ddl)
insp = inspect(redshift_engine)
view_definition = insp.get_view_definition('my_view')
assert(clean(compile_query(view_definition)) == clean(view_query))
view = Table('my_view', MetaData(),
autoload=True, autoload_with=redshift_engine)
assert(len(view.columns) == 2)
def test_late_binding_view_reflection(redshift_engine):
table_ddl = "CREATE TABLE my_table (col1 INTEGER, col2 INTEGER)"
view_query = "SELECT my_table.col1, my_table.col2 FROM public.my_table"
view_ddl = ("CREATE VIEW my_late_view AS "
"%s WITH NO SCHEMA BINDING" % view_query)
conn = redshift_engine.connect()
conn.execute(table_ddl)
conn.execute(view_ddl)
insp = inspect(redshift_engine)
view_definition = insp.get_view_definition('my_late_view')
# For some reason, Redshift returns the entire DDL for late binding views.
assert(clean(compile_query(view_definition)) == clean(view_ddl))
view = Table('my_late_view', MetaData(),
autoload=True, autoload_with=redshift_engine)
assert(len(view.columns) == 2)
|
Add test for late-binding views
|
Add test for late-binding views
|
Python
|
mit
|
sqlalchemy-redshift/sqlalchemy-redshift,sqlalchemy-redshift/sqlalchemy-redshift,graingert/redshift_sqlalchemy
|
e051c915d72b76a189c16de6ff82bcebdab9f881
|
caffe2/python/layers/__init__.py
|
caffe2/python/layers/__init__.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from importlib import import_module
import pkgutil
import sys
import inspect
from . import layers
def import_recursive(package, clsmembers):
"""
Takes a package and imports all modules underneath it
"""
pkg_dir = package.__path__
module_location = package.__name__
for (_module_loader, name, ispkg) in pkgutil.iter_modules(pkg_dir):
module_name = "{}.{}".format(module_location, name) # Module/package
module = import_module(module_name)
clsmembers += [cls[1] for cls in inspect.getmembers(module, inspect.isclass)]
if ispkg:
import_recursive(module, clsmembers)
clsmembers = []
import_recursive(sys.modules[__name__], clsmembers)
for cls in clsmembers:
if issubclass(cls, layers.ModelLayer) and cls is not layers.ModelLayer:
layers.register_layer(cls.__name__, cls)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from importlib import import_module
import pkgutil
import sys
from . import layers
def import_recursive(package):
"""
Takes a package and imports all modules underneath it
"""
pkg_dir = package.__path__
module_location = package.__name__
for (_module_loader, name, ispkg) in pkgutil.iter_modules(pkg_dir):
module_name = "{}.{}".format(module_location, name) # Module/package
module = import_module(module_name)
if ispkg:
import_recursive(module)
def find_subclasses_recursively(base_cls, sub_cls):
cur_sub_cls = base_cls.__subclasses__()
sub_cls.update(cur_sub_cls)
for cls in cur_sub_cls:
find_subclasses_recursively(cls, sub_cls)
import_recursive(sys.modules[__name__])
model_layer_subcls = set()
find_subclasses_recursively(layers.ModelLayer, model_layer_subcls)
for cls in list(model_layer_subcls):
layers.register_layer(cls.__name__, cls)
|
Allow to import subclasses of layers
|
Allow to import subclasses of layers
Summary:
We want it to be able to register children of layers who
are not direct children of ModelLayer.
This requires us to find subclasses of ModelLayer recursively.
Reviewed By: kittipatv, kennyhorror
Differential Revision: D5397120
fbshipit-source-id: cb1e03d72e3bedb960b1b865877a76e413218a71
|
Python
|
apache-2.0
|
Yangqing/caffe2,xzturn/caffe2,sf-wind/caffe2,pietern/caffe2,pietern/caffe2,davinwang/caffe2,sf-wind/caffe2,davinwang/caffe2,sf-wind/caffe2,caffe2/caffe2,Yangqing/caffe2,bwasti/caffe2,Yangqing/caffe2,bwasti/caffe2,xzturn/caffe2,pietern/caffe2,davinwang/caffe2,bwasti/caffe2,bwasti/caffe2,sf-wind/caffe2,sf-wind/caffe2,bwasti/caffe2,pietern/caffe2,Yangqing/caffe2,davinwang/caffe2,xzturn/caffe2,Yangqing/caffe2,xzturn/caffe2,davinwang/caffe2,pietern/caffe2,xzturn/caffe2
|
b99770a7c55cd6951df872793a54bfa260b145f9
|
basics/test/module-test.py
|
basics/test/module-test.py
|
from unittest import TestCase
from basics import BaseCharacter
from basics import BaseAttachment
class ModuleTest(TestCase):
def test_character_attach_attachment(self):
character = BaseCharacter().save()
attachment = BaseAttachment().save()
# Attachment should not be among the character's attachments
self.assertNotIn(attachment.id, character.attachments())
# Attach the attachment
character.attach(attachment)
# Attachment should be among the character's attachments
self.assertIn(attachment.id, character.attachments())
def test_container_containment(self):
self.fail("Test unwritten")
|
from unittest import TestCase
from basics import BaseCharacter
from basics import BaseAttachment
from basics import BaseThing
class ModuleTest(TestCase):
def test_character_attach_attachment(self):
character = BaseCharacter().save()
attachment = BaseAttachment().save()
# Attachment should not be among the character's attachments
self.assertNotIn(attachment.id, character.attachments())
# Attach the attachment
character.attach(attachment)
# Attachment should be among the character's attachments
self.assertIn(attachment.id, character.attachments())
def test_container_containment(self):
thing_a = BaseThing().save()
thing_b = BaseThing().save()
# thing_b should not be among thing_a's stuff
self.assertNotIn(thing_b.id, thing_a.stuff())
# thing_b aint contained
self.assertIsNone(thing_b.container())
# Move thing_b into thing_a
thing_b.move_to(thing_a)
# thing_b should be among thing_a's stuff
self.assertIn(thing_b.id, thing_a.stuff())
# thing_b is contained by thing_a
self.assertEqual(thing_a, thing_b.container())
|
Write test for container containment.
|
Write test for container containment.
|
Python
|
apache-2.0
|
JASchilz/RoverMUD
|
b506b6796a8ed9e778f69ddc7718a8ea3b0f9e7a
|
flynn/__init__.py
|
flynn/__init__.py
|
# coding: utf-8
import flynn.decoder
import flynn.encoder
def dump(obj, fp):
return flynn.encoder.encode(fp, obj)
def dumps(obj):
return flynn.encoder.encode_str(obj)
def dumph(obj):
return "".join(hex(n)[2:].rjust(2, "0") for n in dumps(obj))
def load(s):
return flynn.decoder.decode(s)
def loads(s):
return flynn.decoder.decode(s)
def loadh(s):
return flynn.decoder.decode(s)
|
# coding: utf-8
import base64
import flynn.decoder
import flynn.encoder
__all__ = [
"decoder",
"encoder",
"dump",
"dumps",
"dumph",
"load",
"loads",
"loadh"
]
def dump(obj, fp):
return flynn.encoder.encode(fp, obj)
def dumps(obj):
return flynn.encoder.encode_str(obj)
def dumph(obj):
return base64.b16encode(dumps(obj)).decode("utf-8")
def load(s):
return flynn.decoder.decode(s)
def loads(s):
return flynn.decoder.decode(s)
def loadh(s):
return flynn.decoder.decode(s)
|
Use base64 module to convert between bytes and base16 string
|
Use base64 module to convert between bytes and base16 string
|
Python
|
mit
|
fritz0705/flynn
|
7b71425a4434ac2544340d651f52c0d87ff37132
|
web/impact/impact/v1/helpers/refund_code_helper.py
|
web/impact/impact/v1/helpers/refund_code_helper.py
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from impact.models import RefundCode
from impact.v1.helpers.model_helper import(
INTEGER_ARRAY_FIELD,
INTEGER_FIELD,
ModelHelper,
PK_FIELD,
STRING_FIELD,
)
PROGRAMS_FIELD = {
"json-schema": {
"type": "array",
"items": {"type": "string"},
},
"POST": {"required": False},
"PATCH": {"required": False},
}
REFUND_CODE_FIELDS = {
"id": PK_FIELD,
"issued_to": INTEGER_FIELD,
"created_at": STRING_FIELD,
"unique_code": STRING_FIELD,
"discount": INTEGER_FIELD,
"maximum_uses": INTEGER_FIELD,
"programs": INTEGER_ARRAY_FIELD,
}
class RefundCodeHelper(ModelHelper):
model = RefundCode
@classmethod
def fields(self):
return REFUND_CODE_FIELDS
@property
def issued_to(self):
return self.field_pk("issued_to")
@property
def programs(self):
if hasattr(self.subject, "programs"):
programs = self.subject.programs
if programs:
return [program.pk for program in programs.all()]
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from impact.models import RefundCode
from impact.v1.helpers.model_helper import(
BOOLEAN_FIELD,
INTEGER_ARRAY_FIELD,
INTEGER_FIELD,
ModelHelper,
PK_FIELD,
STRING_FIELD,
)
PROGRAMS_FIELD = {
"json-schema": {
"type": "array",
"items": {"type": "string"},
},
"POST": {"required": False},
"PATCH": {"required": False},
}
REFUND_CODE_FIELDS = {
"id": PK_FIELD,
"issued_to": INTEGER_FIELD,
"created_at": STRING_FIELD,
"unique_code": STRING_FIELD,
"discount": INTEGER_FIELD,
"maximum_uses": INTEGER_FIELD,
"programs": INTEGER_ARRAY_FIELD,
"notes": STRING_FIELD,
"internal": BOOLEAN_FIELD,
}
class RefundCodeHelper(ModelHelper):
model = RefundCode
@classmethod
def fields(self):
return REFUND_CODE_FIELDS
@property
def issued_to(self):
return self.field_pk("issued_to")
@property
def programs(self):
if hasattr(self.subject, "programs"):
programs = self.subject.programs
if programs:
return [program.pk for program in programs.all()]
|
Add Notes and Internal Fields
|
[AC-5291] Add Notes and Internal Fields
|
Python
|
mit
|
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
|
41fbd5b92ac04c3a4ca0e33204bb08b12a533052
|
ibmcnx/doc/DataSources.py
|
ibmcnx/doc/DataSources.py
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminConfig.getid( '"/Cell:' + AdminControl.getCell() + '/"' )
dbs = AdminConfig.list( 'DataSource', cell )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = "'/Cell:" + AdminControl.getCell() + "/'"
print cell
cellid = AdminConfig.getid( )
dbs = AdminConfig.list( 'DataSource', cellid )
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
Create script to save documentation to a file
|
4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
07c3c7e00a4c2733a3233ff483797c798451a87f
|
apps/predict/mixins.py
|
apps/predict/mixins.py
|
"""
Basic view mixins for predict views
"""
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from .models import PredictDataset
class PredictMixin(object):
"""The baseline predict view"""
slug_field = 'md5'
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
"""Only allow a logged in users to view"""
return super(PredictMixin, self).dispatch(request, *args, **kwargs)
def get_queryset(self):
"""Limit queryset to the user's own predictions only"""
qs = PredictDataset.objects.all()
if 'slug' not in self.kwargs:
# Limit to my own predictions unless I have the md5
qs = qs.filter(user_id=self.request.user.pk)
return qs
|
"""
Basic view mixins for predict views
"""
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from .models import PredictDataset
class PredictMixin(object):
"""The baseline predict view"""
slug_field = 'md5'
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
"""Only allow a logged in users to view"""
return super(PredictMixin, self).dispatch(request, *args, **kwargs)
def get_queryset(self):
"""Limit queryset to the user's own predictions only"""
qset = PredictDataset.objects.all()
if 'slug' not in self.kwargs:
# Limit to my own predictions unless I have the md5
qset = qset.filter(user_id=self.request.user.pk)
return qset.prefetch_related('strains', 'strains__piperun', 'strains__piperun__programs')
|
Improve prefetch speed in predict listing pages
|
Improve prefetch speed in predict listing pages
|
Python
|
agpl-3.0
|
IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site
|
324941bb4946cea19800fb1102035bd32e8028db
|
apps/profiles/views.py
|
apps/profiles/views.py
|
from django.views.generic import DetailView, UpdateView
from django.contrib.auth.views import redirect_to_login
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from braces.views import LoginRequiredMixin
from .models import User
class ProfileDetailView(DetailView):
'''
Displays the user profile information
'''
model = User
slug_field = 'username'
slug_url_kwarg = 'username'
def get(self, request, *args, **kwargs):
user = request.user
username = self.kwargs.get(self.slug_url_kwarg)
if user.is_authenticated() and not username:
return redirect('profile_detail', username=user.username)
elif not user.is_authenticated() and not username:
return redirect_to_login(reverse('profile_detail_me'))
return super(ProfileDetailView, self).get(request, *args, **kwargs)
class ProfileUpdateView(LoginRequiredMixin, UpdateView):
model = User
slug_field = 'username'
slug_url_kwarg = 'username'
|
from django.views.generic import DetailView, UpdateView
from django.contrib.auth.views import redirect_to_login
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from braces.views import LoginRequiredMixin
from .models import User
class ProfileDetailView(DetailView):
'''
Displays the user profile information
'''
queryset = User.objects.select_related('location', 'location__country')
slug_field = 'username'
slug_url_kwarg = 'username'
def get(self, request, *args, **kwargs):
user = request.user
username = self.kwargs.get(self.slug_url_kwarg)
if user.is_authenticated() and not username:
return redirect('profile_detail', username=user.username)
elif not user.is_authenticated() and not username:
return redirect_to_login(reverse('profile_detail_me'))
return super(ProfileDetailView, self).get(request, *args, **kwargs)
class ProfileUpdateView(LoginRequiredMixin, UpdateView):
model = User
slug_field = 'username'
slug_url_kwarg = 'username'
|
Use select_related in user profile detail view
|
Use select_related in user profile detail view
|
Python
|
mit
|
SoPR/horas,SoPR/horas,SoPR/horas,SoPR/horas
|
3e842228beba066000eac536635e7e9d4d87c8e2
|
instruments/Instrument.py
|
instruments/Instrument.py
|
from traits.api import HasTraits
import json
class Instrument(HasTraits):
"""
Main super-class for all instruments.
"""
def get_settings(self):
return self.__getstate__()
def set_settings(self, settings):
for key,value in settings.items():
setattr(self, key, value)
|
from traits.api import HasTraits, Bool
import json
class Instrument(HasTraits):
"""
Main super-class for all instruments.
"""
enabled = Bool(True, desc='Whether the unit is used/enabled.')
def get_settings(self):
return self.__getstate__()
def set_settings(self, settings):
for key,value in settings.items():
setattr(self, key, value)
|
Add enabled to top-level instrument class.
|
Add enabled to top-level instrument class.
|
Python
|
apache-2.0
|
Plourde-Research-Lab/PyQLab,rmcgurrin/PyQLab,calebjordan/PyQLab,BBN-Q/PyQLab
|
cfe594ec7576ba36e93762981067ad02176a585e
|
instruments/Instrument.py
|
instruments/Instrument.py
|
from traits.api import HasTraits
import json
class Instrument(HasTraits):
"""
Main super-class for all instruments.
"""
def get_settings(self):
return self.__getstate__()
def set_settings(self, settings):
for key,value in settings.items():
setattr(self, key, value)
|
from traits.api import HasTraits, Bool
import json
class Instrument(HasTraits):
"""
Main super-class for all instruments.
"""
enabled = Bool(True, desc='Whether the unit is used/enabled.')
def get_settings(self):
return self.__getstate__()
def set_settings(self, settings):
for key,value in settings.items():
setattr(self, key, value)
|
Add enabled to top-level instrument class.
|
Add enabled to top-level instrument class.
|
Python
|
apache-2.0
|
Plourde-Research-Lab/PyQLab,BBN-Q/PyQLab,calebjordan/PyQLab,rmcgurrin/PyQLab
|
8beb6ddd2e58d6a3e54ab297d490c6650fb85a9d
|
logya/generate.py
|
logya/generate.py
|
# -*- coding: utf-8 -*-
import os
import shutil
from logya.core import Logya
from logya.fs import copytree
from logya.writer import DocWriter
class Generate(Logya):
"""Generate a Web site to deploy from current directory as source."""
def __init__(self, **kwargs):
super(self.__class__, self).__init__(**kwargs)
self.init_env()
# Init writer before executing scripts, so they can use it.
self.writer = DocWriter(self.dir_deploy, self.template)
if not kwargs['keep']:
self.info('Remove existing deploy directory')
shutil.rmtree(self.dir_deploy, True)
self.info('Generating site in directory: {}'.format(self.dir_deploy))
if os.path.exists(self.dir_static):
self.info('Copy static files')
copytree(self.dir_static, self.dir_deploy)
self.info('Build document index')
self.build_index()
self.info('Write documents')
for doc in self.docs.values():
self.writer.write(doc, self.get_doc_template(doc))
self.info(
'Written {:d} documents to deploy directory'
.format(len(self.docs)))
self.info('Write index files')
self.write_index_files()
self.info(
'Written {:d} index files to deploy directory'
.format(len(self.index)))
|
# -*- coding: utf-8 -*-
import os
import shutil
from logya.core import Logya
from logya.fs import copytree
from logya.writer import DocWriter
class Generate(Logya):
"""Generate a Web site to deploy from current directory as source."""
def __init__(self, **kwargs):
super(self.__class__, self).__init__(**kwargs)
self.init_env()
self.writer = DocWriter(self.dir_deploy, self.template)
if not kwargs['keep']:
self.info('Remove existing deploy directory')
shutil.rmtree(self.dir_deploy, True)
self.info('Generate site in directory: {}'.format(self.dir_deploy))
if os.path.exists(self.dir_static):
self.info('Copy static files')
copytree(self.dir_static, self.dir_deploy)
self.build()
self.write()
def build(self):
self.info('Build document index')
self.build_index()
def write(self):
self.info('Write documents')
for doc in self.docs.values():
self.writer.write(doc, self.get_doc_template(doc))
self.info(
'Written {:d} documents to deploy directory'
.format(len(self.docs)))
self.info('Write index files')
self.write_index_files()
self.info(
'Written {:d} index files to deploy directory'
.format(len(self.index)))
|
Add build and write function to make it easy to subclass Generate and overwrite build step
|
Add build and write function to make it easy to subclass Generate and overwrite build step
|
Python
|
mit
|
elaOnMars/logya,elaOnMars/logya,elaOnMars/logya,yaph/logya,yaph/logya
|
9971e5424b998f45e26b9da8288f20d641885043
|
massa/__init__.py
|
massa/__init__.py
|
# -*- coding: utf-8 -*-
from flask import Flask, render_template, g
from flask.ext.appconfig import AppConfig
def create_app(configfile=None):
app = Flask('massa')
AppConfig(app, configfile)
@app.route('/')
def index():
return render_template('index.html')
from .container import build
sl = build(app.config)
from .api import bp
app.register_blueprint(bp, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
return app
|
# -*- coding: utf-8 -*-
from flask import Flask, render_template, g
from flask.ext.appconfig import AppConfig
from .container import build
from .api import bp as api
def create_app(configfile=None):
app = Flask('massa')
AppConfig(app, configfile)
@app.route('/')
def index():
return render_template('index.html')
sl = build(app.config)
app.register_blueprint(api, url_prefix='/api')
@app.before_request
def globals():
g.sl = sl
return app
|
Move import statements to the top.
|
Move import statements to the top.
|
Python
|
mit
|
jaapverloop/massa
|
12c97be97a8816720899531b932be99743b6d90d
|
rest_framework_plist/__init__.py
|
rest_framework_plist/__init__.py
|
# -*- coding: utf-8 -*-
from distutils import version
__version__ = '0.2.0'
version_info = version.StrictVersion(__version__).version
|
# -*- coding: utf-8 -*-
from distutils import version
__version__ = '0.2.0'
version_info = version.StrictVersion(__version__).version
from .parsers import PlistParser # NOQA
from .renderers import PlistRenderer # NOQA
|
Make parser and renderer available at package root
|
Make parser and renderer available at package root
|
Python
|
bsd-2-clause
|
lpomfrey/django-rest-framework-plist,pombredanne/django-rest-framework-plist
|
3f7371c796a420cc077cf79b210d401c77b77815
|
rest_framework/response.py
|
rest_framework/response.py
|
from django.core.handlers.wsgi import STATUS_CODE_TEXT
from django.template.response import SimpleTemplateResponse
class Response(SimpleTemplateResponse):
"""
An HttpResponse that allows it's data to be rendered into
arbitrary media types.
"""
def __init__(self, data=None, status=None, headers=None,
renderer=None, accepted_media_type=None):
"""
Alters the init arguments slightly.
For example, drop 'template_name', and instead use 'data'.
Setting 'renderer' and 'media_type' will typically be defered,
For example being set automatically by the `APIView`.
"""
super(Response, self).__init__(None, status=status)
self.data = data
self.headers = headers and headers[:] or []
self.renderer = renderer
self.accepted_media_type = accepted_media_type
@property
def rendered_content(self):
self['Content-Type'] = self.renderer.media_type
if self.data is None:
return self.renderer.render()
render_media_type = self.accepted_media_type or self.renderer.media_type
return self.renderer.render(self.data, render_media_type)
@property
def status_text(self):
"""
Returns reason text corresponding to our HTTP response status code.
Provided for convenience.
"""
return STATUS_CODE_TEXT.get(self.status_code, '')
|
from django.core.handlers.wsgi import STATUS_CODE_TEXT
from django.template.response import SimpleTemplateResponse
class Response(SimpleTemplateResponse):
"""
An HttpResponse that allows it's data to be rendered into
arbitrary media types.
"""
def __init__(self, data=None, status=None, headers=None,
renderer=None, accepted_media_type=None):
"""
Alters the init arguments slightly.
For example, drop 'template_name', and instead use 'data'.
Setting 'renderer' and 'media_type' will typically be defered,
For example being set automatically by the `APIView`.
"""
super(Response, self).__init__(None, status=status)
self.data = data
self.headers = headers and headers[:] or []
self.renderer = renderer
# Accepted media type is the portion of the request Accept header
# that the renderer satisfied. It could be '*/*', or somthing like
# 'application/json; indent=4'
#
# This is NOT the value that will be returned in the 'Content-Type'
# header, but we do need to know the value in case there are
# any specific parameters which affect the rendering process.
self.accepted_media_type = accepted_media_type
@property
def rendered_content(self):
self['Content-Type'] = self.renderer.media_type
if self.data is None:
return self.renderer.render()
render_media_type = self.accepted_media_type or self.renderer.media_type
return self.renderer.render(self.data, render_media_type)
@property
def status_text(self):
"""
Returns reason text corresponding to our HTTP response status code.
Provided for convenience.
"""
return STATUS_CODE_TEXT.get(self.status_code, '')
|
Tweak media_type -> accepted_media_type. Need to document, but marginally less confusing
|
Tweak media_type -> accepted_media_type. Need to document, but marginally less confusing
|
Python
|
bsd-2-clause
|
kylefox/django-rest-framework,cyberj/django-rest-framework,vstoykov/django-rest-framework,wedaly/django-rest-framework,canassa/django-rest-framework,tomchristie/django-rest-framework,linovia/django-rest-framework,cheif/django-rest-framework,nhorelik/django-rest-framework,jpulec/django-rest-framework,James1345/django-rest-framework,ashishfinoit/django-rest-framework,ticosax/django-rest-framework,rubendura/django-rest-framework,d0ugal/django-rest-framework,ashishfinoit/django-rest-framework,werthen/django-rest-framework,adambain-vokal/django-rest-framework,jpadilla/django-rest-framework,kgeorgy/django-rest-framework,ebsaral/django-rest-framework,jerryhebert/django-rest-framework,VishvajitP/django-rest-framework,edx/django-rest-framework,pombredanne/django-rest-framework,douwevandermeij/django-rest-framework,douwevandermeij/django-rest-framework,maryokhin/django-rest-framework,nryoung/django-rest-framework,jness/django-rest-framework,rafaelang/django-rest-framework,wzbozon/django-rest-framework,johnraz/django-rest-framework,ossanna16/django-rest-framework,maryokhin/django-rest-framework,VishvajitP/django-rest-framework,agconti/django-rest-framework,kennydude/django-rest-framework,brandoncazander/django-rest-framework,callorico/django-rest-framework,antonyc/django-rest-framework,alacritythief/django-rest-framework,wangpanjun/django-rest-framework,rhblind/django-rest-framework,iheitlager/django-rest-framework,bluedazzle/django-rest-framework,atombrella/django-rest-framework,gregmuellegger/django-rest-framework,paolopaolopaolo/django-rest-framework,elim/django-rest-framework,kgeorgy/django-rest-framework,nryoung/django-rest-framework,kezabelle/django-rest-framework,cheif/django-rest-framework,aericson/django-rest-framework,xiaotangyuan/django-rest-framework,tigeraniya/django-rest-framework,nhorelik/django-rest-framework,YBJAY00000/django-rest-framework,sheppard/django-rest-framework,jpulec/django-rest-framework,wangpanjun/django-rest-framework,justanr/django-rest-framework,agconti/django-rest-framework,hunter007/django-rest-framework,sbellem/django-rest-framework,canassa/django-rest-framework,abdulhaq-e/django-rest-framework,AlexandreProenca/django-rest-framework,elim/django-rest-framework,arpheno/django-rest-framework,werthen/django-rest-framework,potpath/django-rest-framework,damycra/django-rest-framework,delinhabit/django-rest-framework,ticosax/django-rest-framework,ticosax/django-rest-framework,rafaelang/django-rest-framework,HireAnEsquire/django-rest-framework,wzbozon/django-rest-framework,raphaelmerx/django-rest-framework,hnakamur/django-rest-framework,edx/django-rest-framework,buptlsl/django-rest-framework,yiyocx/django-rest-framework,potpath/django-rest-framework,wwj718/django-rest-framework,hunter007/django-rest-framework,jness/django-rest-framework,fishky/django-rest-framework,andriy-s/django-rest-framework,antonyc/django-rest-framework,ajaali/django-rest-framework,damycra/django-rest-framework,yiyocx/django-rest-framework,qsorix/django-rest-framework,buptlsl/django-rest-framework,abdulhaq-e/django-rest-framework,buptlsl/django-rest-framework,dmwyatt/django-rest-framework,yiyocx/django-rest-framework,aericson/django-rest-framework,jness/django-rest-framework,uruz/django-rest-framework,ambivalentno/django-rest-framework,dmwyatt/django-rest-framework,MJafarMashhadi/django-rest-framework,adambain-vokal/django-rest-framework,kylefox/django-rest-framework,thedrow/django-rest-framework-1,canassa/django-rest-framework,zeldalink0515/django-rest-framework,sehmaschine/django-rest-framework,paolopaolopaolo/django-rest-framework,aericson/django-rest-framework,agconti/django-rest-framework,nhorelik/django-rest-framework,xiaotangyuan/django-rest-framework,zeldalink0515/django-rest-framework,krinart/django-rest-framework,bluedazzle/django-rest-framework,rafaelcaricio/django-rest-framework,leeahoward/django-rest-framework,iheitlager/django-rest-framework,raphaelmerx/django-rest-framework,jpadilla/django-rest-framework,abdulhaq-e/django-rest-framework,hunter007/django-rest-framework,kennydude/django-rest-framework,davesque/django-rest-framework,iheitlager/django-rest-framework,ebsaral/django-rest-framework,ebsaral/django-rest-framework,akalipetis/django-rest-framework,tcroiset/django-rest-framework,wedaly/django-rest-framework,James1345/django-rest-framework,xiaotangyuan/django-rest-framework,sehmaschine/django-rest-framework,cyberj/django-rest-framework,mgaitan/django-rest-framework,tigeraniya/django-rest-framework,mgaitan/django-rest-framework,hnakamur/django-rest-framework,MJafarMashhadi/django-rest-framework,MJafarMashhadi/django-rest-framework,alacritythief/django-rest-framework,rafaelang/django-rest-framework,simudream/django-rest-framework,zeldalink0515/django-rest-framework,simudream/django-rest-framework,d0ugal/django-rest-framework,kylefox/django-rest-framework,ezheidtmann/django-rest-framework,ajaali/django-rest-framework,leeahoward/django-rest-framework,sbellem/django-rest-framework,waytai/django-rest-framework,rafaelcaricio/django-rest-framework,mgaitan/django-rest-framework,tomchristie/django-rest-framework,hnakamur/django-rest-framework,uploadcare/django-rest-framework,cheif/django-rest-framework,pombredanne/django-rest-framework,sheppard/django-rest-framework,wwj718/django-rest-framework,tcroiset/django-rest-framework,krinart/django-rest-framework,atombrella/django-rest-framework,lubomir/django-rest-framework,AlexandreProenca/django-rest-framework,brandoncazander/django-rest-framework,raphaelmerx/django-rest-framework,arpheno/django-rest-framework,delinhabit/django-rest-framework,brandoncazander/django-rest-framework,waytai/django-rest-framework,ajaali/django-rest-framework,gregmuellegger/django-rest-framework,leeahoward/django-rest-framework,paolopaolopaolo/django-rest-framework,HireAnEsquire/django-rest-framework,arpheno/django-rest-framework,jpadilla/django-rest-framework,jerryhebert/django-rest-framework,andriy-s/django-rest-framework,krinart/django-rest-framework,ezheidtmann/django-rest-framework,davesque/django-rest-framework,vstoykov/django-rest-framework,tomchristie/django-rest-framework,ezheidtmann/django-rest-framework,simudream/django-rest-framework,thedrow/django-rest-framework-1,ambivalentno/django-rest-framework,rubendura/django-rest-framework,adambain-vokal/django-rest-framework,justanr/django-rest-framework,johnraz/django-rest-framework,fishky/django-rest-framework,jpulec/django-rest-framework,kezabelle/django-rest-framework,d0ugal/django-rest-framework,ossanna16/django-rest-framework,wwj718/django-rest-framework,uploadcare/django-rest-framework,fishky/django-rest-framework,douwevandermeij/django-rest-framework,lubomir/django-rest-framework,YBJAY00000/django-rest-framework,linovia/django-rest-framework,lubomir/django-rest-framework,ashishfinoit/django-rest-framework,vstoykov/django-rest-framework,ossanna16/django-rest-framework,linovia/django-rest-framework,antonyc/django-rest-framework,wedaly/django-rest-framework,rhblind/django-rest-framework,sehmaschine/django-rest-framework,YBJAY00000/django-rest-framework,potpath/django-rest-framework,thedrow/django-rest-framework-1,delinhabit/django-rest-framework,VishvajitP/django-rest-framework,elim/django-rest-framework,jtiai/django-rest-framework,rafaelcaricio/django-rest-framework,sbellem/django-rest-framework,callorico/django-rest-framework,pombredanne/django-rest-framework,andriy-s/django-rest-framework,kgeorgy/django-rest-framework,sheppard/django-rest-framework,akalipetis/django-rest-framework,tigeraniya/django-rest-framework,hnarayanan/django-rest-framework,cyberj/django-rest-framework,atombrella/django-rest-framework,dmwyatt/django-rest-framework,HireAnEsquire/django-rest-framework,waytai/django-rest-framework,wangpanjun/django-rest-framework,damycra/django-rest-framework,ambivalentno/django-rest-framework,AlexandreProenca/django-rest-framework,nryoung/django-rest-framework,gregmuellegger/django-rest-framework,hnarayanan/django-rest-framework,johnraz/django-rest-framework,James1345/django-rest-framework,tcroiset/django-rest-framework,uruz/django-rest-framework,uploadcare/django-rest-framework,werthen/django-rest-framework,davesque/django-rest-framework,bluedazzle/django-rest-framework,qsorix/django-rest-framework,alacritythief/django-rest-framework,callorico/django-rest-framework,jerryhebert/django-rest-framework,jtiai/django-rest-framework,jtiai/django-rest-framework,rubendura/django-rest-framework,kennydude/django-rest-framework,qsorix/django-rest-framework,uruz/django-rest-framework,edx/django-rest-framework,justanr/django-rest-framework,akalipetis/django-rest-framework,rhblind/django-rest-framework,hnarayanan/django-rest-framework,wzbozon/django-rest-framework,kezabelle/django-rest-framework,maryokhin/django-rest-framework
|
eb763a7c7048b857d408825241ed3de6b68b88f6
|
1/sumofmultiplesof3and5.py
|
1/sumofmultiplesof3and5.py
|
# Project Euler - Problem 1
sum = 0
for i in xrange(1, 1001):
if i % 3 == 0 or i % 5 == 0:
sum = sum + i
print "The sum is: {}".format(sum)
|
# Project Euler - Problem 1
# If we list all the natural numbers below 10 that are multiples of 3 or 5,
# we get 3, 5, 6 and 9. The sum of these multiples is 23.
# Find the sum of all the multiples of 3 or 5 below 1000.
def main(limit):
sum = 0
for i in xrange(1, limit):
if i % 3 == 0 or i % 5 == 0:
sum = sum + i
print "The sum of all multiples of 3 and 5 below {} is: {}".format(limit, sum)
if __name__ == "__main__":
main(10)
main(1001)
|
Clean up problem 1 solution a bit.
|
Clean up problem 1 solution a bit.
|
Python
|
mit
|
gregmojonnier/ProjectEuler
|
d05c68b110e4adf5f411816196cf1f457e51951e
|
nbrmd/__init__.py
|
nbrmd/__init__.py
|
"""R markdown notebook format for Jupyter
Use this module to read or write Jupyter notebooks as Rmd documents (methods 'read', 'reads', 'write', 'writes')
Use the 'pre_save_hook' method (see its documentation) to automatically dump your Jupyter notebooks as a Rmd file, in addition
to the ipynb file.
Use the 'nbrmd' conversion script to convert Jupyter notebooks from/to R markdown notebooks.
"""
from .nbrmd import read, reads, readf, write, writes, writef
from .hooks import update_rmd, update_ipynb, update_rmd_and_ipynb, update_selected_formats
from .cm import RmdFileContentsManager
|
"""R markdown notebook format for Jupyter
Use this module to read or write Jupyter notebooks as Rmd documents (methods 'read', 'reads', 'write', 'writes')
Use the 'pre_save_hook' method (see its documentation) to automatically dump your Jupyter notebooks as a Rmd file, in addition
to the ipynb file.
Use the 'nbrmd' conversion script to convert Jupyter notebooks from/to R markdown notebooks.
"""
from .nbrmd import read, reads, readf, write, writes, writef
from .hooks import update_rmd, update_ipynb, update_rmd_and_ipynb, update_selected_formats
try:
from .cm import RmdFileContentsManager
except ImportError as e:
RmdFileContentsManager = e.message
|
Allow import in case of missing notebook package
|
Allow import in case of missing notebook package
|
Python
|
mit
|
mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext
|
e75cba739b92a7209cee87f66d2c8c9df3f97799
|
bumper_kilt/scripts/run_kilt.py
|
bumper_kilt/scripts/run_kilt.py
|
#!/usr/bin/python
import time
import serial
# configure the serial connections (the parameters differs on the
# device you are connecting to)
class Bumper(object):
def __init__(self):
try:
self.ser = serial.Serial(
port="/dev/ttyS0",
baudrate=9600,
parity=serial.PARITY_ODD,
stopbits=serial.STOPBITS_TWO,
bytesize=serial.SEVENBITS
)
except Exception:
print("Bad initialisation! Check the configuration of "
"the serial port!")
exit()
self.ser.open()
self.ser.isOpen()
def loop(self):
input=1
while 1 :
# get keyboard input
input = raw_input(">> ")
# Python 3 users
# input = input(">> ")
if input == "exit":
self.ser.close()
exit()
else:
# send the character to the device
# (note that I happend a \r\n carriage return and line feed to
# the characters - this is requested by my device)
self.ser.write(input + "\r\n")
out = ""
# let's wait one second before reading output (let's give
# device time to answer)
time.sleep(1)
while self.ser.inWaiting() > 0:
out += self.ser.read(1)
if out != "":
print ">> " + out
def main():
b = Bumper()
b.loop()
if __name__ == "__main__":
main()
|
#!/usr/bin/python
import time
import serial
# configure the serial connections (the parameters differs on the
# device you are connecting to)
class Bumper(object):
def __init__(self):
try:
self.ser = serial.Serial(
port="/dev/ttyS0",
baudrate=38400,
parity=serial.PARITY_ODD,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS
)
except Exception:
print("Bad initialisation! Check the configuration of "
"the serial port!")
exit()
self.ser.open()
self.ser.isOpen()
def loop(self):
input=1
while 1 :
# get keyboard input
input = raw_input(">> ")
# Python 3 users
# input = input(">> ")
if input == "exit":
self.ser.close()
exit()
else:
# send the character to the device
# (note that I happend a \r\n carriage return and line feed to
# the characters - this is requested by my device)
self.ser.write(input + "\r\n")
out = ""
# let's wait one second before reading output (let's give
# device time to answer)
time.sleep(1)
while self.ser.inWaiting() > 0:
out += self.ser.read(1)
if out != "":
print ">> " + out
def main():
b = Bumper()
b.loop()
if __name__ == "__main__":
main()
|
Set parameters of serial class to match with kilt
|
Set parameters of serial class to match with kilt
|
Python
|
mit
|
ipab-rad/rad_youbot_stack,ipab-rad/rad_youbot_stack,ipab-rad/rad_youbot_stack,ipab-rad/rad_youbot_stack
|
be03e3d6c1323e8c750afc1d4e80997f3d9d52f3
|
cyder/cydhcp/interface/dynamic_intr/forms.py
|
cyder/cydhcp/interface/dynamic_intr/forms.py
|
from django import forms
from cyder.cydhcp.interface.dynamic_intr.models import (DynamicInterface,
DynamicIntrKeyValue)
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.forms import RangeWizard
class DynamicInterfaceForm(RangeWizard, UsabilityFormMixin):
def __init__(self, *args, **kwargs):
super(DynamicInterfaceForm, self).__init__(*args, **kwargs)
self.fields.keyOrder = ['system', 'domain', 'mac', 'vrf',
'site', 'range', 'workgroup', 'dhcp_enabled',
'dns_enabled', 'ctnr']
class Meta:
model = DynamicInterface
exclude = ('last_seen')
class DynamicIntrKeyValueForm(forms.ModelForm):
dynamic_interface = forms.ModelChoiceField(
queryset=DynamicInterface.objects.all(),
widget=forms.HiddenInput())
class Meta:
model = DynamicIntrKeyValue
exclude = ('is_option', 'is_statement', 'is_quoted',)
|
from django import forms
from cyder.cydhcp.interface.dynamic_intr.models import (DynamicInterface,
DynamicIntrKeyValue)
from cyder.base.mixins import UsabilityFormMixin
from cyder.cydhcp.forms import RangeWizard
class DynamicInterfaceForm(RangeWizard, UsabilityFormMixin):
def __init__(self, *args, **kwargs):
super(DynamicInterfaceForm, self).__init__(*args, **kwargs)
self.fields.keyOrder = ['system', 'domain', 'mac', 'vrf',
'site', 'range', 'workgroup', 'dhcp_enabled',
'dns_enabled', 'ctnr']
self.fields['range'].required = True
class Meta:
model = DynamicInterface
exclude = ('last_seen')
class DynamicIntrKeyValueForm(forms.ModelForm):
dynamic_interface = forms.ModelChoiceField(
queryset=DynamicInterface.objects.all(),
widget=forms.HiddenInput())
class Meta:
model = DynamicIntrKeyValue
exclude = ('is_option', 'is_statement', 'is_quoted',)
|
Reset range to be required in dynamic intr form
|
Reset range to be required in dynamic intr form
|
Python
|
bsd-3-clause
|
akeym/cyder,akeym/cyder,OSU-Net/cyder,akeym/cyder,zeeman/cyder,zeeman/cyder,murrown/cyder,OSU-Net/cyder,drkitty/cyder,drkitty/cyder,zeeman/cyder,murrown/cyder,drkitty/cyder,OSU-Net/cyder,murrown/cyder,akeym/cyder,drkitty/cyder,OSU-Net/cyder,murrown/cyder,zeeman/cyder
|
f67abceeae7716cd385a308b26ce447e0277518f
|
tests/git_wrapper_integration_tests.py
|
tests/git_wrapper_integration_tests.py
|
import unittest
import util
from git_wrapper import GitWrapper
class GitWrapperIntegrationTest(util.RepoTestCase):
def test_paths(self):
self.open_tar_repo('project01')
assert('test_file.txt' in self.repo.paths)
assert('hello_world.rb' in self.repo.paths)
def test_stage(self):
self.open_tar_repo('project02')
assert('not_committed_file.txt' in self.repo.stage)
assert('second_not_committed_file.txt' in self.repo.stage)
def test_paths_external_git_folder(self):
self.open_tar_repo('project03', '../project03.git')
assert('test_file.txt' in self.repo.paths)
assert('hello_world.rb' in self.repo.paths)
def test_stage_external_git_folder(self):
self.open_tar_repo('project04', '../project04.git')
assert('not_committed_file.txt' in self.repo.stage)
assert('second_not_committed_file.txt' in self.repo.stage)
|
import unittest
import util
from git_wrapper import GitWrapper
class GitWrapperIntegrationTest(util.RepoTestCase):
def test_paths(self):
self.open_tar_repo('project01')
assert('test_file.txt' in self.repo.paths)
assert('hello_world.rb' in self.repo.paths)
def test_stage(self):
self.open_tar_repo('project02')
assert('not_committed_file.txt' in self.repo.stage)
assert('second_not_committed_file.txt' in self.repo.stage)
class GitWrapperIntegrationTestExternalGitFolder(util.RepoTestCase):
def test_paths_external(self):
self.open_tar_repo('project03', '../project03.git')
assert('test_file.txt' in self.repo.paths)
assert('hello_world.rb' in self.repo.paths)
def test_stage_external(self):
self.open_tar_repo('project04', '../project04.git')
assert('not_committed_file.txt' in self.repo.stage)
assert('second_not_committed_file.txt' in self.repo.stage)
|
Move external git folder integration tests to a separate class
|
Move external git folder integration tests to a separate class
|
Python
|
mit
|
siu/git_repo
|
feef7985133241c5e11622b0932d3eb629e7fbfe
|
craigschart/craigschart.py
|
craigschart/craigschart.py
|
def main():
print('Hello, World.')
if __name__ == '__main__':
main()
|
from bs4 import BeautifulSoup
import requests
def get_html():
r = requests.get('http://vancouver.craigslist.ca/search/cto?query=Expedition')
print(r.status_code)
print(r.text)
return r.text
def main():
html = get_html()
soup = BeautifulSoup(html, 'lxml')
print(soup.prettify())
mydivs = soup.findAll('a', {'class': 'hdrlnk'})
for t in mydivs:
print(t)
print('Hello, World.')
if __name__ == '__main__':
main()
|
Add soup extraction of links in results page
|
Add soup extraction of links in results page
|
Python
|
mit
|
supermitch/craigschart
|
8c6b4396047736d5caf00ec30b4283ee7cdc793e
|
lighty/wsgi/decorators.py
|
lighty/wsgi/decorators.py
|
'''
'''
import functools
import operator
from .. import monads
def view(func, **constraints):
'''Functions that decorates a view. This function can also checks the
argument values
'''
func.is_view = True
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
if not functools.reduce(operator.__and__,
[constraints[arg](kwargs[arg])
for arg in constraints]):
return monads.NoneMonad(ValueError(
'Wrong view argument value'))
return monads.ValueMonad(func(*args, **kwargs))
except Exception as e:
return monads.NoneMonad(e)
return wrapper
|
'''
'''
import functools
import operator
from .. import monads
def view(func, **constraints):
'''Functions that decorates a view. This function can also checks the
argument values
'''
func.is_view = True
@functools.wraps(func)
@monads.handle_exception
def wrapper(*args, **kwargs):
if not functools.reduce(operator.__and__,
[constraints[arg](kwargs[arg])
for arg in constraints]):
return monads.NoneMonad(ValueError('Wrong view argument value'))
return monads.ValueMonad(func(*args, **kwargs))
return wrapper
|
Use exception handling with decorator
|
Use exception handling with decorator
|
Python
|
bsd-3-clause
|
GrAndSE/lighty
|
7ef1717f34360ae48f640439fd6d6706ae755e90
|
functional_tests/base.py
|
functional_tests/base.py
|
from selenium import webdriver
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.core.cache import cache
class BrowserTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.PhantomJS()
self.browser.set_window_size(1024, 768)
def tearDown(self):
self.browser.quit()
cache.clear()
|
from selenium.webdriver.chrome.webdriver import WebDriver
from selenium.webdriver.chrome.options import Options
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.core.cache import cache
class BrowserTest(StaticLiveServerTestCase):
def setUp(self):
chrome_options = Options()
chrome_options.add_argument("--headless")
chrome_options.add_argument("--window-size=1920x1080")
self.browser = WebDriver(chrome_options=chrome_options)
self.browser.set_window_size(1024, 768)
def tearDown(self):
self.browser.quit()
cache.clear()
|
Use headless chrome for functional test
|
Use headless chrome for functional test
|
Python
|
mit
|
essanpupil/cashflow,essanpupil/cashflow
|
9fdea42df37c722aefb5e8fb7c04c45c06c20f17
|
tests/test_client_users.py
|
tests/test_client_users.py
|
import pydle
from .fixtures import with_client
from .mocks import Mock
@with_client()
def test_user_creation(server, client):
client._create_user('WiZ')
assert 'WiZ' in client.users
assert client.users['WiZ']['nickname'] == 'WiZ'
@with_client()
def test_user_renaming(server, client):
client._create_user('WiZ')
client._rename_user('WiZ', 'jilles')
assert 'WiZ' not in client.users
assert 'jilles' in client.users
assert client.users['jilles']['nickname'] == 'jilles'
@with_client()
def test_user_deletion(server, client):
client._create_user('WiZ')
client._destroy_user('WiZ')
assert 'WiZ' not in client.users
|
import pydle
from .fixtures import with_client
@with_client()
def test_client_same_nick(server, client):
assert client.is_same_nick('WiZ', 'WiZ')
assert not client.is_same_nick('WiZ', 'jilles')
assert not client.is_same_nick('WiZ', 'wiz')
@with_client()
def test_user_creation(server, client):
client._create_user('WiZ')
assert 'WiZ' in client.users
assert client.users['WiZ']['nickname'] == 'WiZ'
@with_client()
def test_user_invalid_creation(server, client):
client._create_user('irc.fbi.gov')
assert 'irc.fbi.gov' not in client.users
@with_client()
def test_user_renaming(server, client):
client._create_user('WiZ')
client._rename_user('WiZ', 'jilles')
assert 'WiZ' not in client.users
assert 'jilles' in client.users
assert client.users['jilles']['nickname'] == 'jilles'
@with_client()
def test_user_renaming_creation(server, client):
client._rename_user('null', 'WiZ')
assert 'WiZ' in client.users
assert 'null' not in client.users
@with_client()
def test_user_deletion(server, client):
client._create_user('WiZ')
client._destroy_user('WiZ')
assert 'WiZ' not in client.users
@with_client()
def test_user_synchronization(server, client):
client._create_user('WiZ')
client._sync_user('WiZ', { 'hostname': 'og.irc.developer' })
assert client.users['WiZ']['hostname'] == 'og.irc.developer'
@with_client()
def test_user_synchronization_creation(server, client):
client._sync_user('WiZ', {})
assert 'WiZ' in client.users
@with_client()
def test_user_invalid_synchronization(server, client):
client._sync_user('irc.fbi.gov', {})
assert 'irc.fbi.gov' not in client.users
|
Extend client:users tests to renaming and synchronization.
|
tests: Extend client:users tests to renaming and synchronization.
|
Python
|
bsd-3-clause
|
Shizmob/pydle
|
b5fc673d44624dfddfbdd98c9806b7e7e2f67331
|
simplekv/memory/memcachestore.py
|
simplekv/memory/memcachestore.py
|
#!/usr/bin/env python
# coding=utf8
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from .. import KeyValueStore
class MemcacheStore(KeyValueStore):
def __contains__(self, key):
try:
return key in self.mc
except TypeError:
raise IOError('memcache implementation does not support '\
'__contains__')
def __init__(self, mc):
self.mc = mc
def _delete(self, key):
self.mc.delete(key)
def _get(self, key):
rv = self.mc.get(key)
if None == rv:
raise KeyError(key)
return rv
def _get_file(self, key, file):
file.write(self._get(key))
def _open(self, key):
return StringIO(self._get(key))
def _put(self, key, data):
self.mc.set(key, data)
return key
def _put_file(self, key, file):
self.mc.set(key, file.read())
return key
def keys(self):
raise IOError('Memcache does not support listing keys.')
def iter_keys(self):
raise IOError('Memcache does not support key iteration.')
|
#!/usr/bin/env python
# coding=utf8
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from .. import KeyValueStore
class MemcacheStore(KeyValueStore):
def __contains__(self, key):
try:
return key in self.mc
except TypeError:
raise IOError('memcache implementation does not support '\
'__contains__')
def __init__(self, mc):
self.mc = mc
def _delete(self, key):
if not self.mc.delete(key):
raise IOError('Error deleting key')
def _get(self, key):
rv = self.mc.get(key)
if None == rv:
raise KeyError(key)
return rv
def _get_file(self, key, file):
file.write(self._get(key))
def _open(self, key):
return StringIO(self._get(key))
def _put(self, key, data):
if not self.mc.set(key, data):
if len(data) >= 1024 * 1023:
raise IOError('Failed to store data, probably too large. '\
'memcached limit is 1M')
raise IOError('Failed to store data')
return key
def _put_file(self, key, file):
return self._put(key, file.read())
def keys(self):
raise IOError('Memcache does not support listing keys.')
def iter_keys(self):
raise IOError('Memcache does not support key iteration.')
|
Check if putting/getting was actually successful.
|
Check if putting/getting was actually successful.
|
Python
|
mit
|
fmarczin/simplekv,fmarczin/simplekv,karteek/simplekv,mbr/simplekv,karteek/simplekv,mbr/simplekv
|
f4c99f4a1b3e49e0768af1b4b6444ee33bef49ac
|
microauth/urls.py
|
microauth/urls.py
|
"""microauth URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns = [
url(r'^admin/', admin.site.urls),
# django-oauth-toolkit
url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')),
url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')),
url(r'^api/', include('apps.api.urls', namespace='microauth_api')),
]
urlpatterns += staticfiles_urlpatterns()
|
"""microauth URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns = [
url(r'^admin/', admin.site.urls),
# django-oauth-toolkit
url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')),
url(r'^', include('apps.authentication.urls', namespace='microauth_authentication')),
url(r'^api/', include('apps.api.urls', namespace='microauth_api')),
url(r'^accounts/login/$', 'django.contrib.auth.views.login', {'template_name': 'admin/login.html'}),
]
urlpatterns += staticfiles_urlpatterns()
|
Add a missing route leading to the login page.
|
Add a missing route leading to the login page.
|
Python
|
mit
|
microserv/microauth,microserv/microauth,microserv/microauth
|
49f332149ae8a9a3b5faf82bc20b46dfaeb0a3ad
|
indra/sources/ctd/api.py
|
indra/sources/ctd/api.py
|
import pandas
from .processor import CTDChemicalDiseaseProcessor, \
CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor
base_url = 'http://ctdbase.org/reports/'
urls = {
'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz',
'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz',
'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz',
}
processors = {
'chemical_gene': CTDChemicalGeneProcessor,
'chemical_disease': CTDChemicalDiseaseProcessor,
'gene_disease': CTDGeneDiseaseProcessor,
}
def process_from_web(subset):
if subset not in urls:
raise ValueError('%s is not a valid CTD subset.')
df = pandas.read_csv(urls[subset], sep='\t', comment='#',
header=None)
return process_dataframe(df)
def process_tsv(fname, subset):
df = pandas.read_csv(fname, sep='\t', comment='#', header=None)
return process_dataframe(df, subset)
def process_dataframe(df, subset):
if subset not in processors:
raise ValueError('%s is not a valid CTD subset.')
cp = processors[subset](df)
cp.extract_statements()
return cp
|
import pandas
from .processor import CTDChemicalDiseaseProcessor, \
CTDGeneDiseaseProcessor, CTDChemicalGeneProcessor
base_url = 'http://ctdbase.org/reports/'
urls = {
'chemical_gene': base_url + 'CTD_chem_gene_ixns.tsv.gz',
'chemical_disease': base_url + 'CTD_chemicals_diseases.tsv.gz',
'gene_disease': base_url + 'CTD_genes_diseases.tsv.gz',
}
processors = {
'chemical_gene': CTDChemicalGeneProcessor,
'chemical_disease': CTDChemicalDiseaseProcessor,
'gene_disease': CTDGeneDiseaseProcessor,
}
def process_from_web(subset, url=None):
if subset not in urls:
raise ValueError('%s is not a valid CTD subset.' % subset)
url = url if url else urls[subset]
return _process_url_or_file(url, subset)
def process_tsv(fname, subset):
return _process_url_or_file(fname, subset)
def _process_url_or_file(path, subset):
df = pandas.read_csv(path, sep='\t', comment='#',
header=None, dtype=str, keep_default_na=False)
return process_dataframe(df, subset)
def process_dataframe(df, subset):
if subset not in processors:
raise ValueError('%s is not a valid CTD subset.' % subset)
cp = processors[subset](df)
cp.extract_statements()
return cp
|
Refactor API to have single pandas load
|
Refactor API to have single pandas load
|
Python
|
bsd-2-clause
|
sorgerlab/indra,bgyori/indra,johnbachman/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/belpy,johnbachman/belpy,bgyori/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,johnbachman/belpy,sorgerlab/indra,sorgerlab/indra
|
71cdbeada7e11634e1168ca2e825167cbe87b4de
|
spacy/lang/de/norm_exceptions.py
|
spacy/lang/de/norm_exceptions.py
|
# coding: utf8
from __future__ import unicode_literals
# Here we only want to include the absolute most common words. Otherwise,
# this list would get impossibly long for German – especially considering the
# old vs. new spelling rules, and all possible cases.
_exc = {
"daß": "dass"
}
NORM_EXCEPTIONS = {}
for string, norm in _exc.items():
NORM_EXCEPTIONS[string] = norm
NORM_EXCEPTIONS[string.title()] = norm
|
# coding: utf8
from __future__ import unicode_literals
# Here we only want to include the absolute most common words. Otherwise,
# this list would get impossibly long for German – especially considering the
# old vs. new spelling rules, and all possible cases.
_exc = {
"daß": "dass"
}
NORM_EXCEPTIONS = {}
for string, norm in _exc.items():
NORM_EXCEPTIONS[string.title()] = norm
|
Revert "Also include lowercase norm exceptions"
|
Revert "Also include lowercase norm exceptions"
This reverts commit 70f4e8adf37cfcfab60be2b97d6deae949b30e9e.
|
Python
|
mit
|
aikramer2/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,explosion/spaCy
|
cd944a2606159c8ea11ffe8075ce4ec186fd799c
|
tests/basic_test.py
|
tests/basic_test.py
|
import unittest
from either_or import either_or
class nxppyTests(unittest.TestCase):
"""Basic tests for the NXP Read Library python wrapper."""
def test_import(self):
"""Test that it can be imported"""
import nxppy
@either_or('detect')
def test_detect_mifare_present(self):
"""Test that we can read the UID from a present Mifare card.
Either this test or the "absent" test below will pass, but never both.
"""
import nxppy
self.assertIsInstance(nxppy.read_mifare(), str, "Card UID is not a string")
@either_or('detect')
def test_detect_mifare_absent(self):
"""Test that an absent card results in a None response.
Either this test or the "present" test above will pass, but never both.
"""
import nxppy
self.assertIsNone(nxppy.read_mifare(), "Card UID is not None")
|
import unittest
from tests.either_or import either_or
class nxppyTests(unittest.TestCase):
"""Basic tests for the NXP Read Library python wrapper."""
def test_import(self):
"""Test that it can be imported"""
import nxppy
@either_or('detect')
def test_detect_mifare_present(self):
"""Test that we can read the UID from a present Mifare card.
Either this test or the "absent" test below will pass, but never both.
"""
import nxppy
reader = nxppy.Mifare()
self.assertIsInstance(reader, nxppy.Mifare)
self.assertIsInstance(reader.select(), str, "Card UID is not a string")
@either_or('detect')
def test_detect_mifare_absent(self):
"""Test that an absent card results in a None response.
Either this test or the "present" test above will pass, but never both.
"""
import nxppy
reader = nxppy.Mifare()
self.assertIsInstance(reader, nxppy.Mifare)
self.assertIsNone(reader.select(), "Card UID is not None")
|
Update tests to use class-based interface
|
Update tests to use class-based interface
|
Python
|
mit
|
AlterCodex/nxppy,Schoberm/nxppy,AlterCodex/nxppy,tuvaergun/nxppy,Schoberm/nxppy,tuvaergun/nxppy,Schoberm/nxppy,tuvaergun/nxppy,AlterCodex/nxppy
|
eb03de241f3d47173381ee22f85b5cdf5d9c1fb4
|
examples/monitoring/worker.py
|
examples/monitoring/worker.py
|
import random
import time
from os import getenv
from aiographite.aiographite import connect
from aiographite.protocol import PlaintextProtocol
GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost')
async def run(worker, *args, **kwargs):
value = random.randrange(10)
try:
connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop)
await connection.send('workers.worker', value, time.time())
await connection.close()
except Exception as e:
worker.logger.error('Cannot connect to graphite')
|
import random
import time
from os import getenv
from aiographite.aiographite import connect
from aiographite.protocol import PlaintextProtocol
GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost')
async def run(worker, *args, **kwargs):
value = random.randrange(10)
try:
connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop)
await connection.send('workers.worker', value, time.time())
await connection.close()
except Exception:
worker.logger.error('Cannot connect to graphite')
|
Fix flake8 issues in examples
|
Fix flake8 issues in examples
|
Python
|
apache-2.0
|
aioworkers/aioworkers
|
8396ac44d434a06c410c516b6109ec6ace030601
|
examples/pyuv_cffi_example.py
|
examples/pyuv_cffi_example.py
|
"""A simple example demonstrating basic usage of pyuv_cffi
This example creates a timer handle and a signal handle, then starts the loop. The timer callback is
run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener
for the INT signal and allows us to exit the loop by pressing ctrl-c.
"""
import signal
from pyuv_cffi import Loop, Timer, Signal
def sig_cb(sig_h, sig_num):
print('\nsig_cb({}, {})'.format(sig_h, sig_num))
sig_h.stop()
sig_h.loop.stop()
def timer_cb(timer_h):
print('timer_cb({})'.format(timer_h))
def run():
loop = Loop()
timer_h = Timer(loop)
timer_h.start(timer_cb, 1, 1)
sig_h = Signal(loop)
sig_h.start(sig_cb, signal.SIGINT)
status = loop.run()
timer_h.close() # stop and free any timers before freeing the loop
print('loop.run() -> ', status)
def main():
run()
if __name__ == '__main__':
main()
|
"""A simple example demonstrating basic usage of pyuv_cffi
This example creates a timer handle and a signal handle, then starts the loop. The timer callback is
run after 1 second, and repeating every 1 second thereafter. The signal handle registers a listener
for the INT signal and allows us to exit the loop by pressing ctrl-c.
"""
import signal
from pyuv_cffi import Loop, Timer, Signal
def sig_cb(sig_h, sig_num):
print('\nsig_cb({}, {})'.format(sig_h, sig_num))
sig_h.stop()
sig_h.loop.stop()
def timer_cb(timer_h):
print('timer_cb({})'.format(timer_h))
def run():
loop = Loop()
timer_h = Timer(loop)
timer_h.start(timer_cb, 1, 1)
sig_h = Signal(loop)
sig_h.start(sig_cb, signal.SIGINT)
status = loop.run()
timer_h.close() # we must stop and free any other handles before freeing the loop
print('loop.run() -> ', status)
# all handles in pyuv_cffi (including the loop) are automatically freed when they go out of
# scope
def main():
run()
if __name__ == '__main__':
main()
|
Add inline comment regarding freeing resources
|
Add inline comment regarding freeing resources
|
Python
|
mit
|
veegee/guv,veegee/guv
|
c28ae7e4b0637a2c4db120d9add13d5589ddca40
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import os
import sys
def runtests():
test_dir = os.path.dirname(__file__)
sys.path.insert(0, test_dir)
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.test.utils import get_runner
from django.conf import settings
try:
django.setup()
except AttributeError: # 1.6 or lower
pass
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(['.'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
import os
import sys
def runtests():
test_dir = os.path.dirname(__file__)
sys.path.insert(0, test_dir)
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.test.utils import get_runner
from django.conf import settings
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(['.'])
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
Remove compat shim as it doesn't apply
|
Remove compat shim as it doesn't apply
|
Python
|
mit
|
sergei-maertens/django-systemjs,sergei-maertens/django-systemjs,sergei-maertens/django-systemjs,sergei-maertens/django-systemjs
|
0dddfcbdb46ac91ddc0bfed4482bce049a8593c2
|
lazyblacksmith/views/blueprint.py
|
lazyblacksmith/views/blueprint.py
|
# -*- encoding: utf-8 -*-
from flask import Blueprint
from flask import render_template
from lazyblacksmith.models import Activity
from lazyblacksmith.models import Item
from lazyblacksmith.models import Region
blueprint = Blueprint('blueprint', __name__)
@blueprint.route('/manufacturing/<int:item_id>')
def manufacturing(item_id):
"""
Display the manufacturing page with all data
"""
item = Item.query.get(item_id)
activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING)
product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
regions = Region.query.filter_by(wh=False)
# is any of the materials manufactured ?
has_manufactured_components = False
for material in materials:
if material.material.is_manufactured():
has_manufactured_components = True
break
return render_template('blueprint/manufacturing.html', **{
'blueprint': item,
'materials': materials,
'activity': activity,
'product': product,
'regions': regions,
'has_manufactured_components': has_manufactured_components,
})
@blueprint.route('/')
def search():
return render_template('blueprint/search.html')
|
# -*- encoding: utf-8 -*-
import config
from flask import Blueprint
from flask import render_template
from lazyblacksmith.models import Activity
from lazyblacksmith.models import Item
from lazyblacksmith.models import Region
blueprint = Blueprint('blueprint', __name__)
@blueprint.route('/manufacturing/<int:item_id>')
def manufacturing(item_id):
"""
Display the manufacturing page with all data
"""
item = Item.query.get(item_id)
activity = item.activities.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
materials = item.activity_materials.filter_by(activity=Activity.ACTIVITY_MANUFACTURING)
product = item.activity_products.filter_by(activity=Activity.ACTIVITY_MANUFACTURING).one()
regions = Region.query.filter(
Region.id.in_(config.CREST_REGION_PRICE)
).filter_by(
wh=False
)
# is any of the materials manufactured ?
has_manufactured_components = False
for material in materials:
if material.material.is_manufactured():
has_manufactured_components = True
break
return render_template('blueprint/manufacturing.html', **{
'blueprint': item,
'materials': materials,
'activity': activity,
'product': product,
'regions': regions,
'has_manufactured_components': has_manufactured_components,
})
@blueprint.route('/')
def search():
return render_template('blueprint/search.html')
|
Change region list to match config
|
Change region list to match config
|
Python
|
bsd-3-clause
|
Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith
|
563220ef19395201aed7f6392519f84db4ec7a77
|
tests/test_midas.py
|
tests/test_midas.py
|
import datetime
from midas import mix
from midas.midas import estimate, forecast
def test_estimate(gdp_data, farmpay_data):
y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1,
start_date=datetime.datetime(1985, 1, 1),
end_date=datetime.datetime(2009, 1, 1))
res = estimate(y, yl, x)
fc = forecast(xf, ylf, res)
print(fc)
assert False
|
import datetime
import numpy as np
from midas import mix
from midas.midas import estimate, forecast
def test_estimate(gdp_data, farmpay_data):
y, yl, x, yf, ylf, xf = mix.mix_freq(gdp_data.gdp, farmpay_data.farmpay, 3, 1, 1,
start_date=datetime.datetime(1985, 1, 1),
end_date=datetime.datetime(2009, 1, 1))
res = estimate(y, yl, x)
fc = forecast(xf, ylf, res)
print(fc)
assert np.isclose(fc.loc['2011-04-01'][0], 1.336844, rtol=1e-6)
|
Add assertion for forecast test
|
Add assertion for forecast test
|
Python
|
mit
|
mikemull/midaspy
|
1f9a11640463df94166be8dffa824e57485154f8
|
tests/vaspy_test.py
|
tests/vaspy_test.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from arc_test import ArcTest
from incar_test import InCarTest
from oszicar_test import OsziCarTest
from outcar_test import OutCarTest
from xsd_test import XsdTest
from xtd_test import XtdTest
from poscar_test import PosCarTest
from xyzfile_test import XyzFileTest
from cif_test import CifFileTest
def suite():
suite = unittest.TestSuite([
unittest.TestLoader().loadTestsFromTestCase(ArcTest),
unittest.TestLoader().loadTestsFromTestCase(InCarTest),
unittest.TestLoader().loadTestsFromTestCase(OsziCarTest),
unittest.TestLoader().loadTestsFromTestCase(OutCarTest),
unittest.TestLoader().loadTestsFromTestCase(XsdTest),
unittest.TestLoader().loadTestsFromTestCase(XtdTest),
unittest.TestLoader().loadTestsFromTestCase(PosCarTest),
unittest.TestLoader().loadTestsFromTestCase(XyzFileTest),
unittest.TestLoader().loadTestsFromTestCase(CifFileTest),
])
return suite
if "__main__" == __name__:
result = unittest.TextTestRunner(verbosity=2).run(suite())
if result.errors or result.failures:
raise ValueError("Get errors and failures.")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from arc_test import ArcTest
from incar_test import InCarTest
from oszicar_test import OsziCarTest
from outcar_test import OutCarTest
from xsd_test import XsdTest
from xtd_test import XtdTest
from poscar_test import PosCarTest
from xyzfile_test import XyzFileTest
from cif_test import CifFileTest
from ani_test import AniFileTest
def suite():
suite = unittest.TestSuite([
unittest.TestLoader().loadTestsFromTestCase(ArcTest),
unittest.TestLoader().loadTestsFromTestCase(InCarTest),
unittest.TestLoader().loadTestsFromTestCase(OsziCarTest),
unittest.TestLoader().loadTestsFromTestCase(OutCarTest),
unittest.TestLoader().loadTestsFromTestCase(XsdTest),
unittest.TestLoader().loadTestsFromTestCase(XtdTest),
unittest.TestLoader().loadTestsFromTestCase(PosCarTest),
unittest.TestLoader().loadTestsFromTestCase(XyzFileTest),
unittest.TestLoader().loadTestsFromTestCase(CifFileTest),
unittest.TestLoader().loadTestsFromTestCase(AniFileTest),
])
return suite
if "__main__" == __name__:
result = unittest.TextTestRunner(verbosity=2).run(suite())
if result.errors or result.failures:
raise ValueError("Get errors and failures.")
|
Add test for animation file.
|
Add test for animation file.
|
Python
|
mit
|
PytLab/VASPy,PytLab/VASPy
|
b8a22c1dfe58802665231e8a82bb546bfd1dbbc8
|
pybossa/sentinel/__init__.py
|
pybossa/sentinel/__init__.py
|
from redis import sentinel
class Sentinel(object):
def __init__(self, app=None):
self.app = app
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1)
self.master = self.connection.master_for('mymaster')
self.slave = self.connection.slave_for('mymaster')
|
from redis import sentinel
class Sentinel(object):
def __init__(self, app=None):
self.app = app
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
socket_timeout=0.1)
redis_db = app.config['REDIS_DB'] or 0
print "Redis db is ", redis_db
self.master = self.connection.master_for('mymaster', db=redis_db)
self.slave = self.connection.slave_for('mymaster', db=redis_db)
|
Use config redis database in sentinel connections
|
Use config redis database in sentinel connections
|
Python
|
agpl-3.0
|
inteligencia-coletiva-lsd/pybossa,geotagx/pybossa,jean/pybossa,PyBossa/pybossa,PyBossa/pybossa,stefanhahmann/pybossa,OpenNewsLabs/pybossa,OpenNewsLabs/pybossa,stefanhahmann/pybossa,Scifabric/pybossa,geotagx/pybossa,jean/pybossa,Scifabric/pybossa,inteligencia-coletiva-lsd/pybossa
|
695dad10b6d27e2b45a7b98abad29b9d922b976f
|
pylisp/packet/ip/protocol.py
|
pylisp/packet/ip/protocol.py
|
'''
Created on 11 jan. 2013
@author: sander
'''
from abc import abstractmethod, ABCMeta
class Protocol(object):
__metaclass__ = ABCMeta
header_type = None
@abstractmethod
def __init__(self, next_header=None, payload=''):
'''
Constructor
'''
self.next_header = next_header
self.payload = payload
def __repr__(self):
# This works as long as we accept all properties as paramters in the
# constructor
params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__,
', '.join(params))
@abstractmethod
def sanitize(self):
'''
Check and optionally fix properties
'''
@classmethod
@abstractmethod
def from_bytes(cls, bitstream):
'''
Parse the given packet and update properties accordingly
'''
@abstractmethod
def to_bytes(self):
'''
Create bytes from properties
'''
def __str__(self):
return str(self.to_bytes())
def __bytes__(self):
return bytes(self.to_bytes())
|
'''
Created on 11 jan. 2013
@author: sander
'''
from abc import abstractmethod, ABCMeta
class ProtocolElement(object):
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self):
'''
Constructor
'''
def __repr__(self):
# This works as long as we accept all properties as paramters in the
# constructor
params = ['%s=%r' % (k, v) for k, v in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__,
', '.join(params))
def __str__(self):
return str(self.to_bytes())
def __bytes__(self):
return self.to_bytes()
@abstractmethod
def sanitize(self):
'''
Check and optionally fix properties
'''
@classmethod
@abstractmethod
def from_bytes(cls, bitstream):
'''
Parse the given packet and update properties accordingly
'''
@abstractmethod
def to_bytes(self):
'''
Create bytes from properties
'''
class Protocol(ProtocolElement):
__metaclass__ = ABCMeta
header_type = None
@abstractmethod
def __init__(self, next_header=None, payload=''):
'''
Constructor
'''
super(Protocol, self).__init__()
self.next_header = next_header
self.payload = payload
|
Split Protocol class in Protocol and ProtocolElement
|
Split Protocol class in Protocol and ProtocolElement
|
Python
|
bsd-3-clause
|
steffann/pylisp
|
a1c7773eb889ece3233b910c559b4e22ade3bb32
|
timpani/settings.py
|
timpani/settings.py
|
from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
def validateSetting(name, value):
if name == "title":
return len(value) > 0
|
from . import database
def getAllSettings():
databaseConnection = database.ConnectionManager.getConnection("main")
query = databaseConnection.session.query(database.tables.Setting)
settings = query.all()
return {setting.name: setting.value for setting in settings}
def getSettingValue(name):
databaseConnection = database.ConnectionManager.getConnection("main")
query = (databaseConnection.session
.query(database.tables.Setting)
.filter(database.tables.Setting.name == name))
if query.count() > 0:
return query.first().value
return None
def setSettingValue(name, value):
valid = validateSetting(name, value)
if valid:
databaseConnection = database.ConnectionManager.getConnection("main")
settingObj = database.tables.Setting(name = name, value = value)
databaseConnection.session.merge(settingObj)
databaseConnection.session.commit()
return True
return False
def validateSetting(name, value):
if name == "title":
return len(value) > 0
|
Use setting validation function in setSettingValue
|
Use setting validation function in setSettingValue
|
Python
|
mit
|
ollien/Timpani,ollien/Timpani,ollien/Timpani
|
ee99527185268ac386aad0c54056ac640c197e42
|
dbmigrator/commands/init.py
|
dbmigrator/commands/init.py
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2015, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from .. import utils
__all__ = ('cli_loader',)
@utils.with_cursor
def cli_command(cursor, migrations_directory='', version=None, **kwargs):
cursor.execute("""\
CREATE TABLE IF NOT EXISTS schema_migrations (
version TEXT NOT NULL,
applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
)""")
cursor.execute("""\
DELETE FROM schema_migrations""")
versions = []
if version is None:
timestamp = utils.timestamp()
else:
timestamp = str(version)
for version, name in utils.get_migrations(migrations_directory):
if version <= timestamp:
versions.append((version,))
cursor.executemany("""\
INSERT INTO schema_migrations VALUES (%s)
""", versions)
print('Schema migrations initialized.')
def cli_loader(parser):
parser.add_argument('--version', type=int,
help='Set the schema version to VERSION, '
'default current timestamp')
return cli_command
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2015, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from .. import utils
__all__ = ('cli_loader',)
@utils.with_cursor
def cli_command(cursor, migrations_directory='', version=None, **kwargs):
cursor.execute("""\
SELECT 1 FROM information_schema.tables
WHERE table_name = 'schema_migrations'""")
table_exists = cursor.fetchone()
if table_exists:
print('Schema migrations already initialized.')
return
cursor.execute("""\
CREATE TABLE schema_migrations (
version TEXT NOT NULL,
applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
)""")
versions = []
if version is None:
timestamp = utils.timestamp()
else:
timestamp = str(version)
for version, name in utils.get_migrations(migrations_directory):
if version <= timestamp:
versions.append((version,))
cursor.executemany("""\
INSERT INTO schema_migrations VALUES (%s)
""", versions)
print('Schema migrations initialized.')
def cli_loader(parser):
parser.add_argument('--version', type=int,
help='Set the schema version to VERSION, '
'default current timestamp')
return cli_command
|
Stop changing schema_migrations data if the table already exists
|
Stop changing schema_migrations data if the table already exists
|
Python
|
agpl-3.0
|
karenc/db-migrator
|
30f03692eff862f1456b9c376c21fe8e57de7eaa
|
dbt/clients/agate_helper.py
|
dbt/clients/agate_helper.py
|
import agate
DEFAULT_TYPE_TESTER = agate.TypeTester(types=[
agate.data_types.Number(),
agate.data_types.Date(),
agate.data_types.DateTime(),
agate.data_types.Boolean(),
agate.data_types.Text()
])
def table_from_data(data, column_names):
"Convert list of dictionaries into an Agate table"
# The agate table is generated from a list of dicts, so the column order
# from `data` is not preserved. We can use `select` to reorder the columns
#
# If there is no data, create an empty table with the specified columns
if len(data) == 0:
return agate.Table([], column_names=column_names)
else:
table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER)
return table.select(column_names)
def empty_table():
"Returns an empty Agate table. To be used in place of None"
return agate.Table(rows=[])
def as_matrix(table):
"Return an agate table as a matrix of data sans columns"
return [r.values() for r in table.rows.values()]
def from_csv(abspath):
return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER)
|
import agate
DEFAULT_TYPE_TESTER = agate.TypeTester(types=[
agate.data_types.Boolean(true_values=('true',),
false_values=('false',),
null_values=('null',)),
agate.data_types.Number(null_values=('null',)),
agate.data_types.TimeDelta(null_values=('null',)),
agate.data_types.Date(null_values=('null',)),
agate.data_types.DateTime(null_values=('null',)),
agate.data_types.Text(null_values=('null',))
])
def table_from_data(data, column_names):
"Convert list of dictionaries into an Agate table"
# The agate table is generated from a list of dicts, so the column order
# from `data` is not preserved. We can use `select` to reorder the columns
#
# If there is no data, create an empty table with the specified columns
if len(data) == 0:
return agate.Table([], column_names=column_names)
else:
table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER)
return table.select(column_names)
def empty_table():
"Returns an empty Agate table. To be used in place of None"
return agate.Table(rows=[])
def as_matrix(table):
"Return an agate table as a matrix of data sans columns"
return [r.values() for r in table.rows.values()]
def from_csv(abspath):
return agate.Table.from_csv(abspath, column_types=DEFAULT_TYPE_TESTER)
|
Make the agate table type tester more restrictive on what counts as null/true/false
|
Make the agate table type tester more restrictive on what counts as null/true/false
|
Python
|
apache-2.0
|
analyst-collective/dbt,nave91/dbt,nave91/dbt,fishtown-analytics/dbt,fishtown-analytics/dbt,fishtown-analytics/dbt,analyst-collective/dbt
|
52fa6cff088e2032fc8a3a9d732bf8affb9bccae
|
config/template.py
|
config/template.py
|
DB_USER = ''
DB_HOST = ''
DB_PASSWORD = ''
DB_NAME = ''
|
DB_USER = ''
DB_HOST = ''
DB_PASSWORD = ''
DB_NAME = ''
TWILIO_NUMBERS = ['']
|
Allow for representative view display with sample configuration
|
Allow for representative view display with sample configuration
|
Python
|
mit
|
AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at
|
068862dc72fa82ec35e7fabc6a0a99dc10f7f034
|
octavia/common/service.py
|
octavia/common/service.py
|
# Copyright 2014 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log
from octavia.common import config
from octavia.i18n import _LI
LOG = log.getLogger(__name__)
def prepare_service(argv=None):
"""Sets global config from config file and sets up logging."""
argv = argv or []
config.init(argv[1:])
LOG.info(_LI('Starting Octavia API server'))
log.set_defaults()
config.setup_logging(cfg.CONF)
|
# Copyright 2014 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log
from octavia.common import config
LOG = log.getLogger(__name__)
def prepare_service(argv=None):
"""Sets global config from config file and sets up logging."""
argv = argv or []
config.init(argv[1:])
log.set_defaults()
config.setup_logging(cfg.CONF)
|
Remove bad INFO log "Starting Octavia API server"
|
Remove bad INFO log "Starting Octavia API server"
This log is also display for health_manager and house_keeping service.
Api service already display "Starting API server on..." in INFO level.
Change-Id: I0a3ff91b556accdfadbad797488d17ae7a95d85b
|
Python
|
apache-2.0
|
openstack/octavia,openstack/octavia,openstack/octavia
|
41c6b1820e8b23079d9098526854c9a60859d128
|
gcloud_expenses/test_views.py
|
gcloud_expenses/test_views.py
|
import unittest
class ViewTests(unittest.TestCase):
def setUp(self):
from pyramid import testing
self.config = testing.setUp()
def tearDown(self):
from pyramid import testing
testing.tearDown()
def test_my_view(self):
from pyramid import testing
from .views import my_view
request = testing.DummyRequest()
info = my_view(request)
self.assertEqual(info['project'], 'foo')
|
import unittest
class ViewTests(unittest.TestCase):
def setUp(self):
from pyramid import testing
self.config = testing.setUp()
def tearDown(self):
from pyramid import testing
testing.tearDown()
def test_home_page(self):
from pyramid import testing
from .views import home_page
request = testing.DummyRequest()
info = home_page(request)
self.assertEqual(info, {})
|
Fix test broken in rename.
|
Fix test broken in rename.
|
Python
|
apache-2.0
|
GoogleCloudPlatform/google-cloud-python-expenses-demo,GoogleCloudPlatform/google-cloud-python-expenses-demo
|
769c83564d5f2272837c2fbea6d781110b71b8ca
|
main.py
|
main.py
|
from sys import argv, stderr
from drawer import *
from kmeans import kmeans
def read_vectors(file_name):
result = None
with open(file_name, 'r') as f:
vector_length = int(f.readline())
vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines()))
if all((len(x) == vector_length for x in vectors)):
result = vectors
return result
def main():
vectors = read_vectors(argv[1])
clusters_count = int(argv[2])
if vectors:
if len(vectors[0]) == 2:
display_source(vectors)
clusters = kmeans(vectors, clusters_count=clusters_count)
display_result(vectors, clusters)
else:
print('Invalid input', file=stderr)
if __name__ == '__main__':
main()
|
from sys import argv, stderr
from drawer import *
from kmeans import kmeans
def read_vectors(file_name):
result = None
with open(file_name, 'r') as f:
vector_length = int(f.readline())
vectors = list(map(lambda line: tuple(map(int, line.split())), f.readlines()))
if all((len(x) == vector_length for x in vectors)):
result = vectors
return result
def main():
vectors = read_vectors(argv[1])
clusters_count = int(argv[2])
if vectors:
clusters = kmeans(vectors, clusters_count=clusters_count)
if len(vectors[0]) == 2:
display_source(vectors)
display_result(vectors, clusters)
else:
print('Invalid input', file=stderr)
if __name__ == '__main__':
main()
|
Fix trying to display result in case of not 2D vectors
|
Fix trying to display result in case of not 2D vectors
|
Python
|
mit
|
vanashimko/k-means
|
03430a5b0abbd051e878274a669edf5afaa656b3
|
sc2/helpers/control_group.py
|
sc2/helpers/control_group.py
|
class ControlGroup(set):
def __init__(self, units):
super().__init__({unit.tag for unit in units})
def __hash__(self):
return hash(tuple(sorted(list(self))))
def select_units(self, units):
return units.filter(lambda unit: unit.tag in self)
def missing_unit_tags(self, units):
return {t for t in self if units.find_by_tag(t) is None}
@property
def empty(self):
return self.amount == 0
|
class ControlGroup(set):
def __init__(self, units):
super().__init__({unit.tag for unit in units})
def __hash__(self):
return hash(tuple(sorted(list(self))))
def select_units(self, units):
return units.filter(lambda unit: unit.tag in self)
def missing_unit_tags(self, units):
return {t for t in self if units.find_by_tag(t) is None}
@property
def empty(self):
return self.amount == 0
def add_unit(self, units):
self.add(unit.tag)
def add_units(self, units):
for unit in units:
self.add_unit(unit)
def remove_unit(self, units):
self.remove(unit.tag)
def remove_units(self, units):
for unit in units:
self.remove(unit.tag)
|
Add modification operations to control groups
|
Add modification operations to control groups
|
Python
|
mit
|
Dentosal/python-sc2
|
6820de9ccdb7cc7263142108881cf98aab85adb1
|
space-age/space_age.py
|
space-age/space_age.py
|
# File: space_age.py
# Purpose: Write a program that, given an age in seconds, calculates
# how old someone is in terms of a given planet's solar years.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 06:09 PM
class SpaceAge(object):
"""docstring for SpaceAge."""
def __init__(self, _seconds):
self._seconds = _seconds
def on_earth(self):
return round((self._seconds / 31557600), 2)
def on_mercury(self):
return round((self._seconds / 31557600) * 0.240846, 2)
obj = SpaceAge(1e6)
print (obj.on_earth())
print (obj.on_mercury())
|
# File: space_age.py
# Purpose: Write a program that, given an age in seconds, calculates
# how old someone is in terms of a given planet's solar years.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 06:09 PM
class SpaceAge(object):
"""docstring for SpaceAge."""
def __init__(self, _seconds):
self._seconds = _seconds
def on_earth(self):
return round((self._seconds / 31557600), 2)
def on_mercury(self):
planet = self.on_earth() * 0.2408467
return planet
def on_venus(self):
planet = self.on_earth() * 0.61519726
return planet
def on_mars(self):
planet = self.on_earth() * 1.8808158
return planet
def on_jupiter(self):
planet = self.on_earth() * 11.862615
return planet
def on_saturn(self):
planet = self.on_earth() * 29.447498
return planet
def on_uranus(self):
planet = self.on_earth() * 84.016846
return planet
def on_neptune(self):
planet = self.on_earth() * 164.79132
return planet
obj = SpaceAge(1e6)
print (obj.on_earth())
print (obj.on_mercury())
|
Add other planets age function
|
Add other planets age function
|
Python
|
mit
|
amalshehu/exercism-python
|
eb2b91d30244fd44b45ffc21b963256150b59152
|
frappe/patches/v11_0/reload_and_rename_view_log.py
|
frappe/patches/v11_0/reload_and_rename_view_log.py
|
import frappe
def execute():
if frappe.db.exists('DocType', 'View log'):
frappe.reload_doc('core', 'doctype', 'view_log', force=True)
frappe.db.sql("INSERT INTO `tabView Log` SELECT * from `tabView log`")
frappe.delete_doc('DocType', 'View log')
frappe.reload_doc('core', 'doctype', 'view_log', force=True)
else:
frappe.reload_doc('core', 'doctype', 'view_log')
|
import frappe
def execute():
if frappe.db.exists('DocType', 'View log'):
# for mac users direct renaming would not work since mysql for mac saves table name in lower case
# so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error
# more info https://stackoverflow.com/a/44753093/5955589 ,
# https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names
# here we are creating a temp table to store view log data
frappe.db.sql("CREATE TABLE `ViewLogTemp` AS SELECT * FROM `tabView log`")
# deleting old View log table
frappe.db.sql("DROP table `tabView log`")
frappe.delete_doc('DocType', 'View log')
# reloading view log doctype to create `tabView Log` table
frappe.reload_doc('core', 'doctype', 'view_log')
frappe.db.commit()
# Move the data to newly created `tabView Log` table
frappe.db.sql("INSERT INTO `tabView Log` SELECT * FROM `ViewLogTemp`")
# Delete temporary table
frappe.db.sql("DROP table `ViewLogTemp`")
else:
frappe.reload_doc('core', 'doctype', 'view_log')
|
Fix rename view log patch for mac users
|
Fix rename view log patch for mac users
for mac users direct renaming would not work
since mysql for mac saves table name in lower case,
so while renaming `tabView log` to `tabView Log` we get
"Table 'tabView Log' already exists" error
# more info https://stackoverflow.com/a/44753093/5955589
https://dev.mysql.com/doc/refman/8.0/en/server-system-variables.html#sysvar_lower_case_table_names
|
Python
|
mit
|
mhbu50/frappe,yashodhank/frappe,vjFaLk/frappe,adityahase/frappe,mhbu50/frappe,almeidapaulopt/frappe,saurabh6790/frappe,adityahase/frappe,vjFaLk/frappe,yashodhank/frappe,saurabh6790/frappe,frappe/frappe,frappe/frappe,vjFaLk/frappe,yashodhank/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,mhbu50/frappe,StrellaGroup/frappe,yashodhank/frappe,vjFaLk/frappe,mhbu50/frappe,adityahase/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,saurabh6790/frappe,frappe/frappe,saurabh6790/frappe,adityahase/frappe
|
53c39934e19fdad7926a8ad7833cd1737b47cf58
|
utilities/errors.py
|
utilities/errors.py
|
import os
import simulators
import numpy as np
import json
"""Calculate Errors on the Spectrum.
For a first go using an fixed SNR of 200 for all observations.
"""
def get_snrinfo(star, obs_num, chip):
"""Load SNR info from json file."""
snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json")
with open(snr_file, "r") as f:
snr_data = json.load(f)
try:
return snr_data[str(star)][str(obs_num)][str(chip)]
except KeyError as e:
print("No snr data present for {0}-{1}_{2}".format(star, obs_num, chip))
raise e
def spectrum_error(star, obs_num, chip, error_off=False):
"""Return the spectrum error.
errors = None will perform a normal chi**2 statistic.
"""
if error_off:
errors = None
else:
snr = get_snrinfo(star, obs_num, chip)
if len(snr) == 1:
errors = 1 / np.float(snr[0])
else:
raise NotImplementedError("Haven't checked if an error array can be handled yet.")
return errors
|
import os
import simulators
import numpy as np
import json
import warnings
"""Calculate Errors on the Spectrum.
For a first go using an fixed SNR of 200 for all observations.
"""
def get_snrinfo(star, obs_num, chip):
"""Load SNR info from json file."""
snr_file = os.path.join(simulators.paths["spectra"], "detector_snrs.json")
with open(snr_file, "r") as f:
snr_data = json.load(f)
try:
return snr_data[str(star)][str(obs_num)][str(chip)]
except KeyError as e:
warnings.warn("No snr data present for {0}-{1}_{2}. "
"Setting error to None instead".format(star, obs_num, chip))
return None
def spectrum_error(star, obs_num, chip, error_off=False):
"""Return the spectrum error.
errors = None will perform a normal chi**2 statistic.
"""
if error_off:
errors = None
else:
snr = get_snrinfo(star, obs_num, chip)
if snr is None:
errors = None
elif len(snr) == 1:
errors = 1 / np.float(snr[0])
else:
raise NotImplementedError("Haven't checked if an error array can be handled yet.")
return errors
|
Handle no snr information in snr file. (for fake simualtions mainly)
|
Handle no snr information in snr file. (for fake simualtions mainly)
|
Python
|
mit
|
jason-neal/companion_simulations,jason-neal/companion_simulations
|
f1cabc889dd93e26295501097ac9cbf90890a1cd
|
solvent/config.py
|
solvent/config.py
|
import yaml
import os
LOCAL_OSMOSIS = 'localhost:1010'
OFFICIAL_OSMOSIS = None
OFFICIAL_BUILD = False
WITH_OFFICIAL_OBJECT_STORE = True
CLEAN = False
def load(filename):
with open(filename) as f:
data = yaml.load(f.read())
if data is None:
raise Exception("Configuration file must not be empty")
globals().update(data)
if 'SOLVENT_CONFIG' in os.environ:
data = yaml.load(os.environ['SOLVENT_CONFIG'])
globals().update(data)
if 'SOLVENT_CLEAN' in os.environ:
global CLEAN
CLEAN = True
if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None:
raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field")
def objectStoresOsmosisParameter():
if WITH_OFFICIAL_OBJECT_STORE:
return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS
else:
return LOCAL_OSMOSIS
|
import yaml
import os
LOCAL_OSMOSIS_IF_ROOT = 'localhost:1010'
LOCAL_OSMOSIS_IF_NOT_ROOT = 'localhost:1010'
LOCAL_OSMOSIS = None
OFFICIAL_OSMOSIS = None
OFFICIAL_BUILD = False
WITH_OFFICIAL_OBJECT_STORE = True
CLEAN = False
def load(filename):
with open(filename) as f:
data = yaml.load(f.read())
if data is None:
raise Exception("Configuration file must not be empty")
globals().update(data)
if 'SOLVENT_CONFIG' in os.environ:
data = yaml.load(os.environ['SOLVENT_CONFIG'])
globals().update(data)
if 'SOLVENT_CLEAN' in os.environ:
global CLEAN
CLEAN = True
if WITH_OFFICIAL_OBJECT_STORE and OFFICIAL_OSMOSIS is None:
raise Exception("Configuration file must contain 'OFFICIAL_OSMOSIS' field")
global LOCAL_OSMOSIS
if LOCAL_OSMOSIS is None:
if os.getuid() == 0:
LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_ROOT
else:
LOCAL_OSMOSIS = LOCAL_OSMOSIS_IF_NOT_ROOT
def objectStoresOsmosisParameter():
if WITH_OFFICIAL_OBJECT_STORE:
return LOCAL_OSMOSIS + "+" + OFFICIAL_OSMOSIS
else:
return LOCAL_OSMOSIS
|
Select local osmosis depends if user is root, to avoid permission denied on /var/lib/osmosis
|
Bugfix: Select local osmosis depends if user is root, to avoid permission denied on /var/lib/osmosis
|
Python
|
apache-2.0
|
Stratoscale/solvent,Stratoscale/solvent
|
967240f95edb300d731f24cfb259a1fe4f3bdae5
|
webapp_health_monitor/management/commands/verify.py
|
webapp_health_monitor/management/commands/verify.py
|
import importlib
import sys
from django.apps import apps
from django.core.management.base import BaseCommand
from webapp_health_monitor.verification_suit import VerificationSuit
class Command(BaseCommand):
SUBMODULE_NAME = 'verificators'
def handle(self, *args, **options):
submodules = self._get_verificator_modules()
for submodule in submodules:
try:
importlib.import_module(submodule)
except ImportError as e:
if str(e) != "No module named '{}'".format(submodule):
raise e
result = VerificationSuit().run()
self.stdout.write('{}\n'.format(result.report()))
sys.exit(result.has_failed())
def _get_verificator_modules(self):
for app in apps.get_app_configs():
yield '.'.join([app.module.__name__, self.SUBMODULE_NAME])
|
import importlib
import sys
from django.apps import apps
from django.core.management.base import BaseCommand
from webapp_health_monitor.verification_suit import VerificationSuit
class Command(BaseCommand):
SUBMODULE_NAME = 'verificators'
def handle(self, *args, **options):
submodules = self._get_verificator_modules()
for submodule in submodules:
try:
importlib.import_module(submodule)
except ImportError as e:
if not self._import_error_concerns_verificator(submodule, e):
raise e
result = VerificationSuit().run()
self.stdout.write('{}\n'.format(result.report()))
sys.exit(result.has_failed())
def _get_verificator_modules(self):
for app in apps.get_app_configs():
yield '.'.join([app.module.__name__, self.SUBMODULE_NAME])
def _import_error_concerns_verificator(self, submodule, error):
if sys.version_info >= (3, 0):
return str(error) == "No module named '{}'".format(submodule)
else:
return error.message == "No module named {}".format(
self.SUBMODULE_NAME)
|
Fix python2 django module importerror.
|
Fix python2 django module importerror.
|
Python
|
mit
|
pozytywnie/webapp-health-monitor
|
80529d5032b6728adcaad426310c30b5e6366ad4
|
solution.py
|
solution.py
|
class Kiosk():
def __init__(self, visit_cost, location):
self.visit_cost = visit_cost
self.location = location
print 'initializing Kiosk'
#patient shold be Person
def visit(self, patient):
if not patient.location == self.location:
print 'patient not in correct location'
return False
if not patient.money>self.visit_cost:
print 'patient cannot afford treatment'
#patient should be Person
def visit(self, patient):
patient.money -= visit_cost
#improve patient.diabetes
#improve patient.cardio
return True
#Patient should be from class Person
def filter(self, patient):
if not patient.location == self.location:
print "patient not at proper location"
return False
if not patient.money>self.visit_cost:
print "patient cannot afford treatment"
return False
visit(self,patient)
|
class Kiosk():
def __init__(self, location, visit_cost, diabetes_threshold,
cardio_threshold):
self.location = location
self.visit_cost = visit_cost
self.diabetes_threshold = diabetes_threshold
self.cardio_threshold = cardio_threshold
#Initial cost to create kiosk: $5000. We are measuring in rupees
self.money = -309900
print 'initializing Kiosk'
#patient shold be Person
def visit(self, patient):
if not patient.location == self.location:
print 'patient not in correct location'
return False
if not patient.money>self.visit_cost:
print 'patient cannot afford treatment'
patient.money -= visit_cost
kiosk.money += visit_cost
#If we diagnose diabetes
if patient.diabetes<diabetes_threshold:
#For now, we ignore the details and just improve the patient's
#health.
patient.diabetes = diabetes_threshold
#If we diagnose cardiovascular problems
if patient.cardio<cardio_threshold:
#For now, we ignore the details and just improve the patient's
#health.
patient.cardio = cardio_threshold
#The visit was successful
return True
|
Clean up and finish Kiosk class
|
Clean up and finish Kiosk class
There was some redundancy because I merged it poorly
|
Python
|
bsd-3-clause
|
rkawauchi/IHK,rkawauchi/IHK
|
be8344c2f796ecab60669630f4729c4ffa41c83b
|
web/impact/impact/v1/views/utils.py
|
web/impact/impact/v1/views/utils.py
|
def merge_data_by_id(data):
result = {}
for datum in data:
id = datum["id"]
item = result.get(id, {})
item.update(datum)
result[id] = item
return result.values()
def map_data(klass, query, order, data_keys, output_keys):
result = klass.objects.filter(query).order_by(order)
data = result.values_list(*data_keys)
return [dict(zip(output_keys, values))
for values in data]
|
def coalesce_dictionaries(data, merge_field="id"):
"Takes a sequence of dictionaries, merges those that share the
same merge_field, and returns a list of resulting dictionaries"
result = {}
for datum in data:
merge_id = datum[merge_field]
item = result.get(merge_id, {})
item.update(datum)
result[merge_id] = item
return result.values()
def map_data(klass, query, order, data_keys, output_keys):
result = klass.objects.filter(query).order_by(order)
data = result.values_list(*data_keys)
return [dict(zip(output_keys, values))
for values in data]
|
Rename merge_data_by_id, add doc-string, get rid of id as a local
|
[AC-4835] Rename merge_data_by_id, add doc-string, get rid of id as a local
|
Python
|
mit
|
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
|
028903036ac4bd3bf4a7b91ceda43a6c450f7e20
|
pipeline_notifier/main.py
|
pipeline_notifier/main.py
|
import os
import cherrypy
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return 'Hello World!'
def run_server():
cherrypy.tree.graft(app, '/')
cherrypy.config.update({
'engine.autoreload_on': True,
'log.screen': True,
'server.socket_port': 8080,
'server.socket_host': '0.0.0.0'
})
cherrypy.engine.start()
cherrypy.engine.block()
if __name__ == '__main__':
run_server()
|
import os
import cherrypy
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return 'Hello World!'
def run_server():
cherrypy.tree.graft(app, '/')
cherrypy.config.update({
'engine.autoreload_on': True,
'log.screen': True,
'server.socket_port': int(os.environ.get('PORT', '8080')),
'server.socket_host': '0.0.0.0'
})
cherrypy.engine.start()
cherrypy.engine.block()
if __name__ == '__main__':
run_server()
|
Use port from env if available
|
Use port from env if available
|
Python
|
mit
|
pimterry/pipeline-notifier
|
269474608221e35907896f5f618e69d6e5136388
|
facepy/exceptions.py
|
facepy/exceptions.py
|
class FacepyError(Exception):
"""Base class for exceptions raised by Facepy."""
def __init__(self, message):
self.message = message
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
|
class FacepyError(Exception):
"""Base class for exceptions raised by Facepy."""
def __init__(self, message):
self.message = message
|
Remove uneccessary getter and setter
|
Remove uneccessary getter and setter
|
Python
|
mit
|
merwok-forks/facepy,jwjohns/facepy,jgorset/facepy,Spockuto/facepy,liorshahverdi/facepy,buzzfeed/facepy,jwjohns/facepy
|
6d8b99b5e4dab49c5a2e90b07f02072c116a7367
|
robots/models.py
|
robots/models.py
|
from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
class File(models.Model):
site = models.OneToOneField(Site, verbose_name=_(u'site'))
content = models.TextField(_(u'file content'))
objects = models.Manager()
class Meta:
verbose_name = _(u'robots.txt file')
verbose_name_plural = _(u'robots.txt files')
def __unicode__(self):
return u'/'.join([self.site.domain, u'robots.txt'])
|
from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
class File(models.Model):
site = models.OneToOneField(Site, verbose_name=_(u'site'))
content = models.TextField(_(u'file content'))
class Meta:
verbose_name = _(u'robots.txt file')
verbose_name_plural = _(u'robots.txt files')
def __unicode__(self):
return u'/'.join([self.site.domain, u'robots.txt'])
|
Remove unnecessary manager declaration from File model
|
Remove unnecessary manager declaration from File model
|
Python
|
isc
|
trilan/lemon-robots,trilan/lemon-robots
|
aae85883bb99ac15f6922506fa64c4492101b602
|
utils/lit/tests/shared-output.py
|
utils/lit/tests/shared-output.py
|
# RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, "%{inputs}/shared-output/lit.cfg")' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
|
# RUN: rm -rf %t && mkdir -p %t
# RUN: echo 'lit_config.load_config(config, os.path.join("%{inputs}", "shared-output", "lit.cfg"))' > %t/lit.site.cfg
# RUN: %{lit} %t
# RUN: FileCheck %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=NEGATIVE %s < %t/Output/Shared/SHARED.tmp
# RUN: FileCheck -check-prefix=OTHER %s < %t/Output/Shared/OTHER.tmp
# CHECK-DAG: primary
# CHECK-DAG: secondary
# CHECK-DAG: sub
# NEGATIVE-NOT: other
# OTHER: other
|
Fix new test for systems that don't use / as os.path.sep
|
lit.py: Fix new test for systems that don't use / as os.path.sep
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@315773 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
apple/swift-llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm
|
5867a09fb43f8c4480d7aef89a200e952406a648
|
dbaas/integrations/credentials/admin/__init__.py
|
dbaas/integrations/credentials/admin/__init__.py
|
# -*- coding:utf-8 -*-
from django.contrib import admin
from .. import models
admin.site.register(models.IntegrationType, )
admin.site.register(models.IntegrationCredential, )
|
# -*- coding:utf-8 -*-
from django.contrib import admin
from .integration_credential import IntegrationCredentialAdmin
from .integration_type import IntegrationTypeAdmin
from .. import models
admin.site.register(models.IntegrationType, IntegrationTypeAdmin)
admin.site.register(models.IntegrationCredential, IntegrationCredentialAdmin)
|
Enable integration credential and integration type admin
|
Enable integration credential and integration type admin
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
c08c437b22982667e8ed413739147caec6c5d1ca
|
api/preprints/urls.py
|
api/preprints/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name),
url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name),
url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name),
url(r'^(?P<node_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name),
url(r'^(?P<node_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name),
url(r'^(?P<node_id>\w+)/relationships/preprint_provider/$', views.PreprintToPreprintProviderRelationship.as_view(), name=views.PreprintToPreprintProviderRelationship.view_name),
]
|
Add URL route for updating provider relationship
|
Add URL route for updating provider relationship
|
Python
|
apache-2.0
|
mluo613/osf.io,rdhyee/osf.io,samchrisinger/osf.io,leb2dg/osf.io,cslzchen/osf.io,chrisseto/osf.io,leb2dg/osf.io,binoculars/osf.io,mluo613/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,emetsger/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,icereval/osf.io,binoculars/osf.io,cslzchen/osf.io,caneruguz/osf.io,samchrisinger/osf.io,baylee-d/osf.io,TomBaxter/osf.io,crcresearch/osf.io,icereval/osf.io,laurenrevere/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,samchrisinger/osf.io,caseyrollins/osf.io,cslzchen/osf.io,mfraezz/osf.io,mattclark/osf.io,cwisecarver/osf.io,chennan47/osf.io,aaxelb/osf.io,erinspace/osf.io,emetsger/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,saradbowman/osf.io,mfraezz/osf.io,erinspace/osf.io,sloria/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,chennan47/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,hmoco/osf.io,pattisdr/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,acshi/osf.io,sloria/osf.io,mluo613/osf.io,Nesiehr/osf.io,alexschiller/osf.io,aaxelb/osf.io,TomBaxter/osf.io,mluo613/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,pattisdr/osf.io,leb2dg/osf.io,adlius/osf.io,caseyrollins/osf.io,chrisseto/osf.io,mfraezz/osf.io,caseyrollins/osf.io,crcresearch/osf.io,alexschiller/osf.io,felliott/osf.io,alexschiller/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,adlius/osf.io,Johnetordoff/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,rdhyee/osf.io,hmoco/osf.io,baylee-d/osf.io,cwisecarver/osf.io,mattclark/osf.io,cslzchen/osf.io,emetsger/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,felliott/osf.io,caneruguz/osf.io,Nesiehr/osf.io,acshi/osf.io,mattclark/osf.io,felliott/osf.io,TomBaxter/osf.io,crcresearch/osf.io,acshi/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,adlius/osf.io,mfraezz/osf.io,acshi/osf.io,emetsger/osf.io,sloria/osf.io,laurenrevere/osf.io,felliott/osf.io,chrisseto/osf.io,chrisseto/osf.io,aaxelb/osf.io,adlius/osf.io,icereval/osf.io,erinspace/osf.io,hmoco/osf.io,cwisecarver/osf.io,mluo613/osf.io,pattisdr/osf.io
|
597f586d2cf42f31a0179efc7ac8441f33b3d637
|
lib/mysql.py
|
lib/mysql.py
|
import pymysql
class MySQL():
def __init__(self, host, user, password, port):
self._host = host
self._user = user
self._password = password
self._conn = pymysql.connect(host=host, port=port,
user=user, passwd=password)
self._cursor = self._conn.cursor()
def execute(self, query):
try:
self._cursor.execute(query=query)
except (AttributeError, pymysql.OperationalError):
self.__reconnect__()
self._cursor.execute(query=query)
def fetchone(self):
return self._cursor.fetchone()
def commit(self):
return self._conn.commit()
def __reconnect__(self):
self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password)
self._cursor = self._conn.cursor()
|
import pymysql
class MySQL():
def __init__(self, host, user, password, port):
self._host = host
self._user = user
self._password = password
self._port = port
self._conn = pymysql.connect(host=host, port=port,
user=user, passwd=password)
self._cursor = self._conn.cursor()
def execute(self, query):
try:
self._cursor.execute(query=query)
except (AttributeError, pymysql.OperationalError):
self.__reconnect__()
self._cursor.execute(query=query)
def fetchone(self):
return self._cursor.fetchone()
def commit(self):
return self._conn.commit()
def __reconnect__(self):
self._conn = pymysql.connect(host=self._host, port=self._port, user=self._user, passwd=self._password)
self._cursor = self._conn.cursor()
|
Define the port variable for reconnection
|
Define the port variable for reconnection
|
Python
|
mit
|
ImShady/Tubey
|
999752ec378bbf6d3017f7afc964090c6871b7d4
|
app/user_administration/tests.py
|
app/user_administration/tests.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
class LoginRequiredTest(TestCase):
def test_login_required(self):
response = self.client.get('/')
self.assertEqual(
response.status_code,
302,
msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code)
)
self.assertEqual(
response.url,
'/login?next=/',
msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url)
)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth.models import User
from .models import Clients
class LoginRequiredTest(TestCase):
def test_login_required(self):
response = self.client.get('/')
self.assertEqual(
response.status_code,
302,
msg="Login Required Validation Failed, Received code {0} instead of 302".format(response.status_code)
)
self.assertEqual(
response.url,
'/login?next=/',
msg="Login Required Redirection Failed, Received url {0} instead of /login?next=/".format(response.url)
)
class LoginSetup(TestCase):
def setUp(self):
self.user = User.objects.create(username='testUser', is_active=True, is_superuser=True)
self.user.set_password('RHChallenge')
self.user.save()
self.client.force_login(self.user)
class ClientsViewTest(LoginSetup):
def setUp(self):
super(ClientsViewTest, self).setUp()
self.custom_client = Clients.objects.create(first_name='RH', last_name='CH', iban='IBAN')
def test_client_create(self):
data = {'first_name': 'Rexhep', 'last_name': 'Berlajolli', 'iban': 'XK051506001004471930'}
self.client.post('/add', data=data)
clients_count = Clients.objects.count()
self.assertEqual(
clients_count,
2,
msg="Create client failed, received {0} clients instead of 2".format(clients_count)
)
def test_client_create_validation(self):
data = {'first_name': 'Invalid', 'last_name': 'Data', 'iban': 'INVALID_IBAN'}
self.client.post('/add', data=data)
clients_count = Clients.objects.count()
self.assertEqual(
clients_count,
1,
msg="Insertion of invalid data succeeded, received {0} clients instead of 1".format(clients_count)
)
def test_get_clients(self):
response = self.client.get('/')
clients = response.context_data['clients']
self.assertEqual(
list(clients),
list(Clients.objects.all()),
msg="Get clients failed, received clients {0} instead of {1}".format(clients, [self.custom_client])
)
|
Add TestCase for ClientListView and ClientCreateView
|
Add TestCase for ClientListView and ClientCreateView
|
Python
|
mit
|
rexhepberlajolli/RHChallenge,rexhepberlajolli/RHChallenge
|
8b07dde78e753f6dce663481a68856024ed2fc49
|
plutokore/__init__.py
|
plutokore/__init__.py
|
from .environments.makino import MakinoProfile
from .environments.king import KingProfile
from .jet import AstroJet
from . import luminosity
from . import plotting
from . import simulations
from . import helpers
from . import io
__all__ = [
'environments',
'luminosity',
'plotting',
'simulations',
'jet',
'helpers',
'io',
]
|
from .environments.makino import MakinoProfile
from .environments.king import KingProfile
from .jet import AstroJet
from . import luminosity
from . import plotting
from . import simulations
from . import helpers
from . import io
from . import configuration
__all__ = [
'environments',
'luminosity',
'plotting',
'simulations',
'jet',
'helpers',
'io',
'configuration',
]
|
Add configuration module to package exports
|
Add configuration module to package exports
|
Python
|
mit
|
opcon/plutokore,opcon/plutokore
|
fcd2328549dcec2986e3b972f1a8bcfb0cf2e21b
|
rst2pdf/utils.py
|
rst2pdf/utils.py
|
# -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import shlex
from reportlab.platypus import Spacer
from flowables import *
def parseRaw(data):
"""Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
"""
elements = []
lines = data.splitlines()
for line in lines:
lexer = shlex.shlex(line)
lexer.whitespace += ','
tokens = list(lexer)
command = tokens[0]
if command == 'PageBreak':
if len(tokens) == 1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]), int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now:
# def depth(node):
# if node.parent == None:
# return 0
# else:
# return 1 + depth(node.parent)
|
# -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import shlex
from reportlab.platypus import Spacer
from flowables import *
from styles import adjustUnits
def parseRaw(data):
"""Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
"""
elements = []
lines = data.splitlines()
for line in lines:
lexer = shlex.shlex(line)
lexer.whitespace += ','
tokens = list(lexer)
command = tokens[0]
if command == 'PageBreak':
if len(tokens) == 1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(adjustUnits(tokens[1]),
adjustUnits(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now:
# def depth(node):
# if node.parent == None:
# return 0
# else:
# return 1 + depth(node.parent)
|
Add unit support for spacers
|
Add unit support for spacers
|
Python
|
mit
|
pombreda/rst2pdf,liuyi1112/rst2pdf,pombreda/rst2pdf,liuyi1112/rst2pdf,rst2pdf/rst2pdf,rst2pdf/rst2pdf
|
6b0167514bb41f877945b408638fab72873f2da8
|
postgres_copy/__init__.py
|
postgres_copy/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.db import connection
from .copy_from import CopyMapping
from .copy_to import SQLCopyToCompiler, CopyToQuery
__version__ = '2.0.0'
class CopyQuerySet(models.QuerySet):
"""
Subclass of QuerySet that adds from_csv and to_csv methods.
"""
def from_csv(self, csv_path, mapping, **kwargs):
"""
Copy CSV file from the provided path to the current model using the provided mapping.
"""
mapping = CopyMapping(self.model, csv_path, mapping, **kwargs)
mapping.save(silent=True)
def to_csv(self, csv_path, *fields):
"""
Copy current QuerySet to CSV at provided path.
"""
query = self.query.clone(CopyToQuery)
query.copy_to_fields = fields
compiler = query.get_compiler(self.db, connection=connection)
compiler.execute_sql(csv_path)
CopyManager = models.Manager.from_queryset(CopyQuerySet)
__all__ = (
'CopyMapping',
'SQLCopyToCompiler',
'CopyToQuery',
'CopyManager',
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.db import connection
from .copy_from import CopyMapping
from .copy_to import SQLCopyToCompiler, CopyToQuery
__version__ = '2.0.0'
class CopyQuerySet(models.QuerySet):
"""
Subclass of QuerySet that adds from_csv and to_csv methods.
"""
def from_csv(self, csv_path, mapping, **kwargs):
"""
Copy CSV file from the provided path to the current model using the provided mapping.
"""
mapping = CopyMapping(self.model, csv_path, mapping, **kwargs)
mapping.save(silent=True)
def to_csv(self, csv_path, *fields):
"""
Copy current QuerySet to CSV at provided path.
"""
query = self.query.clone(CopyToQuery)
query.copy_to_fields = fields
compiler = query.get_compiler(self.db, connection=connection)
compiler.execute_sql(csv_path)
CopyManager = models.Manager.from_queryset(CopyQuerySet)
__all__ = (
'CopyManager',
'CopyMapping',
'CopyToQuery',
'CopyToQuerySet',
'SQLCopyToCompiler',
)
|
Add CopyToQuerySet to available imports
|
Add CopyToQuerySet to available imports
|
Python
|
mit
|
california-civic-data-coalition/django-postgres-copy
|
639032215f7a51ca146810e8261448f4d0a318aa
|
downstream_node/models.py
|
downstream_node/models.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from downstream_node.startup import db
class Files(db.Model):
__tablename__ = 'files'
id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
filepath = db.Column('filepath', db.String())
class Challenges(db.Model):
__tablename__ = 'challenges'
id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
filepath = db.Column(db.ForeignKey('files.filepath'))
block = db.Column('block', db.String())
seed = db.Column('seed', db.String())
response = db.Column('response', db.String(), nullable=True)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from downstream_node.startup import db
class Files(db.Model):
__tablename__ = 'files'
id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
filepath = db.Column('filepath', db.String())
class Challenges(db.Model):
__tablename__ = 'challenges'
id = db.Column(db.Integer(), primary_key=True, autoincrement=True)
filepath = db.Column(db.ForeignKey('files.filepath'))
root_seed = db.Column(db.String())
block = db.Column(db.String())
seed = db.Column(db.String())
response = db.Column(db.String(), nullable=True)
|
Add root seed to model
|
Add root seed to model
|
Python
|
mit
|
Storj/downstream-node,Storj/downstream-node
|
c11fd9f792afb71e01224f149121bc13a6a9bed8
|
scripts/utils.py
|
scripts/utils.py
|
#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
import json
import os
json_dump_params = {
'ensure_ascii': False,
'indent': '\t',
'separators': (',', ': '),
'sort_keys': True
}
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(obj, file, **json_dump_params)
file.write('\n')
|
#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
from collections import OrderedDict
import json
import os
json_load_params = {
'object_pairs_hook': OrderedDict
}
json_dump_params = {
'ensure_ascii': False,
'indent': '\t',
'separators': (',', ': '),
'sort_keys': True
}
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file, **json_load_params)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(obj, file, **json_dump_params)
file.write('\n')
|
Use the OrderedDict class for JSON objects.
|
scripts: Use the OrderedDict class for JSON objects.
|
Python
|
unlicense
|
thpatch/thcrap,thpatch/thcrap,VBChunguk/thcrap,thpatch/thcrap,VBChunguk/thcrap,thpatch/thcrap,thpatch/thcrap,VBChunguk/thcrap
|
0e22a642526612ff9d19d1b421a1aacea4109f15
|
pylearn2/datasets/hdf5.py
|
pylearn2/datasets/hdf5.py
|
"""Objects for datasets serialized in HDF5 format (.h5)."""
import h5py
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
class HDF5Dataset(DenseDesignMatrix):
"""Dense dataset loaded from an HDF5 file."""
def __init__(self, filename, key):
with h5py.File(filename) as f:
data = f[key][:]
if data.ndim == 2:
super(HDF5Dataset, self).__init__(X=data)
else:
super(HDF5Dataset, self).__init__(topo_view=data)
|
"""Objects for datasets serialized in HDF5 format (.h5)."""
import h5py
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
class HDF5Dataset(DenseDesignMatrix):
"""Dense dataset loaded from an HDF5 file."""
def __init__(self, filename, X=None, topo_view=None, y=None, **kwargs):
"""
Loads data and labels from HDF5 file.
Parameters
----------
filename: str
HDF5 file name.
X: str
Key into HDF5 file for dataset design matrix.
topo_view: str
Key into HDF5 file for topological view of dataset.
y: str
Key into HDF5 file for dataset targets.
"""
with h5py.File(filename) as f:
if X is not None:
X = f[X][:]
if topo_view is not None:
topo_view = f[topo_view][:]
if y is not None:
y = f[y][:]
super(HDF5Dataset, self).__init__(X, topo_view, y, **kwargs)
|
Support for targets in HDF5 datasets
|
Support for targets in HDF5 datasets
|
Python
|
bsd-3-clause
|
alexjc/pylearn2,pombredanne/pylearn2,kose-y/pylearn2,TNick/pylearn2,lancezlin/pylearn2,theoryno3/pylearn2,aalmah/pylearn2,se4u/pylearn2,woozzu/pylearn2,daemonmaker/pylearn2,daemonmaker/pylearn2,goodfeli/pylearn2,JesseLivezey/pylearn2,nouiz/pylearn2,ddboline/pylearn2,CIFASIS/pylearn2,w1kke/pylearn2,lamblin/pylearn2,kastnerkyle/pylearn2,KennethPierce/pylearnk,mclaughlin6464/pylearn2,woozzu/pylearn2,woozzu/pylearn2,hantek/pylearn2,mkraemer67/pylearn2,cosmoharrigan/pylearn2,sandeepkbhat/pylearn2,fulmicoton/pylearn2,skearnes/pylearn2,ddboline/pylearn2,daemonmaker/pylearn2,fulmicoton/pylearn2,KennethPierce/pylearnk,jamessergeant/pylearn2,nouiz/pylearn2,mclaughlin6464/pylearn2,caidongyun/pylearn2,lamblin/pylearn2,caidongyun/pylearn2,ashhher3/pylearn2,sandeepkbhat/pylearn2,chrish42/pylearn,pkainz/pylearn2,se4u/pylearn2,se4u/pylearn2,goodfeli/pylearn2,Refefer/pylearn2,lunyang/pylearn2,msingh172/pylearn2,jeremyfix/pylearn2,junbochen/pylearn2,shiquanwang/pylearn2,matrogers/pylearn2,KennethPierce/pylearnk,w1kke/pylearn2,w1kke/pylearn2,shiquanwang/pylearn2,kastnerkyle/pylearn2,cosmoharrigan/pylearn2,jeremyfix/pylearn2,chrish42/pylearn,skearnes/pylearn2,nouiz/pylearn2,ashhher3/pylearn2,sandeepkbhat/pylearn2,lancezlin/pylearn2,aalmah/pylearn2,jamessergeant/pylearn2,pombredanne/pylearn2,lancezlin/pylearn2,TNick/pylearn2,ddboline/pylearn2,theoryno3/pylearn2,fishcorn/pylearn2,chrish42/pylearn,bartvm/pylearn2,chrish42/pylearn,hyqneuron/pylearn2-maxsom,aalmah/pylearn2,fishcorn/pylearn2,fulmicoton/pylearn2,hantek/pylearn2,mclaughlin6464/pylearn2,woozzu/pylearn2,JesseLivezey/plankton,pkainz/pylearn2,lisa-lab/pylearn2,pkainz/pylearn2,lisa-lab/pylearn2,jamessergeant/pylearn2,pkainz/pylearn2,JesseLivezey/plankton,sandeepkbhat/pylearn2,mkraemer67/pylearn2,KennethPierce/pylearnk,Refefer/pylearn2,lunyang/pylearn2,caidongyun/pylearn2,alexjc/pylearn2,bartvm/pylearn2,bartvm/pylearn2,lancezlin/pylearn2,JesseLivezey/plankton,jamessergeant/pylearn2,shiquanwang/pylearn2,lisa-lab/pylearn2,msingh172/pylearn2,kastnerkyle/pylearn2,goodfeli/pylearn2,goodfeli/pylearn2,fyffyt/pylearn2,mkraemer67/pylearn2,cosmoharrigan/pylearn2,msingh172/pylearn2,kastnerkyle/pylearn2,JesseLivezey/pylearn2,mclaughlin6464/pylearn2,fishcorn/pylearn2,jeremyfix/pylearn2,caidongyun/pylearn2,hyqneuron/pylearn2-maxsom,hyqneuron/pylearn2-maxsom,matrogers/pylearn2,theoryno3/pylearn2,CIFASIS/pylearn2,hantek/pylearn2,junbochen/pylearn2,Refefer/pylearn2,matrogers/pylearn2,JesseLivezey/plankton,ashhher3/pylearn2,Refefer/pylearn2,cosmoharrigan/pylearn2,fyffyt/pylearn2,junbochen/pylearn2,abergeron/pylearn2,fishcorn/pylearn2,abergeron/pylearn2,abergeron/pylearn2,skearnes/pylearn2,CIFASIS/pylearn2,msingh172/pylearn2,lunyang/pylearn2,ashhher3/pylearn2,aalmah/pylearn2,shiquanwang/pylearn2,junbochen/pylearn2,lamblin/pylearn2,matrogers/pylearn2,lunyang/pylearn2,mkraemer67/pylearn2,fyffyt/pylearn2,lisa-lab/pylearn2,hyqneuron/pylearn2-maxsom,alexjc/pylearn2,daemonmaker/pylearn2,fulmicoton/pylearn2,TNick/pylearn2,skearnes/pylearn2,kose-y/pylearn2,hantek/pylearn2,kose-y/pylearn2,fyffyt/pylearn2,nouiz/pylearn2,CIFASIS/pylearn2,w1kke/pylearn2,bartvm/pylearn2,pombredanne/pylearn2,lamblin/pylearn2,theoryno3/pylearn2,abergeron/pylearn2,alexjc/pylearn2,ddboline/pylearn2,JesseLivezey/pylearn2,TNick/pylearn2,kose-y/pylearn2,se4u/pylearn2,JesseLivezey/pylearn2,pombredanne/pylearn2,jeremyfix/pylearn2
|
b824cadfe61de19b5ff0f7391fe2b21b034c71b4
|
readdata.py
|
readdata.py
|
import os,sys
import json
import csv
import soundfile as sf
from scipy.fftpack import dct
from features import mfcc,fbank,sigproc,logfbank
def parseJSON(directory, filename):
data=[]
jsonMeta=[]
#open all files that end with .json in <path> directory
#and store certain attributes
try:
json_data=open(os.path.join(directory, filename))
except(IOError, RuntimeError ):
print("Cannot open ", filename)
data=json.load(json_data)
jsonMeta.append(data['filesize'])
jsonMeta.append(data['duration'])
jsonMeta.append(data['samplerate'])
jsonMeta.append(data['tags'])
jsonMeta.append(data['type'])
return jsonMeta
def parseCSV(directory, filename):
with open(os.path.join(directory, filename)) as csvfile:
csvMeta = csv.reader(csvfile, delimiter=",")
return list(csvMeta)[0]
#returns a vector with (currently) 4 features
def extractFeatures(directory,filename):
try:
data,samplerate=sf.read(os.path.join(directory, filename))
except (IOError, RuntimeError):
print("Could not open file ", filename)
print("Exiting...")
sys.exit()
#if file was opened succesfully proceed with feature extraction
#win is the size of window for mfcc extraction AND step size
win=data.size/(4*samplerate)
featureVector=mfcc(data,samplerate,win,win,1)
#featureVector is of type numpy_array
return featureVector
|
import os,sys
import json
import csv
from yaafelib import *
def parseJSON(directory, filename):
data=[]
jsonMeta=[]
#open all files that end with .json in <path> directory
#and store certain attributes
try:
json_data=open(os.path.join(directory, filename))
except(IOError, RuntimeError ):
print("Cannot open ", filename)
data=json.load(json_data)
jsonMeta.append(data['filesize'])
jsonMeta.append(data['duration'])
jsonMeta.append(data['samplerate'])
jsonMeta.append(data['tags'])
jsonMeta.append(data['type'])
return jsonMeta
def parseCSV(directory, filename):
with open(os.path.join(directory, filename)) as csvfile:
csvMeta = csv.reader(csvfile, delimiter=",")
return list(csvMeta)[0]
#returns a vector with 2 features
def extractFeatures(directory,filename):
# yaaaaafe
fp = FeaturePlan(sample_rate=44100, resample=True)
fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256 CepsNbCoeffs=1')
fp.addFeature('psp: PerceptualSpread blockSize=512 stepSize=256')
df = fp.getDataFlow()
engine = Engine()
engine.load(df)
afp = AudioFileProcessor()
afp.processFile(engine,os.path.join(directory, filename))
featureVector = engine.readAllOutputs()
return featureVector
|
Use yaafe for feature extraction
|
Use yaafe for feature extraction
Right now we extract two features (mfcc and perceptual spread)
but there is a lot of work to be done on the feature extraction method
so this is probably going to change
|
Python
|
mit
|
lOStres/JaFEM
|
b0d9a11292b6d6b17fe8b72d7735d26c47599187
|
linkatos/printer.py
|
linkatos/printer.py
|
def bot_says(channel, text, slack_client):
return slack_client.api_call("chat.postMessage",
channel=channel,
text=text,
as_user=True)
def compose_explanation(url):
return "If you would like {} to be stored please react to it with a :+1:, \
if you would like it to be ignored use :-1:".format(url)
def ask_confirmation(message, slack_client):
bot_says(message['channel'],
compose_explanation(message['url']),
slack_client)
def compose_url_list(url_cache_list):
if len(url_cache_list) == 0:
return "The list is empty"
list_message = "The list of urls to be confirmed is: \n"
for index in range(0, len(url_cache_list)):
extra = "{} - {} \n".format(index, url_cache_list[index]['url'])
list_message = list_message + extra
return list_message
def list_cached_urls(url_cache_list, channel, slack_client):
bot_says(channel,
compose_url_list(url_cache_list),
slack_client)
|
def bot_says(channel, text, slack_client):
return slack_client.api_call("chat.postMessage",
channel=channel,
text=text,
as_user=True)
def compose_explanation(url):
return "If you would like {} to be stored please react to it with a :+1:, \
if you would like it to be ignored use :-1:".format(url)
def ask_confirmation(message, slack_client):
bot_says(message['channel'],
compose_explanation(message['url']),
slack_client)
def compose_url_list(url_cache_list):
if len(url_cache_list) == 0:
return "The list is empty"
intro = "The list of urls to be confirmed is: \n"
options = ["{} - {}".format(i, v['url']) for i, v in enumerate(url_cache_list)]
return intro + "\n".join(options)
def list_cached_urls(url_cache_list, channel, slack_client):
bot_says(channel,
compose_url_list(url_cache_list),
slack_client)
|
Change iteration over a collection based on ags suggestion
|
refactor: Change iteration over a collection based on ags suggestion
|
Python
|
mit
|
iwi/linkatos,iwi/linkatos
|
bace1847cb9479bfeb271f38eef502f8d3ac240a
|
qipr/registry/forms/facet_form.py
|
qipr/registry/forms/facet_form.py
|
from registry.models import *
from operator import attrgetter
facet_Models = [
BigAim,
Category,
ClinicalArea,
ClinicalSetting,
Keyword,
SafetyTarget,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in facet_Models]
for model in facet_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.project_set.count(), reverse=True)
setattr(self, model.__name__, models)
|
from registry.models import *
from operator import attrgetter
facet_Models = [
BigAim,
ClinicalArea,
ClinicalSetting,
Keyword,
]
class FacetForm:
def __init__(self):
self.facet_categories = [model.__name__ for model in facet_Models]
for model in facet_Models:
models = list(model.objects.all())
models.sort(key=lambda m : m.project_set.count(), reverse=True)
setattr(self, model.__name__, models)
|
Remove facets from main registry project page
|
Remove facets from main registry project page
|
Python
|
apache-2.0
|
ctsit/qipr,ctsit/qipr,ctsit/qipr,ctsit/qipr,ctsit/qipr
|
4792515739c4ee671b86aeed39022ad8934d5d7c
|
artie/applications.py
|
artie/applications.py
|
import os
import re
import sys
import settings
triggers = set()
timers = set()
class BadApplicationError(Exception): pass
def trigger(expression):
def decorator(func):
triggers.add((re.compile(expression), func))
return decorator
def timer(time):
def decorator(func):
timers.add((time, func))
return decorator
sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
if filename != '__init__.py' and filename[-3:] == '.py':
if filename == 'triggers.py':
raise BadApplicationException(
"Application file can't be called triggers.py"
)
module = filename[:-3]
if module in sys.modules:
reload(sys.modules[module])
else:
__import__(module, locals(), globals())
|
import os
import re
import sys
import settings
triggers = set()
timers = set()
class BadApplicationError(Exception): pass
def trigger(expression):
def decorator(func):
triggers.add((re.compile(expression), func))
return decorator
def timer(time):
def decorator(func):
timers.add((time, func))
return decorator
sys.path.insert(0, settings.APPLICATION_PATH)
for filename in os.listdir(settings.APPLICATION_PATH):
if filename != '__init__.py' and filename.endswith('.py'):
if filename == 'triggers.py':
raise BadApplicationException(
"Application file can't be called triggers.py"
)
module = filename[:-3]
if module in sys.modules:
reload(sys.modules[module])
else:
__import__(module, locals(), globals())
|
Use `endswith` instead of string indeces.
|
Use `endswith` instead of string indeces.
|
Python
|
mit
|
sumeet/artie
|
46e1672bb0226ae8157d63a2d73edbfefcd644e9
|
src/test/test_google_maps.py
|
src/test/test_google_maps.py
|
import unittest
import googlemaps
from pyrules2.googlemaps import driving_roundtrip
COP = 'Copenhagen, Denmark'
MAD = 'Madrid, Spain'
BER = 'Berlin, Germany'
LIS = 'Lisbon, Portugal'
KM = 1000
class Test(unittest.TestCase):
def setUp(self):
# TODO: Sane way to import key
with open('/Users/nhc/git/pyrules/google-maps-api-key.txt') as f:
self.key = f.read()
def test_roundtrip(self):
c = googlemaps.Client(key=self.key)
r = driving_roundtrip(c, COP, MAD, BER, LIS)
self.assertGreater(r.distance(), 10000 * KM) # Bad
min_dist, best_itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives()))
self.assertLess(min_dist, 6500 * KM) # Good
self.assertListEqual([COP, LIS, MAD, BER, COP], best_itinerary)
if __name__ == "__main__":
unittest.main()
|
from os import environ
import unittest
import googlemaps
from pyrules2.googlemaps import driving_roundtrip
COP = 'Copenhagen, Denmark'
MAD = 'Madrid, Spain'
BER = 'Berlin, Germany'
LIS = 'Lisbon, Portugal'
KM = 1000
class Test(unittest.TestCase):
def setUp(self):
try:
key = environ['GOOGLE_MAPS_API_KEY']
except KeyError:
self.fail('This test requires an API key for Google Maps in the environment variable GOOGLE_MAPS_API_KEY')
self.client = googlemaps.Client(key=key)
def test_roundtrip(self):
r = driving_roundtrip(self.client, COP, MAD, BER, LIS)
self.assertGreater(r.distance(), 10000 * KM) # Bad
min_dist, itinerary = min(((a.distance(), a.itinerary()) for a in r.alternatives()))
self.assertLess(min_dist, 6500 * KM) # Good
self.assertListEqual([COP, LIS, MAD, BER, COP], itinerary)
if __name__ == "__main__":
unittest.main()
|
Move API key to environment variable
|
Move API key to environment variable
|
Python
|
mit
|
mr-niels-christensen/pyrules
|
51d1701bbc8b25bfd7b4f70c83fec7a46d965bef
|
fireplace/cards/brawl/decks_assemble.py
|
fireplace/cards/brawl/decks_assemble.py
|
"""
Decks Assemble
"""
from ..utils import *
# Tarnished Coin
class TB_011:
play = ManaThisTurn(CONTROLLER, 1)
|
"""
Decks Assemble
"""
from ..utils import *
# Deckbuilding Enchant
class TB_010:
events = (
OWN_TURN_BEGIN.on(Discover(CONTROLLER, RandomCollectible())),
Play(CONTROLLER).on(Shuffle(CONTROLLER, Copy(Play.CARD))),
OWN_TURN_END.on(Shuffle(CONTROLLER, FRIENDLY_HAND))
)
# Tarnished Coin
class TB_011:
play = ManaThisTurn(CONTROLLER, 1)
|
Implement Decks Assemble brawl rules on the Deckbuilding Enchant
|
Implement Decks Assemble brawl rules on the Deckbuilding Enchant
|
Python
|
agpl-3.0
|
Ragowit/fireplace,NightKev/fireplace,beheh/fireplace,smallnamespace/fireplace,Ragowit/fireplace,jleclanche/fireplace,smallnamespace/fireplace
|
3f4415bd551b52418a5999a1ea64e31d15097802
|
framework/transactions/commands.py
|
framework/transactions/commands.py
|
# -*- coding: utf-8 -*-
from framework.mongo import database as proxy_database
def begin(database=None):
database = database or proxy_database
database.command('beginTransaction')
def rollback(database=None):
database = database or proxy_database
database.command('rollbackTransaction')
def commit(database=None):
database = database or proxy_database
database.command('commitTransaction')
def show_live(database=None):
database = database or proxy_database
return database.command('showLiveTransactions')
|
# -*- coding: utf-8 -*-
from framework.mongo import database as proxy_database
from pymongo.errors import OperationFailure
def begin(database=None):
database = database or proxy_database
database.command('beginTransaction')
def rollback(database=None):
database = database or proxy_database
try:
database.command('rollbackTransaction')
except OperationFailure:
pass
def commit(database=None):
database = database or proxy_database
database.command('commitTransaction')
def show_live(database=None):
database = database or proxy_database
return database.command('showLiveTransactions')
|
Fix rollback transaction issue bby adding except for Operation Failure to rollback
|
Fix rollback transaction issue bby adding except for Operation Failure to rollback
|
Python
|
apache-2.0
|
erinspace/osf.io,doublebits/osf.io,brandonPurvis/osf.io,brandonPurvis/osf.io,kwierman/osf.io,GageGaskins/osf.io,hmoco/osf.io,amyshi188/osf.io,brandonPurvis/osf.io,felliott/osf.io,cosenal/osf.io,abought/osf.io,kch8qx/osf.io,haoyuchen1992/osf.io,himanshuo/osf.io,sbt9uc/osf.io,bdyetton/prettychart,samchrisinger/osf.io,monikagrabowska/osf.io,mluo613/osf.io,TomHeatwole/osf.io,bdyetton/prettychart,CenterForOpenScience/osf.io,bdyetton/prettychart,laurenrevere/osf.io,haoyuchen1992/osf.io,leb2dg/osf.io,binoculars/osf.io,njantrania/osf.io,wearpants/osf.io,ZobairAlijan/osf.io,TomHeatwole/osf.io,icereval/osf.io,mattclark/osf.io,monikagrabowska/osf.io,jeffreyliu3230/osf.io,samanehsan/osf.io,caseyrygt/osf.io,billyhunt/osf.io,adlius/osf.io,RomanZWang/osf.io,cldershem/osf.io,arpitar/osf.io,caneruguz/osf.io,mluo613/osf.io,chrisseto/osf.io,cslzchen/osf.io,caneruguz/osf.io,lyndsysimon/osf.io,emetsger/osf.io,ckc6cz/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,caseyrygt/osf.io,caseyrygt/osf.io,jolene-esposito/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,caneruguz/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,dplorimer/osf,lamdnhan/osf.io,billyhunt/osf.io,aaxelb/osf.io,sloria/osf.io,ticklemepierce/osf.io,crcresearch/osf.io,alexschiller/osf.io,SSJohns/osf.io,ZobairAlijan/osf.io,brianjgeiger/osf.io,MerlinZhang/osf.io,lyndsysimon/osf.io,alexschiller/osf.io,fabianvf/osf.io,saradbowman/osf.io,pattisdr/osf.io,arpitar/osf.io,mfraezz/osf.io,KAsante95/osf.io,petermalcolm/osf.io,DanielSBrown/osf.io,kushG/osf.io,billyhunt/osf.io,DanielSBrown/osf.io,icereval/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,Nesiehr/osf.io,laurenrevere/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,reinaH/osf.io,KAsante95/osf.io,fabianvf/osf.io,abought/osf.io,felliott/osf.io,GageGaskins/osf.io,SSJohns/osf.io,adlius/osf.io,Ghalko/osf.io,binoculars/osf.io,acshi/osf.io,revanthkolli/osf.io,adlius/osf.io,petermalcolm/osf.io,samanehsan/osf.io,lamdnhan/osf.io,asanfilippo7/osf.io,erinspace/osf.io,acshi/osf.io,mattclark/osf.io,mluo613/osf.io,jnayak1/osf.io,chennan47/osf.io,saradbowman/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,TomHeatwole/osf.io,himanshuo/osf.io,KAsante95/osf.io,chennan47/osf.io,acshi/osf.io,hmoco/osf.io,pattisdr/osf.io,revanthkolli/osf.io,zachjanicki/osf.io,ckc6cz/osf.io,icereval/osf.io,HarryRybacki/osf.io,kch8qx/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,jnayak1/osf.io,brandonPurvis/osf.io,rdhyee/osf.io,sbt9uc/osf.io,MerlinZhang/osf.io,sbt9uc/osf.io,danielneis/osf.io,caseyrollins/osf.io,doublebits/osf.io,SSJohns/osf.io,leb2dg/osf.io,aaxelb/osf.io,mluke93/osf.io,rdhyee/osf.io,kushG/osf.io,revanthkolli/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,adlius/osf.io,cosenal/osf.io,danielneis/osf.io,hmoco/osf.io,GaryKriebel/osf.io,haoyuchen1992/osf.io,barbour-em/osf.io,jeffreyliu3230/osf.io,njantrania/osf.io,sloria/osf.io,wearpants/osf.io,GageGaskins/osf.io,wearpants/osf.io,jolene-esposito/osf.io,alexschiller/osf.io,emetsger/osf.io,Nesiehr/osf.io,abought/osf.io,abought/osf.io,leb2dg/osf.io,zkraime/osf.io,Ghalko/osf.io,zachjanicki/osf.io,GaryKriebel/osf.io,ckc6cz/osf.io,Ghalko/osf.io,mfraezz/osf.io,rdhyee/osf.io,TomBaxter/osf.io,pattisdr/osf.io,mluo613/osf.io,ticklemepierce/osf.io,reinaH/osf.io,jolene-esposito/osf.io,cslzchen/osf.io,crcresearch/osf.io,barbour-em/osf.io,njantrania/osf.io,kch8qx/osf.io,barbour-em/osf.io,samchrisinger/osf.io,cwisecarver/osf.io,zamattiac/osf.io,monikagrabowska/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,hmoco/osf.io,fabianvf/osf.io,reinaH/osf.io,erinspace/osf.io,zamattiac/osf.io,TomHeatwole/osf.io,revanthkolli/osf.io,amyshi188/osf.io,brianjgeiger/osf.io,njantrania/osf.io,RomanZWang/osf.io,alexschiller/osf.io,lamdnhan/osf.io,monikagrabowska/osf.io,mluke93/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,cosenal/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,HarryRybacki/osf.io,Nesiehr/osf.io,doublebits/osf.io,GaryKriebel/osf.io,cldershem/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,binoculars/osf.io,lyndsysimon/osf.io,jnayak1/osf.io,wearpants/osf.io,cosenal/osf.io,fabianvf/osf.io,zkraime/osf.io,aaxelb/osf.io,mattclark/osf.io,jinluyuan/osf.io,rdhyee/osf.io,kushG/osf.io,jmcarp/osf.io,emetsger/osf.io,mluke93/osf.io,kushG/osf.io,ckc6cz/osf.io,jeffreyliu3230/osf.io,doublebits/osf.io,petermalcolm/osf.io,zachjanicki/osf.io,acshi/osf.io,billyhunt/osf.io,caseyrygt/osf.io,ticklemepierce/osf.io,reinaH/osf.io,Johnetordoff/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,emetsger/osf.io,amyshi188/osf.io,KAsante95/osf.io,ZobairAlijan/osf.io,jinluyuan/osf.io,petermalcolm/osf.io,TomBaxter/osf.io,ticklemepierce/osf.io,cldershem/osf.io,SSJohns/osf.io,jeffreyliu3230/osf.io,sloria/osf.io,GaryKriebel/osf.io,baylee-d/osf.io,kch8qx/osf.io,laurenrevere/osf.io,jinluyuan/osf.io,lyndsysimon/osf.io,amyshi188/osf.io,mluke93/osf.io,Ghalko/osf.io,himanshuo/osf.io,GageGaskins/osf.io,kwierman/osf.io,samanehsan/osf.io,jnayak1/osf.io,arpitar/osf.io,jmcarp/osf.io,zkraime/osf.io,zamattiac/osf.io,crcresearch/osf.io,himanshuo/osf.io,felliott/osf.io,jmcarp/osf.io,HarryRybacki/osf.io,mluo613/osf.io,HarryRybacki/osf.io,acshi/osf.io,bdyetton/prettychart,danielneis/osf.io,cldershem/osf.io,jolene-esposito/osf.io,jinluyuan/osf.io,kch8qx/osf.io,danielneis/osf.io,caneruguz/osf.io,asanfilippo7/osf.io,samanehsan/osf.io,cwisecarver/osf.io,billyhunt/osf.io,jmcarp/osf.io,CenterForOpenScience/osf.io,MerlinZhang/osf.io,sbt9uc/osf.io,brianjgeiger/osf.io,zkraime/osf.io,TomBaxter/osf.io,chrisseto/osf.io,chennan47/osf.io,mfraezz/osf.io,felliott/osf.io,doublebits/osf.io,brandonPurvis/osf.io,baylee-d/osf.io,MerlinZhang/osf.io,cwisecarver/osf.io,cslzchen/osf.io,kwierman/osf.io,DanielSBrown/osf.io,arpitar/osf.io,barbour-em/osf.io,chrisseto/osf.io,caseyrollins/osf.io,KAsante95/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,lamdnhan/osf.io,dplorimer/osf,dplorimer/osf,dplorimer/osf
|
43dca4ad969e44bb753c152e8f7768febea6fb68
|
quantecon/__init__.py
|
quantecon/__init__.py
|
"""
Import the main names to top level.
"""
from .compute_fp import compute_fixed_point
from .discrete_rv import DiscreteRV
from .ecdf import ECDF
from .estspec import smooth, periodogram, ar_periodogram
from .graph_tools import DiGraph
from .gridtools import cartesian, mlinspace
from .kalman import Kalman
from .lae import LAE
from .arma import ARMA
from .lqcontrol import LQ
from .lqnash import nnash
from .lss import LinearStateSpace
from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati
from .quadsums import var_quadratic_sum, m_quadratic_sum
#->Propose Delete From Top Level
from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working
from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package
#<-
from .rank_nullspace import rank_est, nullspace
from .robustlq import RBLQ
from . import quad as quad
from .util import searchsorted
#Add Version Attribute
from .version import version as __version__
|
"""
Import the main names to top level.
"""
try:
import numba
except:
raise ImportError("Cannot import numba from current anaconda distribution. Please run `conda install numba` to install the latest version.")
from .compute_fp import compute_fixed_point
from .discrete_rv import DiscreteRV
from .ecdf import ECDF
from .estspec import smooth, periodogram, ar_periodogram
from .graph_tools import DiGraph
from .gridtools import cartesian, mlinspace
from .kalman import Kalman
from .lae import LAE
from .arma import ARMA
from .lqcontrol import LQ
from .lqnash import nnash
from .lss import LinearStateSpace
from .matrix_eqn import solve_discrete_lyapunov, solve_discrete_riccati
from .quadsums import var_quadratic_sum, m_quadratic_sum
#->Propose Delete From Top Level
from .markov import MarkovChain, random_markov_chain, random_stochastic_matrix, gth_solve, tauchen #Promote to keep current examples working
from .markov import mc_compute_stationary, mc_sample_path #Imports that Should be Deprecated with markov package
#<-
from .rank_nullspace import rank_est, nullspace
from .robustlq import RBLQ
from . import quad as quad
from .util import searchsorted
#Add Version Attribute
from .version import version as __version__
|
Add Check for numba in base anaconda distribution. If not found issue meaningful warning message
|
Add Check for numba in base anaconda distribution. If not found issue meaningful warning message
|
Python
|
bsd-3-clause
|
oyamad/QuantEcon.py,QuantEcon/QuantEcon.py,QuantEcon/QuantEcon.py,oyamad/QuantEcon.py
|
9d3d2beab6ec06ce13126b818029258a66f450f6
|
babelfish/__init__.py
|
babelfish/__init__.py
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 the BabelFish authors. All rights reserved.
# Use of this source code is governed by the 3-clause BSD license
# that can be found in the LICENSE file.
#
__title__ = 'babelfish'
__version__ = '0.4.1'
__author__ = 'Antoine Bertin'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013 the BabelFish authors'
from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter,
CountryReverseConverter)
from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country
from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError
from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language
from .script import SCRIPTS, Script
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 the BabelFish authors. All rights reserved.
# Use of this source code is governed by the 3-clause BSD license
# that can be found in the LICENSE file.
#
__title__ = 'babelfish'
__version__ = '0.4.1'
__author__ = 'Antoine Bertin'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013 the BabelFish authors'
from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter,
CountryReverseConverter)
from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country
from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError
from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language
from .script import SCRIPTS, SCRIPT_MATRIX, Script
|
Add SCRIPT_MATRIX to babelfish module imports
|
Add SCRIPT_MATRIX to babelfish module imports
|
Python
|
bsd-3-clause
|
Diaoul/babelfish
|
2ad21d67ccde2e25ea5c6d64cdee36dbc6425cbc
|
construct/tests/test_mapping.py
|
construct/tests/test_mapping.py
|
import unittest
from construct import Flag
class TestFlag(unittest.TestCase):
def test_parse(self):
flag = Flag("flag")
self.assertTrue(flag.parse("\x01"))
def test_parse_flipped(self):
flag = Flag("flag", truth=0, falsehood=1)
self.assertFalse(flag.parse("\x01"))
def test_build(self):
flag = Flag("flag")
self.assertEqual(flag.build(True), "\x01")
def test_build_flipped(self):
flag = Flag("flag", truth=0, falsehood=1)
self.assertEqual(flag.build(True), "\x00")
|
import unittest
from construct import Flag
class TestFlag(unittest.TestCase):
def test_parse(self):
flag = Flag("flag")
self.assertTrue(flag.parse("\x01"))
def test_parse_flipped(self):
flag = Flag("flag", truth=0, falsehood=1)
self.assertFalse(flag.parse("\x01"))
def test_parse_default(self):
flag = Flag("flag")
self.assertFalse(flag.parse("\x02"))
def test_parse_default_true(self):
flag = Flag("flag", default=True)
self.assertTrue(flag.parse("\x02"))
def test_build(self):
flag = Flag("flag")
self.assertEqual(flag.build(True), "\x01")
def test_build_flipped(self):
flag = Flag("flag", truth=0, falsehood=1)
self.assertEqual(flag.build(True), "\x00")
|
Add a couple more Flag tests.
|
tests: Add a couple more Flag tests.
|
Python
|
mit
|
riggs/construct,mosquito/construct,gkonstantyno/construct,MostAwesomeDude/construct,0000-bigtree/construct,riggs/construct,mosquito/construct,0000-bigtree/construct,gkonstantyno/construct,MostAwesomeDude/construct
|
b2dd21b2240eec28881d6162f9e35b16df906219
|
arris_cli.py
|
arris_cli.py
|
#!/usr/bin/env python
# CLI frontend to Arris modem stat scraper library arris_scraper.py
import argparse
import arris_scraper
import json
import pprint
default_url = 'http://192.168.100.1/cgi-bin/status_cgi'
parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.')
parser.add_argument('-f', '--format', choices=['ascii', 'json', 'pprint'], default='ascii', dest='output_format', help='output format')
parser.add_argument('-u', '--url', default=default_url, help='url of modem status page')
args = parser.parse_args()
if args.output_format == 'ascii':
print("ASCII output not yet implemented")
elif args.output_format == 'json':
result = arris_scraper.get_status(args.url)
print(json.dumps(result))
elif args.output_format == 'pprint':
result = arris_scraper.get_status(args.url)
pprint.pprint(result)
else:
print("How in the world did you get here?")
|
#!/usr/bin/env python
# CLI frontend to Arris modem stat scraper library arris_scraper.py
import argparse
import arris_scraper
import json
import pprint
default_url = 'http://192.168.100.1/cgi-bin/status_cgi'
parser = argparse.ArgumentParser(description='CLI tool to scrape information from Arris cable modem status pages.')
parser.add_argument('-f',
'--format',
choices=['ascii', 'json', 'pprint'],
default='ascii', dest='output_format',
help='output format')
parser.add_argument('-u',
'--url',
default=default_url,
help='url of modem status page')
args = parser.parse_args()
if args.output_format == 'ascii':
print("ASCII output not yet implemented")
elif args.output_format == 'json':
result = arris_scraper.get_status(args.url)
print(json.dumps(result))
elif args.output_format == 'pprint':
result = arris_scraper.get_status(args.url)
pprint.pprint(result)
else:
print("How in the world did you get here?")
|
Tweak formatting of argparse section to minimize lines extending past 80 chars.
|
Tweak formatting of argparse section to minimize lines extending past 80 chars.
|
Python
|
mit
|
wolrah/arris_stats
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.