commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
4d7dff1c335a49d13d420f3c62b1a2d2382351dd
|
trajprocess/tests/utils.py
|
trajprocess/tests/utils.py
|
"""Tools for setting up a fake directory structure for processing."""
from tempfile import mkdtemp
import os
import shutil
import json
from pkg_resources import resource_filename
def write_run_clone(proj, run, clone, gens=None):
if gens is None:
gens = [0, 1]
rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run,
clone=clone)
os.makedirs(rc, exist_ok=True)
tpr_fn = resource_filename(__name__, 'topol.tpr')
shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc))
for gen in gens:
shutil.copy(resource_filename(__name__,
"traj_comp.part{:04d}.xtc".format(
gen + 1)),
"{}/frame{}.xtc".format(rc, gen))
def generate_project():
global wd
wd = mkdtemp()
os.chdir(wd)
write_run_clone(1234, 5, 7)
write_run_clone(1234, 6, 0)
with open('structs-p1234.json', 'w') as f:
json.dump({
5: {'struct': 'stru1', 'fext': 'pdb'},
6: {'struct': 'stru2', 'fext': 'pdb'}
}, f)
def cleanup():
shutil.rmtree(wd)
|
"""Tools for setting up a fake directory structure for processing."""
from tempfile import mkdtemp
import os
import shutil
import json
from pkg_resources import resource_filename
# command for generating reference data:
# gmx mdrun -nsteps 5000 -s frame0.tpr -cpi -noappend
#
# Do that three times.
def write_run_clone(proj, run, clone, gens=None):
if gens is None:
gens = [0, 1]
rc = "data/PROJ{proj}/RUN{run}/CLONE{clone}/".format(proj=proj, run=run,
clone=clone)
os.makedirs(rc, exist_ok=True)
tpr_fn = resource_filename(__name__, 'topol.tpr')
shutil.copy(tpr_fn, "{}/frame0.tpr".format(rc))
for gen in gens:
shutil.copy(resource_filename(__name__,
"traj_comp.part{:04d}.xtc".format(
gen + 1)),
"{}/frame{}.xtc".format(rc, gen))
def generate_project():
global wd
wd = mkdtemp()
os.chdir(wd)
write_run_clone(1234, 5, 7)
write_run_clone(1234, 6, 0)
with open('structs-p1234.json', 'w') as f:
json.dump({
5: {'struct': 'stru1', 'fext': 'pdb'},
6: {'struct': 'stru2', 'fext': 'pdb'}
}, f)
def cleanup():
shutil.rmtree(wd)
|
Add note about how to generate trajectories
|
Add note about how to generate trajectories
|
Python
|
mit
|
mpharrigan/trajprocess,mpharrigan/trajprocess
|
34d1bbc36f7d5c66000eec0d6debfd3ede74366f
|
bottle_auth/custom.py
|
bottle_auth/custom.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from bottle import redirect
log = logging.getLogger('bottle-auth.custom')
class Custom(object):
def __init__(self, login_url="/login",
callback_url="http://127.0.0.1:8000"):
self.login_url = login_url
self.callback_url = callback_url
def redirect(self, environ):
return redirect(self.login_url)
def get_user(self, environ):
session = environ.get('beaker.session')
if session.get("username", None) and session.get("apikey", None):
return session
return {}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from bottle import redirect
log = logging.getLogger('bottle-auth.custom')
class Custom(object):
def __init__(self, login_url="/login",
callback_url="http://127.0.0.1:8000"):
self.login_url = login_url
self.callback_url = callback_url
def redirect(self, environ):
return redirect(self.login_url)
def get_user(self, environ):
session = environ.get('beaker.session')
if session.get("username", None) and session.get("apikey", None):
return session
self.redirect(environ)
|
Fix Custom class, user exit in beaker.session redirect to login page
|
Fix Custom class, user exit in beaker.session
redirect to login page
|
Python
|
mit
|
avelino/bottle-auth
|
66edf9f04c1b23681fae4234a8b297868e66b7aa
|
osmaxx-py/osmaxx/excerptexport/models/excerpt.py
|
osmaxx-py/osmaxx/excerptexport/models/excerpt.py
|
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import gettext_lazy as _
class Excerpt(models.Model):
name = models.CharField(max_length=128, verbose_name=_('name'), blank=False)
is_public = models.BooleanField(default=False, verbose_name=_('is public'))
is_active = models.BooleanField(default=True, verbose_name=_('is active'))
owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner'))
bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry'))
@property
def type_of_geometry(self):
return self.bounding_geometry.type_of_geometry
@property
def extent(self):
return self.bounding_geometry.extent
def __str__(self):
return self.name
def _active_excerpts():
return Excerpt.objects.filter(is_active=True).filter(
bounding_geometry__bboxboundinggeometry__isnull=False
)
def private_user_excerpts(user):
return _active_excerpts().filter(is_public=False, owner=user)
def public_user_excerpts(user):
return _active_excerpts().filter(is_public=True, owner=user)
def other_users_public_excerpts(user):
return _active_excerpts().filter(is_public=True).exclude(owner=user)
|
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import gettext_lazy as _
class Excerpt(models.Model):
name = models.CharField(max_length=128, verbose_name=_('name'))
is_public = models.BooleanField(default=False, verbose_name=_('is public'))
is_active = models.BooleanField(default=True, verbose_name=_('is active'))
owner = models.ForeignKey(User, related_name='excerpts', verbose_name=_('owner'))
bounding_geometry = models.OneToOneField('BoundingGeometry', verbose_name=_('bounding geometry'))
@property
def type_of_geometry(self):
return self.bounding_geometry.type_of_geometry
@property
def extent(self):
return self.bounding_geometry.extent
def __str__(self):
return self.name
def _active_excerpts():
return Excerpt.objects.filter(is_active=True).filter(
bounding_geometry__bboxboundinggeometry__isnull=False
)
def private_user_excerpts(user):
return _active_excerpts().filter(is_public=False, owner=user)
def public_user_excerpts(user):
return _active_excerpts().filter(is_public=True, owner=user)
def other_users_public_excerpts(user):
return _active_excerpts().filter(is_public=True).exclude(owner=user)
|
Remove value which is already default
|
Remove value which is already default
|
Python
|
mit
|
geometalab/osmaxx,geometalab/drf-utm-zone-info,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/drf-utm-zone-info,geometalab/osmaxx
|
cddb0309eaa0c31569f791b8b9f2c8666b65b8b4
|
openrcv/test/test_models.py
|
openrcv/test/test_models.py
|
from openrcv.models import ContestInfo
from openrcv.utiltest.helpers import UnitCase
class ContestInfoTest(UnitCase):
def test_get_candidates(self):
contest = ContestInfo()
contest.candidates = ["Alice", "Bob", "Carl"]
self.assertEqual(contest.get_candidates(), range(1, 4))
|
from textwrap import dedent
from openrcv.models import BallotsResource, BallotStreamResource, ContestInfo
from openrcv.utils import StringInfo
from openrcv.utiltest.helpers import UnitCase
class BallotsResourceTest(UnitCase):
def test(self):
ballots = [1, 3, 2]
ballot_resource = BallotsResource(ballots)
with ballot_resource() as ballots:
ballots = list(ballots)
self.assertEqual(ballots, [1, 3, 2])
class BallotStreamResourceTest(UnitCase):
def test(self):
ballot_info = StringInfo("2 1 2\n3 1\n")
ballot_resource = BallotStreamResource(ballot_info)
with ballot_resource() as ballots:
ballots = list(ballots)
self.assertEqual(ballots, ['2 1 2\n', '3 1\n'])
def test_parse_default(self):
ballot_info = StringInfo("2 1 2\n3 1\n")
parse = lambda line: line.strip()
ballot_resource = BallotStreamResource(ballot_info, parse=parse)
with ballot_resource() as ballots:
ballots = list(ballots)
self.assertEqual(ballots, ['2 1 2', '3 1'])
class ContestInfoTest(UnitCase):
def test_get_candidates(self):
contest = ContestInfo()
contest.candidates = ["Alice", "Bob", "Carl"]
self.assertEqual(contest.get_candidates(), range(1, 4))
|
Add tests for ballots resource classes.
|
Add tests for ballots resource classes.
|
Python
|
mit
|
cjerdonek/open-rcv,cjerdonek/open-rcv
|
771f429433d201463ab94439870d1bc803022722
|
nap/auth.py
|
nap/auth.py
|
from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
def permit(test_func, response_class=http.Forbidden):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
return response_class()
return _wrapped_view
return decorator
permit_logged_in = permit(
lambda self, *args, **kwargs: self.request.user.is_authenticated()
)
permit_staff = permit(
lambda self, *args, **kwargs: self.request.user.is_staff
)
def permit_groups(*groups):
def in_groups(self, *args, **kwargs):
return self.request.user.groups.filter(name__in=groups).exists()
return permit(in_groups)
|
from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
def permit(test_func, response_class=http.Forbidden):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
return response_class()
return _wrapped_view
return decorator
# Helpers for people wanting to control response class
def test_logged_in(self, *args, **kwargs):
return self.request.user.is_authenticated()
def test_staff(self, *args, **kwargs):
return self.request.user.is_staff
permit_logged_in = permit(test_logged_in)
permit_staff = permit(test_staff)
def permit_groups(*groups, response_class=http.Forbidden):
def in_groups(self, *args, **kwargs):
return self.request.user.groups.filter(name__in=groups).exists()
return permit(in_groups, response_class=response_class)
|
Make it DRYer for people
|
Make it DRYer for people
|
Python
|
bsd-3-clause
|
limbera/django-nap
|
483800541ee66de006392c361e06177bc9db4784
|
kboard/board/urls.py
|
kboard/board/urls.py
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/like/$', views.like_post, name='like_post'),
url(r'^(?P<post_id>\d+)/edit/$', views.edit_post, name='edit_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/(?P<comment_id>\d+)/delete/$', views.delete_comment, name='delete_comment'),
]
|
# Created by JHJ on 2016. 10. 5.
from django.conf.urls import url
from django.conf import settings
from django.conf.urls.static import static
from . import views
app_name = 'board'
urlpatterns = [
url(r'^$', views.board_list, name='board_list'),
url(r'^(?P<board_slug>[-a-z]+)/$', views.post_list, name='post_list'),
url(r'^(?P<board_slug>[-a-z]+)/new/$', views.new_post, name='new_post'),
url(r'^(?P<post_id>\d+)/delete/$', views.delete_post, name='delete_post'),
url(r'^(?P<post_id>\d+)/like/$', views.like_post, name='like_post'),
url(r'^(?P<post_id>\d+)/edit/$', views.edit_post, name='edit_post'),
url(r'^(?P<post_id>\d+)/$', views.view_post, name='view_post'),
url(r'^(?P<post_id>\d+)/comment/new/$', views.new_comment, name='new_comment'),
url(r'^(?P<post_id>\d+)/comment/(?P<comment_id>\d+)/delete/$', views.delete_comment, name='delete_comment'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Set url to serve uploaded file during development
|
Set url to serve uploaded file during development
|
Python
|
mit
|
guswnsxodlf/k-board,kboard/kboard,cjh5414/kboard,kboard/kboard,hyesun03/k-board,hyesun03/k-board,hyesun03/k-board,guswnsxodlf/k-board,cjh5414/kboard,darjeeling/k-board,cjh5414/kboard,kboard/kboard,guswnsxodlf/k-board
|
e09214068a12768e9aafd04363d353359ca7e1f3
|
src/actions/actions/timetracking/__init__.py
|
src/actions/actions/timetracking/__init__.py
|
#!/usr/bin/python
######################################################################
# Cloud Routes Bridge
# -------------------------------------------------------------------
# Actions Module
######################################################################
import stathat
import time
import syslog
def action(**kwargs):
''' This method is called to action a reaction '''
updateStathat(kwargs['jdata'])
return True
def updateStathat(jdata):
''' This method will be called to update a stathat Statistic '''
ez_key = jdata['time_tracking']['ez_key']
stat_name = "[%s] End to End Monitor transaction time" % jdata[
'time_tracking']['env']
value = time.time() - jdata['time_tracking']['control']
stathat.ez_value(ez_key, stat_name, value)
line = "timetracker: Sent stat to StatHat for %s" % jdata['cid']
syslog.syslog(syslog.LOG_INFO, line)
|
#!/usr/bin/python
######################################################################
# Cloud Routes Bridge
# -------------------------------------------------------------------
# Actions Module
######################################################################
import stathat
import time
def action(**kwargs):
''' This method is called to action a reaction '''
logger = kwargs['logger']
updateStathat(kwargs['jdata'], logger)
return True
def updateStathat(jdata, logger):
''' This method will be called to update a stathat Statistic '''
ez_key = jdata['time_tracking']['ez_key']
stat_name = "[%s] End to End Monitor transaction time" % jdata[
'time_tracking']['env']
value = time.time() - jdata['time_tracking']['control']
stathat.ez_value(ez_key, stat_name, value)
line = "timetracker: Sent stat to StatHat for %s" % jdata['cid']
logger.info(line)
|
Convert reactions syslog to logger: timetracking
|
Convert reactions syslog to logger: timetracking
|
Python
|
unknown
|
dethos/cloudroutes-service,asm-products/cloudroutes-service,rbramwell/runbook,codecakes/cloudroutes-service,codecakes/cloudroutes-service,asm-products/cloudroutes-service,madflojo/cloudroutes-service,Runbook/runbook,codecakes/cloudroutes-service,asm-products/cloudroutes-service,codecakes/cloudroutes-service,madflojo/cloudroutes-service,rbramwell/runbook,madflojo/cloudroutes-service,Runbook/runbook,rbramwell/runbook,rbramwell/runbook,Runbook/runbook,dethos/cloudroutes-service,madflojo/cloudroutes-service,dethos/cloudroutes-service,asm-products/cloudroutes-service,Runbook/runbook,dethos/cloudroutes-service
|
e399c0b1988ed8b2981ddc684a0a3652a73ea31e
|
pavelib/utils/test/utils.py
|
pavelib/utils/test/utils.py
|
"""
Helper functions for test tasks
"""
from paver.easy import sh, task
from pavelib.utils.envs import Env
__test__ = False # do not collect
@task
def clean_test_files():
"""
Clean fixture files used by tests and .pyc files
"""
sh("git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads")
sh("find . -type f -name \"*.pyc\" -delete")
sh("rm -rf test_root/log/auto_screenshots/*")
def clean_dir(directory):
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
sh('find {dir} -type f -delete'.format(dir=directory))
@task
def clean_reports_dir():
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
reports_dir = Env.REPORT_DIR.makedirs_p()
clean_dir(reports_dir)
@task
def clean_mongo():
"""
Clean mongo test databases
"""
sh("mongo {repo_root}/scripts/delete-mongo-test-dbs.js".format(repo_root=Env.REPO_ROOT))
|
"""
Helper functions for test tasks
"""
from paver.easy import sh, task
from pavelib.utils.envs import Env
__test__ = False # do not collect
@task
def clean_test_files():
"""
Clean fixture files used by tests and .pyc files
"""
sh("git clean -fqdx test_root/logs test_root/data test_root/staticfiles test_root/uploads")
sh("find . -type f -name \"*.pyc\" -delete")
sh("rm -rf test_root/log/auto_screenshots/*")
sh("rm -rf /tmp/mako_[cl]ms")
def clean_dir(directory):
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
sh('find {dir} -type f -delete'.format(dir=directory))
@task
def clean_reports_dir():
"""
Clean coverage files, to ensure that we don't use stale data to generate reports.
"""
# We delete the files but preserve the directory structure
# so that coverage.py has a place to put the reports.
reports_dir = Env.REPORT_DIR.makedirs_p()
clean_dir(reports_dir)
@task
def clean_mongo():
"""
Clean mongo test databases
"""
sh("mongo {repo_root}/scripts/delete-mongo-test-dbs.js".format(repo_root=Env.REPO_ROOT))
|
Clean out the mako temp dirs before running tests
|
Clean out the mako temp dirs before running tests
|
Python
|
agpl-3.0
|
zofuthan/edx-platform,eemirtekin/edx-platform,TeachAtTUM/edx-platform,synergeticsedx/deployment-wipro,doismellburning/edx-platform,OmarIthawi/edx-platform,jolyonb/edx-platform,appliedx/edx-platform,philanthropy-u/edx-platform,msegado/edx-platform,pepeportela/edx-platform,JCBarahona/edX,lduarte1991/edx-platform,jamesblunt/edx-platform,vasyarv/edx-platform,valtech-mooc/edx-platform,jonathan-beard/edx-platform,beacloudgenius/edx-platform,DefyVentures/edx-platform,sameetb-cuelogic/edx-platform-test,rismalrv/edx-platform,zubair-arbi/edx-platform,ampax/edx-platform-backup,chauhanhardik/populo,ovnicraft/edx-platform,synergeticsedx/deployment-wipro,antoviaque/edx-platform,Lektorium-LLC/edx-platform,marcore/edx-platform,Endika/edx-platform,vikas1885/test1,unicri/edx-platform,Edraak/edraak-platform,Endika/edx-platform,ampax/edx-platform,motion2015/edx-platform,EDUlib/edx-platform,mtlchun/edx,shabab12/edx-platform,ESOedX/edx-platform,chrisndodge/edx-platform,antonve/s4-project-mooc,CredoReference/edx-platform,dkarakats/edx-platform,cecep-edu/edx-platform,polimediaupv/edx-platform,chauhanhardik/populo,4eek/edx-platform,wwj718/ANALYSE,ahmadiga/min_edx,appsembler/edx-platform,antonve/s4-project-mooc,gsehub/edx-platform,jazkarta/edx-platform-for-isc,xuxiao19910803/edx,ahmedaljazzar/edx-platform,dkarakats/edx-platform,LearnEra/LearnEraPlaftform,kursitet/edx-platform,shubhdev/edxOnBaadal,dsajkl/123,defance/edx-platform,angelapper/edx-platform,edx/edx-platform,devs1991/test_edx_docmode,jamiefolsom/edx-platform,jzoldak/edx-platform,doismellburning/edx-platform,nanolearningllc/edx-platform-cypress,xuxiao19910803/edx-platform,franosincic/edx-platform,DefyVentures/edx-platform,ferabra/edx-platform,cselis86/edx-platform,hastexo/edx-platform,UXE/local-edx,mjirayu/sit_academy,valtech-mooc/edx-platform,dsajkl/reqiop,cecep-edu/edx-platform,marcore/edx-platform,hamzehd/edx-platform,atsolakid/edx-platform,bdero/edx-platform,mitocw/edx-platform,philanthropy-u/edx-platform,SivilTaram/edx-platform,playm2mboy/edx-platform,zadgroup/edx-platform,cyanna/edx-platform,cyanna/edx-platform,olexiim/edx-platform,don-github/edx-platform,Stanford-Online/edx-platform,xinjiguaike/edx-platform,fintech-circle/edx-platform,benpatterson/edx-platform,adoosii/edx-platform,xuxiao19910803/edx-platform,doismellburning/edx-platform,ovnicraft/edx-platform,motion2015/a3,naresh21/synergetics-edx-platform,tiagochiavericosta/edx-platform,shurihell/testasia,dcosentino/edx-platform,AkA84/edx-platform,ubc/edx-platform,andyzsf/edx,gsehub/edx-platform,xinjiguaike/edx-platform,ubc/edx-platform,JCBarahona/edX,ampax/edx-platform,4eek/edx-platform,TeachAtTUM/edx-platform,Edraak/edx-platform,andyzsf/edx,edx/edx-platform,cognitiveclass/edx-platform,EDUlib/edx-platform,AkA84/edx-platform,olexiim/edx-platform,cognitiveclass/edx-platform,mahendra-r/edx-platform,franosincic/edx-platform,miptliot/edx-platform,jruiperezv/ANALYSE,jolyonb/edx-platform,vikas1885/test1,Lektorium-LLC/edx-platform,simbs/edx-platform,BehavioralInsightsTeam/edx-platform,jamesblunt/edx-platform,Softmotions/edx-platform,amir-qayyum-khan/edx-platform,unicri/edx-platform,waheedahmed/edx-platform,xinjiguaike/edx-platform,rhndg/openedx,arifsetiawan/edx-platform,etzhou/edx-platform,xinjiguaike/edx-platform,MakeHer/edx-platform,gsehub/edx-platform,jelugbo/tundex,jazkarta/edx-platform-for-isc,ovnicraft/edx-platform,hamzehd/edx-platform,xingyepei/edx-platform,ferabra/edx-platform,Kalyzee/edx-platform,LearnEra/LearnEraPlaftform,procangroup/edx-platform,CredoReference/edx-platform,appsembler/edx-platform,benpatterson/edx-platform,kxliugang/edx-platform,jazztpt/edx-platform,dsajkl/123,wwj718/edx-platform,Kalyzee/edx-platform,ahmadio/edx-platform,J861449197/edx-platform,jazztpt/edx-platform,Edraak/circleci-edx-platform,utecuy/edx-platform,zerobatu/edx-platform,beni55/edx-platform,knehez/edx-platform,edry/edx-platform,zubair-arbi/edx-platform,bigdatauniversity/edx-platform,kursitet/edx-platform,rhndg/openedx,pabloborrego93/edx-platform,simbs/edx-platform,alexthered/kienhoc-platform,cpennington/edx-platform,gymnasium/edx-platform,zadgroup/edx-platform,fly19890211/edx-platform,martynovp/edx-platform,hamzehd/edx-platform,shurihell/testasia,Shrhawk/edx-platform,zubair-arbi/edx-platform,LearnEra/LearnEraPlaftform,alu042/edx-platform,chand3040/cloud_that,ahmedaljazzar/edx-platform,ferabra/edx-platform,ubc/edx-platform,shubhdev/edx-platform,fintech-circle/edx-platform,zerobatu/edx-platform,waheedahmed/edx-platform,jonathan-beard/edx-platform,shabab12/edx-platform,ak2703/edx-platform,nanolearningllc/edx-platform-cypress-2,jzoldak/edx-platform,franosincic/edx-platform,proversity-org/edx-platform,UOMx/edx-platform,defance/edx-platform,leansoft/edx-platform,Edraak/circleci-edx-platform,cselis86/edx-platform,DNFcode/edx-platform,mitocw/edx-platform,SravanthiSinha/edx-platform,motion2015/a3,devs1991/test_edx_docmode,synergeticsedx/deployment-wipro,synergeticsedx/deployment-wipro,motion2015/edx-platform,chauhanhardik/populo_2,xingyepei/edx-platform,beni55/edx-platform,nttks/jenkins-test,DefyVentures/edx-platform,knehez/edx-platform,rismalrv/edx-platform,nanolearningllc/edx-platform-cypress-2,chauhanhardik/populo_2,solashirai/edx-platform,ESOedX/edx-platform,etzhou/edx-platform,doismellburning/edx-platform,jamiefolsom/edx-platform,halvertoluke/edx-platform,peterm-itr/edx-platform,shubhdev/openedx,marcore/edx-platform,inares/edx-platform,vismartltd/edx-platform,chauhanhardik/populo,jamesblunt/edx-platform,JCBarahona/edX,bdero/edx-platform,lduarte1991/edx-platform,openfun/edx-platform,ahmedaljazzar/edx-platform,arbrandes/edx-platform,a-parhom/edx-platform,Semi-global/edx-platform,B-MOOC/edx-platform,shubhdev/openedx,chrisndodge/edx-platform,4eek/edx-platform,SivilTaram/edx-platform,jbassen/edx-platform,kamalx/edx-platform,longmen21/edx-platform,CredoReference/edx-platform,waheedahmed/edx-platform,doganov/edx-platform,ZLLab-Mooc/edx-platform,jbzdak/edx-platform,openfun/edx-platform,jazkarta/edx-platform,philanthropy-u/edx-platform,adoosii/edx-platform,proversity-org/edx-platform,zofuthan/edx-platform,bitifirefly/edx-platform,OmarIthawi/edx-platform,eemirtekin/edx-platform,Edraak/edx-platform,antoviaque/edx-platform,jamiefolsom/edx-platform,shubhdev/edxOnBaadal,eduNEXT/edunext-platform,chand3040/cloud_that,shabab12/edx-platform,fly19890211/edx-platform,Ayub-Khan/edx-platform,mahendra-r/edx-platform,ahmadio/edx-platform,deepsrijit1105/edx-platform,inares/edx-platform,shurihell/testasia,eemirtekin/edx-platform,kamalx/edx-platform,kmoocdev/edx-platform,etzhou/edx-platform,don-github/edx-platform,amir-qayyum-khan/edx-platform,mcgachey/edx-platform,beni55/edx-platform,DefyVentures/edx-platform,chudaol/edx-platform,sameetb-cuelogic/edx-platform-test,polimediaupv/edx-platform,RPI-OPENEDX/edx-platform,nanolearningllc/edx-platform-cypress-2,cselis86/edx-platform,caesar2164/edx-platform,leansoft/edx-platform,longmen21/edx-platform,itsjeyd/edx-platform,franosincic/edx-platform,prarthitm/edxplatform,jamiefolsom/edx-platform,mushtaqak/edx-platform,jswope00/griffinx,ahmadiga/min_edx,cyanna/edx-platform,appsembler/edx-platform,adoosii/edx-platform,lduarte1991/edx-platform,ampax/edx-platform,iivic/BoiseStateX,polimediaupv/edx-platform,nttks/edx-platform,edx-solutions/edx-platform,DNFcode/edx-platform,stvstnfrd/edx-platform,jelugbo/tundex,motion2015/edx-platform,nttks/jenkins-test,jazkarta/edx-platform,IndonesiaX/edx-platform,vasyarv/edx-platform,Endika/edx-platform,philanthropy-u/edx-platform,10clouds/edx-platform,jruiperezv/ANALYSE,Kalyzee/edx-platform,ak2703/edx-platform,fly19890211/edx-platform,fly19890211/edx-platform,Shrhawk/edx-platform,beni55/edx-platform,nikolas/edx-platform,alexthered/kienhoc-platform,MSOpenTech/edx-platform,appliedx/edx-platform,waheedahmed/edx-platform,openfun/edx-platform,marcore/edx-platform,Lektorium-LLC/edx-platform,edx-solutions/edx-platform,y12uc231/edx-platform,wwj718/edx-platform,don-github/edx-platform,eemirtekin/edx-platform,deepsrijit1105/edx-platform,mushtaqak/edx-platform,a-parhom/edx-platform,shashank971/edx-platform,vikas1885/test1,martynovp/edx-platform,zerobatu/edx-platform,Ayub-Khan/edx-platform,chand3040/cloud_that,shubhdev/edx-platform,tiagochiavericosta/edx-platform,MakeHer/edx-platform,vasyarv/edx-platform,jzoldak/edx-platform,Edraak/circleci-edx-platform,analyseuc3m/ANALYSE-v1,kxliugang/edx-platform,Endika/edx-platform,jonathan-beard/edx-platform,Shrhawk/edx-platform,TeachAtTUM/edx-platform,antonve/s4-project-mooc,ahmadio/edx-platform,chudaol/edx-platform,don-github/edx-platform,andyzsf/edx,romain-li/edx-platform,iivic/BoiseStateX,hastexo/edx-platform,unicri/edx-platform,stvstnfrd/edx-platform,dsajkl/reqiop,chand3040/cloud_that,utecuy/edx-platform,shashank971/edx-platform,shashank971/edx-platform,JioEducation/edx-platform,SivilTaram/edx-platform,zhenzhai/edx-platform,benpatterson/edx-platform,ovnicraft/edx-platform,bigdatauniversity/edx-platform,nanolearningllc/edx-platform-cypress-2,itsjeyd/edx-platform,chrisndodge/edx-platform,Stanford-Online/edx-platform,alu042/edx-platform,Kalyzee/edx-platform,prarthitm/edxplatform,10clouds/edx-platform,xingyepei/edx-platform,nanolearningllc/edx-platform-cypress,jbzdak/edx-platform,LearnEra/LearnEraPlaftform,mahendra-r/edx-platform,xuxiao19910803/edx,utecuy/edx-platform,cyanna/edx-platform,UXE/local-edx,ahmadiga/min_edx,pomegranited/edx-platform,peterm-itr/edx-platform,gymnasium/edx-platform,adoosii/edx-platform,mcgachey/edx-platform,tanmaykm/edx-platform,zerobatu/edx-platform,ahmadiga/min_edx,louyihua/edx-platform,franosincic/edx-platform,gymnasium/edx-platform,jazkarta/edx-platform-for-isc,arifsetiawan/edx-platform,xuxiao19910803/edx,doganov/edx-platform,IONISx/edx-platform,eestay/edx-platform,angelapper/edx-platform,solashirai/edx-platform,tiagochiavericosta/edx-platform,zofuthan/edx-platform,jazkarta/edx-platform-for-isc,Edraak/edx-platform,openfun/edx-platform,tanmaykm/edx-platform,solashirai/edx-platform,ESOedX/edx-platform,knehez/edx-platform,xuxiao19910803/edx,UOMx/edx-platform,a-parhom/edx-platform,playm2mboy/edx-platform,edx-solutions/edx-platform,gsehub/edx-platform,cognitiveclass/edx-platform,JioEducation/edx-platform,itsjeyd/edx-platform,wwj718/edx-platform,Stanford-Online/edx-platform,defance/edx-platform,JioEducation/edx-platform,doganov/edx-platform,mitocw/edx-platform,hastexo/edx-platform,kxliugang/edx-platform,dsajkl/reqiop,zhenzhai/edx-platform,zofuthan/edx-platform,Semi-global/edx-platform,MSOpenTech/edx-platform,olexiim/edx-platform,msegado/edx-platform,simbs/edx-platform,cpennington/edx-platform,mjirayu/sit_academy,vikas1885/test1,jjmiranda/edx-platform,UOMx/edx-platform,tanmaykm/edx-platform,jbzdak/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform,longmen21/edx-platform,zhenzhai/edx-platform,msegado/edx-platform,eduNEXT/edx-platform,Kalyzee/edx-platform,ovnicraft/edx-platform,sudheerchintala/LearnEraPlatForm,jazztpt/edx-platform,beacloudgenius/edx-platform,Semi-global/edx-platform,4eek/edx-platform,bigdatauniversity/edx-platform,motion2015/a3,Livit/Livit.Learn.EdX,J861449197/edx-platform,xuxiao19910803/edx-platform,wwj718/ANALYSE,ahmadio/edx-platform,Ayub-Khan/edx-platform,OmarIthawi/edx-platform,xuxiao19910803/edx,mcgachey/edx-platform,shubhdev/openedx,playm2mboy/edx-platform,eduNEXT/edx-platform,ahmedaljazzar/edx-platform,fintech-circle/edx-platform,nttks/jenkins-test,DefyVentures/edx-platform,mjirayu/sit_academy,longmen21/edx-platform,rue89-tech/edx-platform,J861449197/edx-platform,J861449197/edx-platform,nanolearningllc/edx-platform-cypress,chand3040/cloud_that,peterm-itr/edx-platform,shashank971/edx-platform,bigdatauniversity/edx-platform,Softmotions/edx-platform,rue89-tech/edx-platform,atsolakid/edx-platform,raccoongang/edx-platform,shubhdev/edxOnBaadal,vismartltd/edx-platform,knehez/edx-platform,RPI-OPENEDX/edx-platform,mtlchun/edx,antonve/s4-project-mooc,martynovp/edx-platform,analyseuc3m/ANALYSE-v1,IndonesiaX/edx-platform,ESOedX/edx-platform,kamalx/edx-platform,kmoocdev2/edx-platform,alexthered/kienhoc-platform,CredoReference/edx-platform,bigdatauniversity/edx-platform,angelapper/edx-platform,caesar2164/edx-platform,romain-li/edx-platform,RPI-OPENEDX/edx-platform,xuxiao19910803/edx-platform,bitifirefly/edx-platform,B-MOOC/edx-platform,utecuy/edx-platform,kamalx/edx-platform,leansoft/edx-platform,louyihua/edx-platform,cpennington/edx-platform,kmoocdev2/edx-platform,jbassen/edx-platform,rismalrv/edx-platform,y12uc231/edx-platform,ampax/edx-platform,DNFcode/edx-platform,shubhdev/openedx,jruiperezv/ANALYSE,rue89-tech/edx-platform,mtlchun/edx,doismellburning/edx-platform,gymnasium/edx-platform,jolyonb/edx-platform,procangroup/edx-platform,chauhanhardik/populo_2,pabloborrego93/edx-platform,stvstnfrd/edx-platform,edry/edx-platform,chauhanhardik/populo_2,teltek/edx-platform,xingyepei/edx-platform,leansoft/edx-platform,mbareta/edx-platform-ft,kxliugang/edx-platform,zadgroup/edx-platform,caesar2164/edx-platform,analyseuc3m/ANALYSE-v1,IONISx/edx-platform,kmoocdev/edx-platform,jazkarta/edx-platform,shubhdev/edx-platform,beacloudgenius/edx-platform,Semi-global/edx-platform,vasyarv/edx-platform,polimediaupv/edx-platform,shubhdev/edx-platform,Softmotions/edx-platform,nikolas/edx-platform,playm2mboy/edx-platform,dcosentino/edx-platform,kmoocdev2/edx-platform,vikas1885/test1,tanmaykm/edx-platform,motion2015/edx-platform,SravanthiSinha/edx-platform,4eek/edx-platform,dsajkl/reqiop,jswope00/griffinx,nagyistoce/edx-platform,motion2015/edx-platform,ZLLab-Mooc/edx-platform,hamzehd/edx-platform,BehavioralInsightsTeam/edx-platform,valtech-mooc/edx-platform,rismalrv/edx-platform,halvertoluke/edx-platform,antoviaque/edx-platform,shurihell/testasia,devs1991/test_edx_docmode,edx/edx-platform,Livit/Livit.Learn.EdX,lduarte1991/edx-platform,shubhdev/edxOnBaadal,cyanna/edx-platform,devs1991/test_edx_docmode,miptliot/edx-platform,iivic/BoiseStateX,nikolas/edx-platform,jonathan-beard/edx-platform,ampax/edx-platform-backup,Shrhawk/edx-platform,inares/edx-platform,jazkarta/edx-platform-for-isc,devs1991/test_edx_docmode,ak2703/edx-platform,UXE/local-edx,nttks/edx-platform,JCBarahona/edX,cecep-edu/edx-platform,mjirayu/sit_academy,nikolas/edx-platform,jazkarta/edx-platform,motion2015/a3,mitocw/edx-platform,Softmotions/edx-platform,kmoocdev2/edx-platform,sameetb-cuelogic/edx-platform-test,rismalrv/edx-platform,cselis86/edx-platform,solashirai/edx-platform,inares/edx-platform,beacloudgenius/edx-platform,Ayub-Khan/edx-platform,hamzehd/edx-platform,shubhdev/edx-platform,jamesblunt/edx-platform,atsolakid/edx-platform,wwj718/ANALYSE,raccoongang/edx-platform,CourseTalk/edx-platform,edry/edx-platform,edry/edx-platform,iivic/BoiseStateX,pomegranited/edx-platform,nttks/edx-platform,IndonesiaX/edx-platform,tiagochiavericosta/edx-platform,msegado/edx-platform,jbassen/edx-platform,dkarakats/edx-platform,JCBarahona/edX,DNFcode/edx-platform,10clouds/edx-platform,martynovp/edx-platform,ubc/edx-platform,appliedx/edx-platform,jruiperezv/ANALYSE,beni55/edx-platform,mcgachey/edx-platform,amir-qayyum-khan/edx-platform,sudheerchintala/LearnEraPlatForm,wwj718/edx-platform,edx-solutions/edx-platform,proversity-org/edx-platform,angelapper/edx-platform,pabloborrego93/edx-platform,arifsetiawan/edx-platform,zubair-arbi/edx-platform,pomegranited/edx-platform,ampax/edx-platform-backup,jelugbo/tundex,prarthitm/edxplatform,AkA84/edx-platform,dcosentino/edx-platform,jruiperezv/ANALYSE,romain-li/edx-platform,eestay/edx-platform,UXE/local-edx,leansoft/edx-platform,RPI-OPENEDX/edx-platform,TeachAtTUM/edx-platform,B-MOOC/edx-platform,tiagochiavericosta/edx-platform,jjmiranda/edx-platform,devs1991/test_edx_docmode,dsajkl/123,rhndg/openedx,OmarIthawi/edx-platform,prarthitm/edxplatform,jamiefolsom/edx-platform,sameetb-cuelogic/edx-platform-test,Edraak/edraak-platform,jswope00/griffinx,xingyepei/edx-platform,utecuy/edx-platform,mushtaqak/edx-platform,jbassen/edx-platform,louyihua/edx-platform,mbareta/edx-platform-ft,eduNEXT/edx-platform,jonathan-beard/edx-platform,ubc/edx-platform,JioEducation/edx-platform,cecep-edu/edx-platform,nagyistoce/edx-platform,vasyarv/edx-platform,bitifirefly/edx-platform,shubhdev/edxOnBaadal,ahmadio/edx-platform,SravanthiSinha/edx-platform,nanolearningllc/edx-platform-cypress-2,eestay/edx-platform,adoosii/edx-platform,mjirayu/sit_academy,mushtaqak/edx-platform,antonve/s4-project-mooc,a-parhom/edx-platform,bitifirefly/edx-platform,IONISx/edx-platform,atsolakid/edx-platform,y12uc231/edx-platform,bdero/edx-platform,shubhdev/openedx,fly19890211/edx-platform,jbassen/edx-platform,alu042/edx-platform,naresh21/synergetics-edx-platform,Edraak/circleci-edx-platform,sudheerchintala/LearnEraPlatForm,dcosentino/edx-platform,unicri/edx-platform,alexthered/kienhoc-platform,knehez/edx-platform,nikolas/edx-platform,rue89-tech/edx-platform,SravanthiSinha/edx-platform,longmen21/edx-platform,pepeportela/edx-platform,rue89-tech/edx-platform,miptliot/edx-platform,nanolearningllc/edx-platform-cypress,inares/edx-platform,Edraak/edraak-platform,sameetb-cuelogic/edx-platform-test,eduNEXT/edunext-platform,defance/edx-platform,procangroup/edx-platform,MakeHer/edx-platform,dsajkl/123,Edraak/edraak-platform,kmoocdev/edx-platform,itsjeyd/edx-platform,vismartltd/edx-platform,eduNEXT/edunext-platform,pepeportela/edx-platform,SravanthiSinha/edx-platform,kmoocdev/edx-platform,ahmadiga/min_edx,Edraak/edx-platform,edx/edx-platform,zerobatu/edx-platform,J861449197/edx-platform,arbrandes/edx-platform,nagyistoce/edx-platform,shurihell/testasia,y12uc231/edx-platform,kursitet/edx-platform,mahendra-r/edx-platform,procangroup/edx-platform,fintech-circle/edx-platform,zadgroup/edx-platform,DNFcode/edx-platform,halvertoluke/edx-platform,xinjiguaike/edx-platform,jazkarta/edx-platform,iivic/BoiseStateX,pomegranited/edx-platform,ZLLab-Mooc/edx-platform,y12uc231/edx-platform,martynovp/edx-platform,dkarakats/edx-platform,kxliugang/edx-platform,Edraak/circleci-edx-platform,wwj718/ANALYSE,nttks/jenkins-test,mbareta/edx-platform-ft,andyzsf/edx,MakeHer/edx-platform,chauhanhardik/populo,dsajkl/123,ampax/edx-platform-backup,zhenzhai/edx-platform,benpatterson/edx-platform,miptliot/edx-platform,jelugbo/tundex,beacloudgenius/edx-platform,proversity-org/edx-platform,chauhanhardik/populo,B-MOOC/edx-platform,nttks/jenkins-test,teltek/edx-platform,UOMx/edx-platform,arifsetiawan/edx-platform,jelugbo/tundex,AkA84/edx-platform,jjmiranda/edx-platform,arifsetiawan/edx-platform,msegado/edx-platform,jbzdak/edx-platform,kamalx/edx-platform,BehavioralInsightsTeam/edx-platform,mbareta/edx-platform-ft,etzhou/edx-platform,kmoocdev2/edx-platform,nttks/edx-platform,halvertoluke/edx-platform,naresh21/synergetics-edx-platform,jolyonb/edx-platform,MSOpenTech/edx-platform,mahendra-r/edx-platform,ZLLab-Mooc/edx-platform,devs1991/test_edx_docmode,arbrandes/edx-platform,cselis86/edx-platform,IndonesiaX/edx-platform,B-MOOC/edx-platform,chudaol/edx-platform,Ayub-Khan/edx-platform,hastexo/edx-platform,Semi-global/edx-platform,Shrhawk/edx-platform,jamesblunt/edx-platform,EDUlib/edx-platform,nanolearningllc/edx-platform-cypress,devs1991/test_edx_docmode,chudaol/edx-platform,halvertoluke/edx-platform,CourseTalk/edx-platform,mtlchun/edx,MSOpenTech/edx-platform,jbzdak/edx-platform,kmoocdev/edx-platform,shashank971/edx-platform,jazztpt/edx-platform,jjmiranda/edx-platform,ferabra/edx-platform,romain-li/edx-platform,jswope00/griffinx,wwj718/edx-platform,playm2mboy/edx-platform,antoviaque/edx-platform,polimediaupv/edx-platform,zubair-arbi/edx-platform,naresh21/synergetics-edx-platform,ak2703/edx-platform,zofuthan/edx-platform,Livit/Livit.Learn.EdX,AkA84/edx-platform,simbs/edx-platform,olexiim/edx-platform,vismartltd/edx-platform,mtlchun/edx,valtech-mooc/edx-platform,rhndg/openedx,alu042/edx-platform,romain-li/edx-platform,rhndg/openedx,eduNEXT/edx-platform,teltek/edx-platform,pomegranited/edx-platform,vismartltd/edx-platform,amir-qayyum-khan/edx-platform,edry/edx-platform,raccoongang/edx-platform,eestay/edx-platform,appliedx/edx-platform,solashirai/edx-platform,bdero/edx-platform,ferabra/edx-platform,louyihua/edx-platform,CourseTalk/edx-platform,wwj718/ANALYSE,kursitet/edx-platform,chrisndodge/edx-platform,cecep-edu/edx-platform,pabloborrego93/edx-platform,dcosentino/edx-platform,kursitet/edx-platform,caesar2164/edx-platform,etzhou/edx-platform,MSOpenTech/edx-platform,deepsrijit1105/edx-platform,10clouds/edx-platform,cognitiveclass/edx-platform,SivilTaram/edx-platform,deepsrijit1105/edx-platform,eemirtekin/edx-platform,jazztpt/edx-platform,motion2015/a3,eestay/edx-platform,pepeportela/edx-platform,ampax/edx-platform-backup,appliedx/edx-platform,alexthered/kienhoc-platform,jzoldak/edx-platform,RPI-OPENEDX/edx-platform,IONISx/edx-platform,CourseTalk/edx-platform,analyseuc3m/ANALYSE-v1,chudaol/edx-platform,nttks/edx-platform,Livit/Livit.Learn.EdX,waheedahmed/edx-platform,Lektorium-LLC/edx-platform,cognitiveclass/edx-platform,doganov/edx-platform,MakeHer/edx-platform,ZLLab-Mooc/edx-platform,nagyistoce/edx-platform,shabab12/edx-platform,atsolakid/edx-platform,Edraak/edx-platform,zhenzhai/edx-platform,cpennington/edx-platform,peterm-itr/edx-platform,Softmotions/edx-platform,openfun/edx-platform,dkarakats/edx-platform,arbrandes/edx-platform,IndonesiaX/edx-platform,jswope00/griffinx,ak2703/edx-platform,simbs/edx-platform,IONISx/edx-platform,Stanford-Online/edx-platform,valtech-mooc/edx-platform,BehavioralInsightsTeam/edx-platform,teltek/edx-platform,mushtaqak/edx-platform,don-github/edx-platform,zadgroup/edx-platform,stvstnfrd/edx-platform,unicri/edx-platform,bitifirefly/edx-platform,appsembler/edx-platform,benpatterson/edx-platform,xuxiao19910803/edx-platform,doganov/edx-platform,SivilTaram/edx-platform,nagyistoce/edx-platform,mcgachey/edx-platform,sudheerchintala/LearnEraPlatForm,raccoongang/edx-platform,olexiim/edx-platform,chauhanhardik/populo_2
|
f931a434839222bb00282a432d6d6a0c2c52eb7d
|
numpy/array_api/_typing.py
|
numpy/array_api/_typing.py
|
"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
from ._array_object import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
_T = TypeVar("_T")
NestedSequence = Sequence[Sequence[_T]]
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
from __future__ import annotations
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import (
Any,
Literal,
Sequence,
Type,
Union,
TYPE_CHECKING,
TypeVar,
Protocol,
)
from ._array_object import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
_T_co = TypeVar("_T_co", covariant=True)
class NestedSequence(Protocol[_T_co]):
def __getitem__(self, key: int, /) -> _T_co | NestedSequence[_T_co]: ...
def __len__(self, /) -> int: ...
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
Replace `NestedSequence` with a proper nested sequence protocol
|
ENH: Replace `NestedSequence` with a proper nested sequence protocol
|
Python
|
bsd-3-clause
|
numpy/numpy,endolith/numpy,charris/numpy,rgommers/numpy,numpy/numpy,jakirkham/numpy,seberg/numpy,mattip/numpy,jakirkham/numpy,endolith/numpy,rgommers/numpy,mattip/numpy,mattip/numpy,seberg/numpy,pdebuyl/numpy,pdebuyl/numpy,endolith/numpy,endolith/numpy,mattip/numpy,jakirkham/numpy,charris/numpy,seberg/numpy,mhvk/numpy,pdebuyl/numpy,rgommers/numpy,mhvk/numpy,numpy/numpy,jakirkham/numpy,mhvk/numpy,mhvk/numpy,charris/numpy,anntzer/numpy,pdebuyl/numpy,charris/numpy,jakirkham/numpy,numpy/numpy,rgommers/numpy,anntzer/numpy,anntzer/numpy,anntzer/numpy,seberg/numpy,mhvk/numpy
|
718a04f14f3ede084a2d9391e187b4d943463c6f
|
yanico/session/__init__.py
|
yanico/session/__init__.py
|
"""Handle nicovideo.jp user_session."""
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class UserSessionNotFoundError(Exception):
"""Firefox profile exists, buf user_session is not found."""
|
"""Handle nicovideo.jp user_session."""
# Copyright 2015-2016 Masayuki Yamamoto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class UserSessionNotFoundError(Exception):
"""Profile exists, but user_session is not found."""
|
Fix a typo of class docstring
|
Fix a typo of class docstring
|
Python
|
apache-2.0
|
ma8ma/yanico
|
889eed552f4e17797764a9d9a2da6bbaa6d5dd33
|
admin_panel/views.py
|
admin_panel/views.py
|
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
class LoginView(TemplateView):
template_name = "admin/login.html"
def post(self, request):
username = request.POST['username']
password = request.POST['password']
user_object = auth.authenticate(request, username=username, password=password)
if user_object is None:
messages.error(request, "Invalid credentials")
return self.get(request)
auth.login(request, user_object)
messages.success(request, "You've been logged in")
return http.HttpResponseRedirect(self.get_next_url(request))
def get_next_url(self, request):
if "next" in request.GET:
return request.GET['next']
else:
return "/administration/panel"
class Panel(TemplateView):
template_name = "admin/panel.html"
class LogoutView(View):
def get(self, request):
auth.logout(request)
return http.HttpResponseRedirect("/administration/login")
|
from django.views import View
from django.views.generic import TemplateView
from django.contrib import auth
from django.contrib import messages
from django import http
from django.urls import reverse
class LoginView(TemplateView):
template_name = "admin/login.html"
def post(self, request):
username = request.POST['username']
password = request.POST['password']
user_object = auth.authenticate(request, username=username, password=password)
if user_object is None:
messages.error(request, "Invalid credentials")
return self.get(request)
auth.login(request, user_object)
messages.success(request, "You've been logged in")
return http.HttpResponseRedirect(self.get_next_url(request))
def get_next_url(self, request):
if "next" in request.GET:
return request.GET['next']
else:
return reverse("admin:Panel")
class Panel(TemplateView):
template_name = "admin/panel.html"
class LogoutView(View):
def get(self, request):
auth.logout(request)
return http.HttpResponseRedirect("/administration/login")
|
Use django reverse function to obtain url instead of hard-coding
|
Use django reverse function to obtain url instead of hard-coding
|
Python
|
mpl-2.0
|
Apo11onian/Apollo-Blog,Apo11onian/Apollo-Blog,Apo11onian/Apollo-Blog
|
0f35ed05d335e7c126675bc913b72aac3ac916df
|
project/apps/api/signals.py
|
project/apps/api/signals.py
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
from django.conf import settings
from .models import (
Contest,
)
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def user_post_save(sender, instance=None, created=False, **kwargs):
if created:
Token.objects.create(user=instance)
@receiver(post_save, sender=Contest)
def contest_post_save(sender, instance=None, created=False, **kwargs):
if created:
instance.build()
instance.save()
|
from django.db.models.signals import (
post_save,
)
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
from django.conf import settings
from .models import (
Contest,
)
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def user_post_save(sender, instance=None, created=False, **kwargs):
if created:
Token.objects.create(user=instance)
@receiver(post_save, sender=Contest)
def contest_post_save(sender, instance=None, created=False, raw=False, **kwargs):
if not raw:
if created:
instance.build()
instance.save()
|
Add check for fixture loading
|
Add check for fixture loading
|
Python
|
bsd-2-clause
|
barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django,barberscore/barberscore-api
|
f277007e46b7c6d8c978011d7356b7527ba91133
|
axes/utils.py
|
axes/utils.py
|
from axes.models import AccessAttempt
def reset(ip=None, username=None):
"""Reset records that match ip or username, and
return the count of removed attempts.
"""
count = 0
attempts = AccessAttempt.objects.all()
if ip:
attempts = attempts.filter(ip_address=ip)
if username:
attempts = attempts.filter(username=username)
if attempts:
count = attempts.count()
attempts.delete()
return count
def iso8601(timestamp):
"""Returns datetime.timedelta translated to ISO 8601 formatted duration.
"""
seconds = timestamp.total_seconds()
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
date = '{:.0f}D'.format(days) if days else ''
time_values = hours, minutes, seconds
time_designators = 'H', 'M', 'S'
time = ''.join([
('{:.0f}'.format(value) + designator)
for value, designator in zip(time_values, time_designators)
if value]
)
return u'P' + date + (u'T' + time if time else '')
|
from django.core.cache import cache
from axes.models import AccessAttempt
def reset(ip=None, username=None):
"""Reset records that match ip or username, and
return the count of removed attempts.
"""
count = 0
attempts = AccessAttempt.objects.all()
if ip:
attempts = attempts.filter(ip_address=ip)
if username:
attempts = attempts.filter(username=username)
if attempts:
count = attempts.count()
from axes.decorators import get_cache_key
for attempt in attempts:
cache_hash_key = get_cache_key(attempt)
if cache.get(cache_hash_key):
cache.delete(cache_hash_key)
attempts.delete()
return count
def iso8601(timestamp):
"""Returns datetime.timedelta translated to ISO 8601 formatted duration.
"""
seconds = timestamp.total_seconds()
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
date = '{:.0f}D'.format(days) if days else ''
time_values = hours, minutes, seconds
time_designators = 'H', 'M', 'S'
time = ''.join([
('{:.0f}'.format(value) + designator)
for value, designator in zip(time_values, time_designators)
if value]
)
return u'P' + date + (u'T' + time if time else '')
|
Delete cache key in reset command line
|
Delete cache key in reset command line
|
Python
|
mit
|
jazzband/django-axes,django-pci/django-axes
|
a3c131776678b8e91e1179cd0f3c3b4b3fbbf6fb
|
openid_provider/tests/test_code_flow.py
|
openid_provider/tests/test_code_flow.py
|
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from django.test import TestCase
from openid_provider.tests.utils import *
from openid_provider.views import *
class CodeFlowTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = create_fake_user()
self.client = create_fake_client(response_type='code')
def test_authorize_invalid_parameters(self):
"""
If the request fails due to a missing, invalid, or mismatching
redirection URI, or if the client identifier is missing or invalid,
the authorization server SHOULD inform the resource owner of the error.
See: https://tools.ietf.org/html/rfc6749#section-4.1.2.1
"""
url = reverse('openid_provider:authorize')
request = self.factory.get(url)
response = AuthorizeView.as_view()(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.content), True)
|
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from django.test import TestCase
from openid_provider.tests.utils import *
from openid_provider.views import *
import urllib
class CodeFlowTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = create_fake_user()
self.client = create_fake_client(response_type='code')
def test_authorize_invalid_parameters(self):
"""
If the request fails due to a missing, invalid, or mismatching
redirection URI, or if the client identifier is missing or invalid,
the authorization server SHOULD inform the resource owner of the error.
See: https://tools.ietf.org/html/rfc6749#section-4.1.2.1
"""
url = reverse('openid_provider:authorize')
request = self.factory.get(url)
response = AuthorizeView.as_view()(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.content), True)
def test_authorize_invalid_response_type(self):
"""
The OP informs the RP by using the Error Response parameters defined
in Section 4.1.2.1 of OAuth 2.0.
See: http://openid.net/specs/openid-connect-core-1_0.html#AuthError
"""
# Create an authorize request with an unsupported response_type.
url = reverse('openid_provider:authorize')
url += '?client_id={0}&response_type=code%20id_token&scope=openid%20email' \
'&redirect_uri={1}&state=abcdefg'.format(
self.client.client_id,
urllib.quote(self.client.default_redirect_uri),
)
request = self.factory.get(url)
response = AuthorizeView.as_view()(request)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.has_header('Location'), True)
# Check query component in the redirection URI.
correct_query = 'error=' in response['Location']
self.assertEqual(correct_query, True)
|
Add another test for Code Flow.
|
Add another test for Code Flow.
|
Python
|
mit
|
wayward710/django-oidc-provider,bunnyinc/django-oidc-provider,wayward710/django-oidc-provider,juanifioren/django-oidc-provider,ByteInternet/django-oidc-provider,wojtek-fliposports/django-oidc-provider,ByteInternet/django-oidc-provider,django-py/django-openid-provider,torreco/django-oidc-provider,django-py/django-openid-provider,torreco/django-oidc-provider,wojtek-fliposports/django-oidc-provider,nmohoric/django-oidc-provider,Sjord/django-oidc-provider,Sjord/django-oidc-provider,juanifioren/django-oidc-provider,nmohoric/django-oidc-provider,bunnyinc/django-oidc-provider
|
4a38d0df3d72494e2a96ac776f13ce685b537561
|
lokar/bib.py
|
lokar/bib.py
|
# coding=utf-8
from __future__ import unicode_literals
from io import BytesIO
from .marc import Record
from .util import etree, parse_xml, show_diff
class Bib(object):
""" An Alma Bib record """
def __init__(self, alma, xml):
self.alma = alma
self.orig_xml = xml.encode('utf-8')
self.init(xml)
def init(self, xml):
self.doc = parse_xml(xml)
self.mms_id = self.doc.findtext('mms_id')
self.marc_record = Record(self.doc.find('record'))
self.cz_link = self.doc.findtext('linked_record_id[@type="CZ"]') or None
def save(self, diff=False, dry_run=False):
# Save record back to Alma
post_data = ('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'.encode('utf-8') +
etree.tostring(self.doc, encoding='UTF-8'))
if diff:
show_diff(self.orig_xml, post_data)
if not dry_run:
response = self.alma.put('/bibs/{}'.format(self.mms_id),
data=BytesIO(post_data),
headers={'Content-Type': 'application/xml'})
self.init(response)
def dump(self, filename):
# Dump record to file
with open(filename, 'wb') as f:
f.write(etree.tostring(self.doc, pretty_print=True))
|
# coding=utf-8
from __future__ import unicode_literals
from io import BytesIO
from .marc import Record
from .util import etree, parse_xml, show_diff
class Bib(object):
""" An Alma Bib record """
def __init__(self, alma, xml):
self.alma = alma
self.orig_xml = xml
self.init(xml)
def init(self, xml):
self.doc = parse_xml(xml)
self.mms_id = self.doc.findtext('mms_id')
self.marc_record = Record(self.doc.find('record'))
self.cz_link = self.doc.findtext('linked_record_id[@type="CZ"]') or None
def save(self, diff=False, dry_run=False):
# Save record back to Alma
post_data = ('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>' +
etree.tounicode(self.doc))
if diff:
show_diff(self.orig_xml, post_data)
if not dry_run:
response = self.alma.put('/bibs/{}'.format(self.mms_id),
data=BytesIO(post_data.encode('utf-8')),
headers={'Content-Type': 'application/xml'})
self.init(response)
def dump(self, filename):
# Dump record to file
with open(filename, 'wb') as f:
f.write(etree.tostring(self.doc, pretty_print=True))
|
Fix diffing on Py3 by comparing unicode strings
|
Fix diffing on Py3 by comparing unicode strings
|
Python
|
agpl-3.0
|
scriptotek/almar,scriptotek/lokar
|
1312dc95d9c25897c11c8e818edcb9cd2b6a32f7
|
ecommerce/extensions/app.py
|
ecommerce/extensions/app.py
|
from oscar import app
class EdxShop(app.Shop):
# URLs are only visible to users with staff permissions
default_permissions = 'is_staff'
application = EdxShop()
|
from oscar import app
from oscar.core.application import Application
class EdxShop(app.Shop):
# URLs are only visible to users with staff permissions
default_permissions = 'is_staff'
# Override core app instances with blank application instances to exclude their URLs.
promotions_app = Application()
catalogue_app = Application()
offer_app = Application()
search_app = Application()
application = EdxShop()
|
Move the security fix into Eucalyptus
|
Move the security fix into Eucalyptus
|
Python
|
agpl-3.0
|
mferenca/HMS-ecommerce,mferenca/HMS-ecommerce,mferenca/HMS-ecommerce
|
1958165c7bf3b9fa45972658b980cefe6a742164
|
myhpom/validators.py
|
myhpom/validators.py
|
import re
from django.core.exceptions import ValidationError
from django.core.validators import EmailValidator, RegexValidator
from django.contrib.auth.models import User
# First Name, Last Name: At least one alphanumeric character.
name_validator = RegexValidator(
regex=r'\w',
flags=re.U,
message='Please enter your name'
)
# Email: valid email address
email_validator = EmailValidator()
# Email is not already taken
def email_not_taken_validator(email):
if len(User.objects.filter(email=email)) > 0:
raise ValidationError(u'Email already in use.')
# Password: At least 8 chars total, 1 uppercase, lowercase, digit, special char.
def password_validator(password):
errors = []
if len(password) < 8:
errors.append(u'8 characters total')
if re.search(r"[a-z]", password) is None:
errors.append(u'1 lowercase letter (a-z)')
if re.search(r"[A-Z]", password) is None:
errors.append(u'1 uppercase letter (A-Z)')
if re.search(r"\d", password) is None:
errors.append(u'1 number (0-9)')
if re.search(r"[!\@\#\$\%\^\*\(\)\_\+\-\=]", password) is None:
errors.append(u'1 special character (! @ # $ % ^ * ( ) _ + - =)')
if len(errors) > 0:
raise ValidationError(u'Please enter a password with at least ' + u', '.join(errors))
|
import re
from django.core.exceptions import ValidationError
from django.core.validators import EmailValidator, RegexValidator
# First Name, Last Name: At least one alphanumeric character.
name_validator = RegexValidator(
regex=r'\w',
flags=re.U,
message='Please enter your name'
)
# Email: valid email address
email_validator = EmailValidator()
# Email is not already taken
def email_not_taken_validator(email):
from myhpom.models import User
if len(User.objects.filter(email=email)) > 0:
raise ValidationError(u'Email already in use.')
# Password: At least 8 chars total, 1 uppercase, lowercase, digit, special char.
def password_validator(password):
errors = []
if len(password) < 8:
errors.append(u'8 characters total')
if re.search(r"[a-z]", password) is None:
errors.append(u'1 lowercase letter (a-z)')
if re.search(r"[A-Z]", password) is None:
errors.append(u'1 uppercase letter (A-Z)')
if re.search(r"\d", password) is None:
errors.append(u'1 number (0-9)')
if re.search(r"[!\@\#\$\%\^\*\(\)\_\+\-\=]", password) is None:
errors.append(u'1 special character (! @ # $ % ^ * ( ) _ + - =)')
if len(errors) > 0:
raise ValidationError(u'Please enter a password with at least ' + u', '.join(errors))
|
Revert "[mh-14] "This import is ultimately just from django.contrib.auth.models import User - using that directly would probably address whatever circular import required that this import get put here, and make it clearer which model User is."-Dane"
|
Revert "[mh-14] "This import is ultimately just from django.contrib.auth.models import User - using that directly would probably address whatever circular import required that this import get put here, and make it clearer which model User is."-Dane"
This reverts commit 7350c56339acaef416d03b6d7ae0e818ab8db182.
|
Python
|
bsd-3-clause
|
ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM
|
906950ec1bd1f5d0980116d10344f9f1b7d844ed
|
Importacions_F1_Q1/Fact_impF1_eliminar_Ja_existeix.py
|
Importacions_F1_Q1/Fact_impF1_eliminar_Ja_existeix.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from ooop import OOOP
import configdb
O = OOOP(**configdb.ooop)
imp_obj = O.GiscedataFacturacioImportacioLinia
imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like',"Aquest fitxer XML ja s'ha processat en els següents IDs")])
#imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like','XML erroni')])
imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"XML no es correspon al tipus F1")])
imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"Document invàlid")])
total = len(imp_del_ids)
n = 0
for imp_del_id in imp_del_ids:
try:
imp_obj.unlink([imp_del_id])
n +=1
print "%d/%d" % (n,total)
except Exception, e:
print e
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from ooop import OOOP
import configdb
O = OOOP(**configdb.ooop)
imp_obj = O.GiscedataFacturacioImportacioLinia
imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like',"Aquest fitxer XML ja s'ha processat en els següents IDs")])
imp_del_ids = imp_obj.search([('state','=','erroni'),('info','like',"Ja existeix una factura amb el mateix origen")])
#imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like','XML erroni')])
imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"XML no es correspon al tipus F1")])
imp_del_ids += imp_obj.search([('state','=','erroni'),('info','like',"Document invàlid")])
total = len(imp_del_ids)
n = 0
for imp_del_id in imp_del_ids:
try:
imp_obj.unlink([imp_del_id])
n +=1
print "%d/%d" % (n,total)
except Exception, e:
print e
|
Kill "Ja existeix una factura amb el mateix.." too
|
Kill "Ja existeix una factura amb el mateix.." too
|
Python
|
agpl-3.0
|
Som-Energia/invoice-janitor
|
2e99893065abef2f751e3fb5f19a59bfee79a756
|
language_model_transcription.py
|
language_model_transcription.py
|
import metasentence
import language_model
import standard_kaldi
import diff_align
import json
import os
import sys
vocab = metasentence.load_vocabulary('PROTO_LANGDIR/graphdir/words.txt')
def lm_transcribe(audio_f, text_f):
ms = metasentence.MetaSentence(open(text_f).read(), vocab)
model_dir = language_model.getLanguageModel(ms.get_kaldi_sequence())
print 'generated model', model_dir
k = standard_kaldi.Kaldi(os.path.join(model_dir, 'graphdir', 'HCLG.fst'))
trans = standard_kaldi.transcribe(k, audio_f)
ret = diff_align.align(trans["words"], ms)
return ret
if __name__=='__main__':
AUDIO_FILE = sys.argv[1]
TEXT_FILE = sys.argv[2]
OUTPUT_FILE = sys.argv[3]
ret = lm_transcribe(AUDIO_FILE, TEXT_FILE)
json.dump(ret, open(OUTPUT_FILE, 'w'), indent=2)
|
import metasentence
import language_model
import standard_kaldi
import diff_align
import json
import os
import sys
vocab = metasentence.load_vocabulary('PROTO_LANGDIR/graphdir/words.txt')
def lm_transcribe(audio_f, text_f):
ms = metasentence.MetaSentence(open(text_f).read(), vocab)
model_dir = language_model.getLanguageModel(ms.get_kaldi_sequence())
print 'generated model', model_dir
k = standard_kaldi.Kaldi(os.path.join(model_dir, 'graphdir', 'HCLG.fst'))
trans = standard_kaldi.transcribe(k, audio_f)
ret = diff_align.align(trans["words"], ms)
return ret
if __name__=='__main__':
import argparse
parser = argparse.ArgumentParser(
description='Align a transcript to audio by generating a new language model.')
parser.add_argument('audio_file', help='input audio file in any format supported by FFMPEG')
parser.add_argument('text_file', help='input transcript as plain text')
parser.add_argument('output_file', type=argparse.FileType('w'),
help='output json file for aligned transcript')
parser.add_argument('--proto_langdir', default="PROTO_LANGDIR",
help='path to the prototype language directory')
args = parser.parse_args()
ret = lm_transcribe(args.audio_file, args.text_file)
json.dump(ret, args.output_file, indent=2)
|
Use argparse for main python entrypoint args.
|
Use argparse for main python entrypoint args.
Will make it easier to add proto_langdir as a flag argument in a future commit.
|
Python
|
mit
|
lowerquality/gentle,lowerquality/gentle,lowerquality/gentle,lowerquality/gentle
|
de15315b95f70e56d424d54637e3ac0d615ea0f0
|
proto/ho.py
|
proto/ho.py
|
from board import Board, BoardCanvas
b = Board(19, 19)
c = BoardCanvas(b)
|
#!/usr/bin/env python
import platform
import subprocess
import sys
from copy import deepcopy
from board import Board, BoardCanvas
def clear():
subprocess.check_call('cls' if platform.system() == 'Windows' else 'clear', shell=True)
class _Getch:
"""
Gets a single character from standard input. Does not echo to the
screen.
"""
def __init__(self):
try:
self.impl = _GetchWindows()
except ImportError:
self.impl = _GetchUnix()
def __call__(self):
return self.impl()
class _GetchUnix:
def __call__(self):
import tty
import termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _GetchWindows:
def __init__(self):
import msvcrt # NOQA
def __call__(self):
import msvcrt
return msvcrt.getch()
getch = _Getch()
WIDTH = 19
HEIGHT = 19
def trunc_width(v):
return max(1, min(WIDTH, v))
def trunc_height(v):
return max(1, min(HEIGHT, v))
def move_up(x, y):
return trunc_width(x), trunc_height(y - 1)
def move_down(x, y):
return trunc_width(x), trunc_height(y + 1)
def move_left(x, y):
return trunc_width(x - 1), trunc_height(y)
def move_right(x, y):
return trunc_width(x + 1), trunc_height(y)
KEYS = {
'w': move_up,
'r': move_down,
'a': move_left,
's': move_right,
}
def main():
board = Board(WIDTH, HEIGHT)
canvas = BoardCanvas(board)
cur_x, cur_y = (1, 1)
while True:
clear()
# Print board
select_board = deepcopy(canvas)
select_board.set(cur_x, cur_y, 'X')
print select_board
print 'Make your move... '
# Get char
c = getch()
# Escape terminates
if c == '\x1b':
break
# Move cursor
try:
cur_x, cur_y = KEYS[c](cur_x, cur_y)
except KeyError:
pass
if __name__ == '__main__':
main()
|
Add game loop to prototype
|
Add game loop to prototype
|
Python
|
mit
|
davesque/go.py
|
038b56134017b6b3e4ea44d1b7197bc5168868d3
|
safeopt/__init__.py
|
safeopt/__init__.py
|
"""
The `safeopt` package provides...
Main classes
============
.. autosummary::
SafeOpt
SafeOptSwarm
Utilities
=========
.. autosummary::
sample_gp_function
linearly_spaced_combinations
plot_2d_gp
plot_3d_gp
plot_contour_gp
"""
from __future__ import absolute_import
from .utilities import *
from .gp_opt import *
__all__ = [s for s in dir() if not s.startswith('_')]
|
"""
The `safeopt` package provides...
Main classes
============
These classes provide the main functionality for Safe Bayesian optimization.
.. autosummary::
SafeOpt
SafeOptSwarm
Utilities
=========
The following are utilities to make testing and working with the library more pleasant.
.. autosummary::
sample_gp_function
linearly_spaced_combinations
plot_2d_gp
plot_3d_gp
plot_contour_gp
"""
from __future__ import absolute_import
from .utilities import *
from .gp_opt import *
__all__ = [s for s in dir() if not s.startswith('_')]
|
Add short comment to docs
|
Add short comment to docs
|
Python
|
mit
|
befelix/SafeOpt,befelix/SafeOpt
|
0d50f6663bbc7f366c9db6a9aeef5feb0f4cb5f2
|
src/ExampleNets/readAllFields.py
|
src/ExampleNets/readAllFields.py
|
import glob
import os
import SCIRunPythonAPI; from SCIRunPythonAPI import *
def allFields(path):
names = []
for dirname, dirnames, filenames in os.walk(path):
for filename in filenames:
if filename.endswith("fld"):
names.append(os.path.join(dirname, filename))
return names
dir = r"E:\scirun\trunk_ref\SCIRunData"
for file in allFields(dir):
read = addModule("ReadField")
read.Filename = file
show = addModule("ReportFieldInfo")
read.output[0] >> show.input.Input
#executeAll()
|
import glob
import os
import time
import SCIRunPythonAPI; from SCIRunPythonAPI import *
def allFields(path):
names = []
for dirname, dirnames, filenames in os.walk(path):
for filename in filenames:
if filename.endswith("fld"):
names.append(os.path.join(dirname, filename))
return names
def printList(list, name):
thefile = open(name, 'w')
for f,v in list:
thefile.write("%s\n\t%s\n" % (f,v))
dir = r"E:\scirun\trunk_ref\SCIRunData"
values = []
files = []
for file in allFields(dir):
read = addModule("ReadField")
read.Filename = file
files.append(file)
show = addModule("ReportFieldInfo")
prnt = addModule("PrintDatatype")
read.output[0] >> show.input.Input
show.output[0] >> prnt.input[0]
executeAll()
time.sleep(1)
values.append(prnt.ReceivedValue)
[removeModule(m.id) for m in modules()]
printList(zip(files, values), r'E:\fieldTypes.txt')
|
Update script to print all field types to a file
|
Update script to print all field types to a file
|
Python
|
mit
|
moritzdannhauer/SCIRunGUIPrototype,jessdtate/SCIRun,jessdtate/SCIRun,jessdtate/SCIRun,jcollfont/SCIRun,jcollfont/SCIRun,ajanson/SCIRun,jessdtate/SCIRun,ajanson/SCIRun,moritzdannhauer/SCIRunGUIPrototype,collint8/SCIRun,moritzdannhauer/SCIRunGUIPrototype,moritzdannhauer/SCIRunGUIPrototype,ajanson/SCIRun,jessdtate/SCIRun,collint8/SCIRun,jcollfont/SCIRun,ajanson/SCIRun,collint8/SCIRun,jcollfont/SCIRun,jcollfont/SCIRun,collint8/SCIRun,ajanson/SCIRun,jessdtate/SCIRun,jessdtate/SCIRun,jcollfont/SCIRun,collint8/SCIRun,collint8/SCIRun,collint8/SCIRun,ajanson/SCIRun,jessdtate/SCIRun,collint8/SCIRun,moritzdannhauer/SCIRunGUIPrototype,moritzdannhauer/SCIRunGUIPrototype,ajanson/SCIRun,moritzdannhauer/SCIRunGUIPrototype,jcollfont/SCIRun
|
b1bd07038b0c6a6d801e686372996b3478c71af9
|
iss/management/commands/upsert_iss_organizations.py
|
iss/management/commands/upsert_iss_organizations.py
|
#!/usr/bin/env python
"""Upserts Organization records with data from Salesforce Accounts.
"""
import logging
import os
from django.core.management.base import BaseCommand
import iss.salesforce
import iss.utils
logger = logging.getLogger(os.path.basename(__file__))
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'-m', '--modified-within',
type=int,
metavar='n-days',
default=7,
help='upsert organizations for accounts modified within n-days')
def handle(self, *args, **options):
upsert_organizations_for_recently_modified_accounts(
options['modified_within'])
def upsert_organizations_for_recently_modified_accounts(since=7):
"""Upsert organizations for SF Accounts modified in last `since` days."""
logger.info('upserting orgs for accounts modified in last {since} days'.
format(since=since))
recently_modified_accounts = (
iss.salesforce.Account.get_recently_modified_accounts(since=since))
iss.utils.upsert_organizations_for_accounts(recently_modified_accounts)
|
#!/usr/bin/env python
"""Upserts Organization records with data from Salesforce Accounts.
"""
import logging
import os
from django.core.management.base import BaseCommand
import iss.models
import iss.salesforce
import iss.utils
logger = logging.getLogger(os.path.basename(__file__))
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'-m', '--modified-within',
type=int,
metavar='n-days',
default=7,
help='upsert organizations for accounts modified within n-days')
parser.add_argument(
'-i', '--include-aashe-in-website',
action='store_true',
help='force AASHE exclude_from_website to be False')
def handle(self, *args, **options):
upsert_organizations_for_recently_modified_accounts(
since=options['modified_within'],
include_aashe_in_website=options['include_aashe_in_website'])
def upsert_organizations_for_recently_modified_accounts(
since=7, include_aashe_in_website=False):
"""Upsert organizations for SF Accounts modified in last `since` days.
When `include_aashe_in_website` is true, set the
`exclude_from_website` flag on the Organization representing AASHE
to False (0, actually). (Added for the Hub project.)
"""
logger.info('upserting orgs for accounts modified in last {since} days'.
format(since=since))
recently_modified_accounts = (
iss.salesforce.Account.get_recently_modified_accounts(since=since))
iss.utils.upsert_organizations_for_accounts(recently_modified_accounts)
if include_aashe_in_website:
aashe = iss.models.Organization.objects.get(org_name="AASHE")
if aashe.exclude_from_website:
aashe.exclude_from_website = 0
aashe.save()
|
Add --include-aashe-in-website flag to org upsert
|
Add --include-aashe-in-website flag to org upsert
|
Python
|
mit
|
AASHE/iss
|
07f531c7e3bbc0149fad4cfda75d8803cbc48e1d
|
smserver/chatplugin.py
|
smserver/chatplugin.py
|
#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
This module add the class needed for creating custom chat command
:Example:
Here's a simple ChatPlugin which will send a HelloWorld on use
``
ChatHelloWorld(ChatPlugin):
helper = "Display Hello World"
cimmand
def __call__(self, serv, message):
serv.send_message("Hello world", to="me")
``
"""
class ChatPlugin(object):
"""
Inherit from this class to add a command in the chat.
helper: Text that will be show when calling the help command
permission: Permission needed for this command (see ability)
room: Specify here if the command need to be execute in a room
command: The command to use to call this function
"""
helper = ""
permission = None
room = False
command = None
def can(self, serv):
"""
Method call each time somenone try to run this command
:param serv: The StepmaniaController instance
:type serv: StepmaniaController
:return: True if authorize False if not
:rtype: bool
"""
if self.room and not serv.room:
return False
if self.permission and serv.cannot(self.permission, serv.conn.room):
return False
return True
def __call__(self, serv, message):
"""
Action to perform when using the command
:param serv: The StepmaniaController instance
:param message: The text after the command. (Eg. /command text)
:type serv: StepmaniaController
:type message: str
:return: Nothing
"""
|
#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
This module add the class needed for creating custom chat command
:Example:
Here's a simple ChatPlugin which will send a HelloWorld on use
``
ChatHelloWorld(ChatPlugin):
helper = "Display Hello World"
command = "hello"
def __call__(self, serv, message):
serv.send_message("Hello world", to="me")
``
"""
class ChatPlugin(object):
"""
Inherit from this class to add a command in the chat.
helper: Text that will be show when calling the help command
permission: Permission needed for this command (see ability)
room: Specify here if the command need to be execute in a room
command: The command to use to call this function
"""
helper = ""
permission = None
room = False
command = None
def can(self, serv):
"""
Method call each time somenone try to run this command
:param serv: The StepmaniaController instance
:type serv: StepmaniaController
:return: True if authorize False if not
:rtype: bool
"""
if self.room and not serv.room:
return False
if self.permission and serv.cannot(self.permission, serv.conn.room):
return False
return True
def __call__(self, serv, message):
"""
Action to perform when using the command
:param serv: The StepmaniaController instance
:param message: The text after the command. (Eg. /command text)
:type serv: StepmaniaController
:type message: str
:return: Nothing
"""
|
Correct chat plugin example in docsctring
|
Correct chat plugin example in docsctring
|
Python
|
mit
|
ningirsu/stepmania-server,Nickito12/stepmania-server,ningirsu/stepmania-server,Nickito12/stepmania-server
|
83292a4b6f6bec00b20c623fa6f44e15aa82cd2a
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
import django
from django.conf import settings
if len(sys.argv) > 1 and 'postgres' in sys.argv:
sys.argv.remove('postgres')
db_engine = 'django.db.backends.postgresql_psycopg2'
db_name = 'test_main'
else:
db_engine = 'django.db.backends.sqlite3'
db_name = ''
if not settings.configured:
settings.configure(
DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
INSTALLED_APPS = [
'django.contrib.contenttypes',
'genericm2m',
'genericm2m.genericm2m_tests',
],
)
from django.test.utils import get_runner
def runtests(*test_args):
if not test_args:
if sys.version_info[0] > 2:
test_args = ['genericm2m.genericm2m_tests']
else:
test_args = ["genericm2m_tests"]
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
#!/usr/bin/env python
import sys
from os.path import dirname, abspath
import django
from django.conf import settings
if len(sys.argv) > 1 and 'postgres' in sys.argv:
sys.argv.remove('postgres')
db_engine = 'django.db.backends.postgresql_psycopg2'
db_name = 'test_main'
else:
db_engine = 'django.db.backends.sqlite3'
db_name = ''
if not settings.configured:
settings.configure(
DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
INSTALLED_APPS = [
'django.contrib.contenttypes',
'genericm2m',
'genericm2m.genericm2m_tests',
],
MIDDLEWARE_CLASSES = (),
)
from django.test.utils import get_runner
django.setup()
def runtests(*test_args):
if not test_args:
if sys.version_info[0] > 2:
test_args = ['genericm2m.genericm2m_tests']
else:
test_args = ["genericm2m_tests"]
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
Fix "AppRegistryNotReady: Models aren't loaded yet"
|
Fix "AppRegistryNotReady: Models aren't loaded yet"
|
Python
|
mit
|
coleifer/django-generic-m2m,coleifer/django-generic-m2m,coleifer/django-generic-m2m
|
e77042c914b9725da0fef7e56ede12635c1a876b
|
s3s3/api.py
|
s3s3/api.py
|
"""
The API for s3s3.
"""
import tempfile
from boto.s3.connection import S3Connection
def create_connection(connection_args):
connection_args = connection_args.copy()
connection_args.pop('bucket_name')
return S3Connection(**connection_args)
def upload(source_key, dest_keys):
"""
`source_key` The source boto s3 key.
`dest_keys` The destination boto s3 keys.
"""
# Use the same name if no destination key is passed.
if not dest_key:
dest_key = source_key
with tempfile.NamedTemporaryFile() as data:
source_key.get_contents_to_file(data)
for dest_key in dest_keys:
dest_key.set_contents_from_filename(data.name)
|
"""
The API for s3s3.
"""
import tempfile
from boto.s3.connection import S3Connection
def create_connection(connection_args):
connection_args = connection_args.copy()
connection_args.pop('bucket_name')
return S3Connection(**connection_args)
def upload(source_key, dest_keys):
"""
`source_key` The source boto s3 key.
`dest_keys` A list of the destination boto s3 keys.
"""
# Use the same name if no destination key is passed.
if not dest_keys or not source_key:
raise Exception(
'The source_key and dest_keys parameters are required.')
with tempfile.NamedTemporaryFile() as data:
source_key.get_contents_to_file(data)
for dest_key in dest_keys:
dest_key.set_contents_from_filename(data.name)
|
Fix typo. dest_key => dest_keys.
|
Fix typo. dest_key => dest_keys.
modified: s3s3/api.py
|
Python
|
mit
|
lsst-sqre/s3s3,lsst-sqre/s3-glacier
|
ada7e2d2b98664fd6c481c4279677a4292e5bfef
|
openedx/features/idea/api_views.py
|
openedx/features/idea/api_views.py
|
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from rest_framework import status
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from openedx.features.idea.models import Idea
class FavoriteAPIView(APIView):
"""
FavoriteAPIView is used to toggle favorite idea for the user
"""
authentication_classes = (SessionAuthentication, BasicAuthentication)
permission_classes = (IsAuthenticated,)
def post(self, request, idea_id):
response = {'message': 'Idea is added to favorites', 'is_idea_favorite': True}
toggle_status = status.HTTP_201_CREATED
user = request.user
idea = get_object_or_404(Idea, pk=idea_id)
toggle_favorite_status = idea.toggle_favorite(user)
if not toggle_favorite_status:
response['is_idea_favorite'] = False
response['message'] = 'Idea is removed from favorites'
toggle_status = status.HTTP_200_OK
response['favorite_count'] = idea.favorites.count()
return JsonResponse(response, status=toggle_status)
|
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from edx_rest_framework_extensions.auth.session.authentication import SessionAuthenticationAllowInactiveUser
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from openedx.features.idea.models import Idea
class FavoriteAPIView(APIView):
"""
FavoriteAPIView is used to toggle favorite idea for the user
"""
authentication_classes = (SessionAuthenticationAllowInactiveUser,)
permission_classes = (IsAuthenticated,)
def post(self, request, idea_id):
response = {'message': 'Idea is added to favorites', 'is_idea_favorite': True}
toggle_status = status.HTTP_201_CREATED
user = request.user
idea = get_object_or_404(Idea, pk=idea_id)
toggle_favorite_status = idea.toggle_favorite(user)
if not toggle_favorite_status:
response['is_idea_favorite'] = False
response['message'] = 'Idea is removed from favorites'
toggle_status = status.HTTP_200_OK
response['favorite_count'] = idea.favorites.count()
return JsonResponse(response, status=toggle_status)
|
Change authentication classes to cater inactive users
|
[LP-1965] Change authentication classes to cater inactive users
|
Python
|
agpl-3.0
|
philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform
|
d6782066e3ed3f00e3c8dcffe2ffd0b9bad18d17
|
slave/skia_slave_scripts/render_pdfs.py
|
slave/skia_slave_scripts/render_pdfs.py
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia render_pdfs executable. """
from build_step import BuildStep, BuildStepWarning
import sys
class RenderPdfs(BuildStep):
def _Run(self):
# Skip this step for now, since the new SKPs are causing it to crash.
raise BuildStepWarning('Skipping this step since it is crashing.')
#self.RunFlavoredCmd('render_pdfs', [self._device_dirs.SKPDir()])
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(RenderPdfs))
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia render_pdfs executable. """
from build_step import BuildStep
import sys
class RenderPdfs(BuildStep):
def _Run(self):
self.RunFlavoredCmd('render_pdfs', [self._device_dirs.SKPDir()])
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(RenderPdfs))
|
Revert "Skip RenderPdfs until the crash is fixed"
|
Revert "Skip RenderPdfs until the crash is fixed"
This reverts commit fd03af0fbcb5f1b3656bcc78d934c560816d6810.
https://codereview.chromium.org/15002002/ fixes the crash.
R=borenet@google.com
Review URL: https://codereview.chromium.org/14577010
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@9019 2bbb7eff-a529-9590-31e7-b0007b416f81
|
Python
|
bsd-3-clause
|
Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot
|
bf91e50b02ff8ef89e660e3c853cc2f30646f32d
|
bash_runner/tasks.py
|
bash_runner/tasks.py
|
"""
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/home/ubuntu/hello', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "hello_bash status", "state", "running")
|
"""
Cloudify plugin for running a simple bash script.
Operations:
start: Run a script
"""
from celery import task
from cosmo.events import send_event as send_riemann_event
from cloudify.utils import get_local_ip
get_ip = get_local_ip
send_event = send_riemann_event
@task
def start(__cloudify_id, port=8080, **kwargs):
with open('/home/ubuntu/hello', 'w') as f:
print >> f, 'HELLO BASH! %s' % port
send_event(__cloudify_id, get_ip(), "bash_runner status", "state", "running")
|
Change the status string in riemann
|
Change the status string in riemann
|
Python
|
apache-2.0
|
rantav/cosmo-plugin-bash-runner
|
c568cf4b1be5e38b92f7d3a9131e67ff9eff764e
|
lib/ctf_gameserver/lib/helper.py
|
lib/ctf_gameserver/lib/helper.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
def convert_arg_line_to_args(arg_line):
"""argparse helper for splitting input from config
Allows comment lines in configfiles and allows both argument and
value on the same line
"""
if arg_line.strip().startswith('#'):
return []
else:
return arg_line.split()
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import shlex
def convert_arg_line_to_args(arg_line):
"""argparse helper for splitting input from config
Allows comment lines in configfiles and allows both argument and
value on the same line
"""
return shlex.split(arg_line, comments=True)
|
Improve config argument splitting to allow quoted spaces
|
Improve config argument splitting to allow quoted spaces
|
Python
|
isc
|
fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver
|
a234b8dfc45d9e08a452ccc4f275283eb1eb5485
|
dataactbroker/scripts/loadFSRS.py
|
dataactbroker/scripts/loadFSRS.py
|
import logging
import sys
from dataactcore.models.baseInterface import databaseSession
from dataactbroker.fsrs import (
configValid, fetchAndReplaceBatch, GRANT, PROCUREMENT)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
with databaseSession() as sess:
if not configValid():
logger.error("No config for broker/fsrs/[service]/wsdl")
sys.exit(1)
else:
procs = fetchAndReplaceBatch(sess, PROCUREMENT)
grants = fetchAndReplaceBatch(sess, GRANT)
awards = procs + grants
numSubAwards = sum(len(a.subawards) for a in awards)
logger.info("Inserted/Updated %s awards, %s subawards",
len(awards), numSubAwards)
|
import logging
import sys
from dataactcore.interfaces.db import databaseSession
from dataactbroker.fsrs import (
configValid, fetchAndReplaceBatch, GRANT, PROCUREMENT)
logger = logging.getLogger(__name__)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
with databaseSession() as sess:
if not configValid():
logger.error("No config for broker/fsrs/[service]/wsdl")
sys.exit(1)
else:
procs = fetchAndReplaceBatch(sess, PROCUREMENT)
grants = fetchAndReplaceBatch(sess, GRANT)
awards = procs + grants
numSubAwards = sum(len(a.subawards) for a in awards)
logger.info("Inserted/Updated %s awards, %s subawards",
len(awards), numSubAwards)
|
Switch to using dbSession in db.py instead of baseInterface.py
|
Switch to using dbSession in db.py instead of baseInterface.py
This is another file that should have been included in PR #272,
where we transitioned all existing non-Flask db access to a
db connection using the new contextmanager. Originally missed
this one because it *is* using a contextmanager, but it's using
one in the deprecated baseInterface.py instead of the newer db.py.
|
Python
|
cc0-1.0
|
fedspendingtransparency/data-act-broker-backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,fedspendingtransparency/data-act-broker-backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend
|
0022726e9f2d122ff84eb19ed2807649ab96f931
|
deployment/cfn/utils/constants.py
|
deployment/cfn/utils/constants.py
|
EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium',
't2.large'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro',
'db.t2.small',
'db.t2.medium',
'db.t2.large'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
HTTP = 80
HTTPS = 443
POSTGRESQL = 5432
SSH = 22
AMAZON_ACCOUNT_ID = 'amazon'
AMAZON_S3_VPC_ENDPOINT = 'com.amazonaws.us-east-1.s3'
CANONICAL_ACCOUNT_ID = '099720109477'
|
EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium',
't2.large',
'm3.medium'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro',
'db.t2.small',
'db.t2.medium',
'db.t2.large'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
HTTP = 80
HTTPS = 443
POSTGRESQL = 5432
SSH = 22
AMAZON_ACCOUNT_ID = 'amazon'
AMAZON_S3_VPC_ENDPOINT = 'com.amazonaws.us-east-1.s3'
CANONICAL_ACCOUNT_ID = '099720109477'
|
Add m3.medium to EC2 instance types
|
Add m3.medium to EC2 instance types
This is the lowest `m3` family instance type with ephemeral storage.
|
Python
|
apache-2.0
|
azavea/raster-foundry,aaronxsu/raster-foundry,kdeloach/raster-foundry,kdeloach/raster-foundry,azavea/raster-foundry,azavea/raster-foundry,aaronxsu/raster-foundry,azavea/raster-foundry,kdeloach/raster-foundry,raster-foundry/raster-foundry,azavea/raster-foundry,raster-foundry/raster-foundry,kdeloach/raster-foundry,kdeloach/raster-foundry,aaronxsu/raster-foundry,raster-foundry/raster-foundry,aaronxsu/raster-foundry
|
4d5d4665f2b46e12618b7762246d84884447e99e
|
redash/cli/organization.py
|
redash/cli/organization.py
|
from flask_script import Manager
from redash import models
manager = Manager(help="Organization management commands.")
@manager.option('domains', help="comma separated list of domains to allow")
def set_google_apps_domains(domains):
organization = models.Organization.select().first()
organization.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = domains.split(',')
organization.save()
print "Updated list of allowed domains to: {}".format(organization.google_apps_domains)
@manager.command
def show_google_apps_domains():
organization = models.Organization.select().first()
print "Current list of Google Apps domains: {}".format(organization.google_apps_domains)
|
from flask_script import Manager
from redash import models
manager = Manager(help="Organization management commands.")
@manager.option('domains', help="comma separated list of domains to allow")
def set_google_apps_domains(domains):
organization = models.Organization.select().first()
organization.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = domains.split(',')
organization.save()
print "Updated list of allowed domains to: {}".format(organization.google_apps_domains)
@manager.command
def show_google_apps_domains():
organization = models.Organization.select().first()
print "Current list of Google Apps domains: {}".format(organization.google_apps_domains)
@manager.command
def list():
"""List all organizations"""
orgs = models.Organization.select()
for i, org in enumerate(orgs):
if i > 0:
print "-" * 20
print "Id: {}\nName: {}\nSlug: {}".format(org.id, org.name, org.slug)
|
Add 'manage.py org list' command
|
Add 'manage.py org list' command
'org list' simply prints out the organizations.
|
Python
|
bsd-2-clause
|
pubnative/redash,pubnative/redash,pubnative/redash,pubnative/redash,pubnative/redash
|
c23cd25247974abc85c66451737f4de8d8b19d1b
|
lib/rapidsms/backends/backend.py
|
lib/rapidsms/backends/backend.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
class Backend(object):
def log(self, level, message):
self.router.log(level, message)
def start(self):
raise NotImplementedError
def stop(self):
raise NotImplementedError
def send(self):
raise NotImplementedError
def receive(self):
raise NotImplementedError
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
class Backend(object):
def __init__ (self, router):
self.router = router
def log(self, level, message):
self.router.log(level, message)
def start(self):
raise NotImplementedError
def stop(self):
raise NotImplementedError
def send(self):
raise NotImplementedError
def receive(self):
raise NotImplementedError
|
Add a constructor method for Backend
|
Add a constructor method for Backend
|
Python
|
bsd-3-clause
|
dimagi/rapidsms,ehealthafrica-ci/rapidsms,eHealthAfrica/rapidsms,ken-muturi/rapidsms,lsgunth/rapidsms,catalpainternational/rapidsms,unicefuganda/edtrac,catalpainternational/rapidsms,eHealthAfrica/rapidsms,ken-muturi/rapidsms,ken-muturi/rapidsms,lsgunth/rapidsms,unicefuganda/edtrac,lsgunth/rapidsms,ehealthafrica-ci/rapidsms,unicefuganda/edtrac,peterayeni/rapidsms,caktus/rapidsms,lsgunth/rapidsms,caktus/rapidsms,peterayeni/rapidsms,peterayeni/rapidsms,dimagi/rapidsms,eHealthAfrica/rapidsms,catalpainternational/rapidsms,rapidsms/rapidsms-core-dev,peterayeni/rapidsms,rapidsms/rapidsms-core-dev,catalpainternational/rapidsms,ehealthafrica-ci/rapidsms,dimagi/rapidsms-core-dev,caktus/rapidsms,dimagi/rapidsms-core-dev
|
7a37e3afa29410636c75408bc649e70c519e07f1
|
test/user_profile_test.py
|
test/user_profile_test.py
|
import json
from pymessenger.user_profile import UserProfileApi
from test_env import *
upa = UserProfileApi(PAGE_ACCESS_TOKEN, app_secret=APP_SECRET)
def test_fields_blank():
user_profile = upa.get(TEST_USER_ID)
assert user_profile is not None
def test_fields():
fields = ['first_name', 'last_name']
user_profile = upa.get(TEST_USER_ID, fields=fields)
assert user_profile is not None
assert len(user_profile.keys()) == len(fields)
|
import json
import sys, os
sys.path.append(os.path.realpath(os.path.dirname(__file__)+"/.."))
from pymessenger.user_profile import UserProfileApi
TOKEN = os.environ.get('TOKEN')
APP_SECRET = os.environ.get('APP_SECRET')
TEST_USER_ID = os.environ.get('RECIPIENT_ID')
upa = UserProfileApi(TOKEN, app_secret=APP_SECRET)
def test_fields_blank():
user_profile = upa.get(TEST_USER_ID)
assert user_profile is not None
def test_fields():
fields = ['first_name', 'last_name']
user_profile = upa.get(TEST_USER_ID, fields=fields)
assert user_profile is not None
assert len(user_profile.keys()) == len(fields)
|
Fix user profile test to include same environment variables
|
Fix user profile test to include same environment variables
|
Python
|
mit
|
karlinnolabs/pymessenger,Cretezy/pymessenger2,davidchua/pymessenger
|
2fec4b3ffa1619f81088383c9f565b51f6171fd6
|
seaborn/miscplot.py
|
seaborn/miscplot.py
|
from __future__ import division
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
__all__ = ["palplot", "dogplot"]
def palplot(pal, size=1):
"""Plot the values in a color palette as a horizontal array.
Parameters
----------
pal : sequence of matplotlib colors
colors, i.e. as returned by seaborn.color_palette()
size :
scaling factor for size of plot
"""
n = len(pal)
f, ax = plt.subplots(1, 1, figsize=(n * size, size))
ax.imshow(np.arange(n).reshape(1, n),
cmap=mpl.colors.ListedColormap(list(pal)),
interpolation="nearest", aspect="auto")
ax.set_xticks(np.arange(n) - .5)
ax.set_yticks([-.5, .5])
ax.set_xticklabels([])
ax.set_yticklabels([])
def dogplot():
"""Who's a good boy?"""
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
from io import BytesIO
url = "https://github.com/mwaskom/seaborn-data/raw/master/png/img1.png"
data = BytesIO(urlopen(url).read())
img = plt.imread(data)
f, ax = plt.subplots(figsize=(5, 5), dpi=100)
f.subplots_adjust(0, 0, 1, 1)
ax.imshow(img)
ax.set_axis_off()
|
from __future__ import division
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
__all__ = ["palplot", "dogplot"]
def palplot(pal, size=1):
"""Plot the values in a color palette as a horizontal array.
Parameters
----------
pal : sequence of matplotlib colors
colors, i.e. as returned by seaborn.color_palette()
size :
scaling factor for size of plot
"""
n = len(pal)
f, ax = plt.subplots(1, 1, figsize=(n * size, size))
ax.imshow(np.arange(n).reshape(1, n),
cmap=mpl.colors.ListedColormap(list(pal)),
interpolation="nearest", aspect="auto")
ax.set_xticks(np.arange(n) - .5)
ax.set_yticks([-.5, .5])
ax.set_xticklabels([])
ax.set_yticklabels([])
def dogplot():
"""Who's a good boy?"""
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
from io import BytesIO
url = "https://github.com/mwaskom/seaborn-data/raw/master/png/img{}.png"
pic = np.random.randint(2, 7)
data = BytesIO(urlopen(url.format(pic)).read())
img = plt.imread(data)
f, ax = plt.subplots(figsize=(5, 5), dpi=100)
f.subplots_adjust(0, 0, 1, 1)
ax.imshow(img)
ax.set_axis_off()
|
Update to reflect new example data
|
Update to reflect new example data
|
Python
|
bsd-3-clause
|
arokem/seaborn,mwaskom/seaborn,anntzer/seaborn,arokem/seaborn,mwaskom/seaborn,anntzer/seaborn
|
a4c5e9a970a297d59000468dde8423fa9db00c0f
|
packs/fixtures/actions/scripts/streamwriter-script.py
|
packs/fixtures/actions/scripts/streamwriter-script.py
|
#!/usr/bin/env python
import argparse
import sys
import ast
from lib.exceptions import CustomException
class StreamWriter(object):
def run(self, stream):
if stream.upper() == 'STDOUT':
sys.stdout.write('STREAM IS STDOUT.')
return stream
if stream.upper() == 'STDERR':
sys.stderr.write('STREAM IS STDERR.')
return stream
raise CustomException('Invalid stream specified.')
def main(args):
stream = args.stream
writer = StreamWriter()
stream = writer.run(stream)
str_arg = args.str_arg
int_arg = args.int_arg
obj_arg = args.obj_arg
if str_arg:
sys.stdout.write(' STR: %s' % str_arg)
if int_arg:
sys.stdout.write(' INT: %d' % int_arg)
if obj_arg:
sys.stdout.write(' OBJ: %s' % obj_arg)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='')
parser.add_argument('--stream', help='Stream.', required=True)
parser.add_argument('--str_arg', help='Some string arg.')
parser.add_argument('--int_arg', help='Some int arg.', type=float)
parser.add_argument('--obj_arg', help='Some dict arg.', type=ast.literal_eval)
args = parser.parse_args()
main(args)
|
#!/usr/bin/env python
import argparse
import sys
import ast
import re
from lib.exceptions import CustomException
class StreamWriter(object):
def run(self, stream):
if stream.upper() == 'STDOUT':
sys.stdout.write('STREAM IS STDOUT.')
return stream
if stream.upper() == 'STDERR':
sys.stderr.write('STREAM IS STDERR.')
return stream
raise CustomException('Invalid stream specified.')
def main(args):
stream = args.stream
writer = StreamWriter()
stream = writer.run(stream)
str_arg = args.str_arg
int_arg = args.int_arg
obj_arg = args.obj_arg
if str_arg:
sys.stdout.write(' STR: %s' % str_arg)
if int_arg:
sys.stdout.write(' INT: %d' % int_arg)
if obj_arg:
# Remove any u'' so it works consistently under Python 2 and 3.x
obj_arg_str = str(obj_arg)
value = re.sub("u'(.*?)'", r"'\1'", obj_arg_str)
sys.stdout.write(' OBJ: %s' % value)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='')
parser.add_argument('--stream', help='Stream.', required=True)
parser.add_argument('--str_arg', help='Some string arg.')
parser.add_argument('--int_arg', help='Some int arg.', type=float)
parser.add_argument('--obj_arg', help='Some dict arg.', type=ast.literal_eval)
args = parser.parse_args()
main(args)
|
Fix streamwriter action so it doesn't include "u" type prefix in the object result.
|
Fix streamwriter action so it doesn't include "u" type prefix in the
object result.
This way it works consistently and correctly under Python 2 and Python
3.
|
Python
|
apache-2.0
|
StackStorm/st2tests,StackStorm/st2tests,StackStorm/st2tests
|
0fa23851cbe33ba0d3bddb8367d7089545de6847
|
setup.py
|
setup.py
|
#! /usr/bin/env python
from distutils.core import setup
setup(
name = 'qless-py',
version = '0.10.0',
description = 'Redis-based Queue Management',
long_description = '''
Redis-based queue management, with heartbeating, job tracking,
stats, notifications, and a whole lot more.''',
url = 'http://github.com/seomoz/qless-py',
author = 'Dan Lecocq',
author_email = 'dan@seomoz.org',
license = "MIT License",
keywords = 'redis, qless, job',
packages = ['qless', 'qless.workers'],
package_dir = {
'qless': 'qless',
'qless.workers': 'qless/workers'},
package_data = {'qless': ['qless-core/*.lua']},
include_package_data = True,
scripts = ['bin/qless-py-worker'],
extras_require = {
'ps': ['setproctitle']
},
install_requires = [
'argparse', 'hiredis', 'redis', 'psutil', 'simplejson'],
classifiers = [
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
]
)
|
#! /usr/bin/env python
from distutils.core import setup
setup(
name = 'qless-py',
version = '0.10.0',
description = 'Redis-based Queue Management',
long_description = '''
Redis-based queue management, with heartbeating, job tracking,
stats, notifications, and a whole lot more.''',
url = 'http://github.com/seomoz/qless-py',
author = 'Dan Lecocq',
author_email = 'dan@seomoz.org',
license = "MIT License",
keywords = 'redis, qless, job',
packages = ['qless', 'qless.workers'],
package_dir = {
'qless': 'qless',
'qless.workers': 'qless/workers'},
package_data = {'qless': ['qless-core/*.lua']},
include_package_data = True,
scripts = ['bin/qless-py-worker'],
extras_require = {
'ps': ['setproctitle']
},
install_requires = [
'argparse', 'decorator', 'hiredis', 'redis', 'psutil', 'simplejson'],
classifiers = [
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Operating System :: OS Independent'
]
)
|
Fix for "No module named decorator" on fresh environment installs.
|
Fix for "No module named decorator" on fresh environment installs.
Fixes regression from 4b26b5837ced0c2f76495b05b87e63e05f81c2af.
|
Python
|
mit
|
seomoz/qless-py,seomoz/qless-py
|
5476145559e0e47dac47b41dd4bfdb9fd41bfe29
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""Setup script for the pyparsing module distribution."""
from distutils.core import setup
from pyparsing import __version__
setup(# Distribution meta-data
name = "pyparsing",
version = __version__,
description = "Python parsing module",
author = "Paul McGuire",
author_email = "ptmcg@users.sourceforge.net",
url = "http://pyparsing.wikispaces.com/",
download_url = "http://sourceforge.net/project/showfiles.php?group_id=97203",
license = "MIT License",
py_modules = ["pyparsing"],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
#!/usr/bin/env python
"""Setup script for the pyparsing module distribution."""
from distutils.core import setup
from pyparsing import __version__
setup(# Distribution meta-data
name = "pyparsing",
version = __version__,
description = "Python parsing module",
author = "Paul McGuire",
author_email = "ptmcg@users.sourceforge.net",
url = "http://pyparsing.wikispaces.com/",
download_url = "http://sourceforge.net/project/showfiles.php?group_id=97203",
license = "MIT License",
py_modules = ["pyparsing", "pyparsing_py3"],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
Add change to ship both pyparsing and pyparsing_py3 modules.
|
Add change to ship both pyparsing and pyparsing_py3 modules.
|
Python
|
mit
|
5monkeys/pyparsing
|
af1d3b67bb6428a298e5028b7c86624d2f7f00c8
|
setup.py
|
setup.py
|
"""
Copyright (c) 2010-2013, Anthony Garcia <anthony@lagg.me>
Distributed under the ISC License (see LICENSE)
"""
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
steam.api.key.set(self.key)
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
|
"""
Copyright (c) 2010-2013, Anthony Garcia <anthony@lagg.me>
Distributed under the ISC License (see LICENSE)
"""
from distutils.core import setup, Command
from distutils.errors import DistutilsOptionError
from unittest import TestLoader, TextTestRunner
import steam
class run_tests(Command):
description = "Run the steamodd unit tests"
user_options = [
("key=", 'k', "Your API key")
]
def initialize_options(self):
self.key = None
def finalize_options(self):
if not self.key:
raise DistutilsOptionError("API key is required")
else:
steam.api.key.set(self.key)
def run(self):
tests = TestLoader().discover("tests")
TextTestRunner(verbosity = 2).run(tests)
setup(name = "steamodd",
version = steam.__version__,
description = "High level Steam API implementation with low level reusable core",
long_description = "Please see the `README <https://github.com/Lagg/steamodd/blob/master/README.md>`_ for a full description.",
packages = ["steam"],
author = steam.__author__,
author_email = steam.__contact__,
url = "https://github.com/Lagg/steamodd",
classifiers = [
"License :: OSI Approved :: ISC License (ISCL)",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python"
],
license = steam.__license__,
cmdclass = {"run_tests": run_tests})
|
Add rst long description for pypi
|
Add rst long description for pypi
|
Python
|
isc
|
miedzinski/steamodd,Lagg/steamodd
|
d815c8de309239e3c6f28e54793c9973ca9acc39
|
twilio/values.py
|
twilio/values.py
|
unset = object()
def of(d):
return {k: v for k, v in d.iteritems() if v != unset}
|
from six import iteritems
unset = object()
def of(d):
return {k: v for k, v in iteritems(d) if v != unset}
|
Replace iteritems with six helper
|
Replace iteritems with six helper
|
Python
|
mit
|
twilio/twilio-python,tysonholub/twilio-python
|
2989c7074853266fd134a10df4afdcb700499203
|
analyticsdataserver/urls.py
|
analyticsdataserver/urls.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from analyticsdataserver import views
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/docs')), # pylint: disable=no-value-for-parameter
url(r'^api-auth/', include('rest_framework.urls', 'rest_framework')),
url(r'^api-token-auth/', 'rest_framework.authtoken.views.obtain_auth_token'),
url(r'^api/', include('analytics_data_api.urls', 'api')),
url(r'^docs/', include('rest_framework_swagger.urls')),
url(r'^status/$', views.StatusView.as_view(), name='status'),
url(r'^authenticated/$', views.AuthenticationTestView.as_view(), name='authenticated'),
url(r'^health/$', views.HealthView.as_view(), name='health'),
]
if settings.ENABLE_ADMIN_SITE: # pragma: no cover
admin.autodiscover()
urlpatterns.append(url(r'^site/admin/', include(admin.site.urls)))
handler500 = 'analyticsdataserver.views.handle_internal_server_error' # pylint: disable=invalid-name
handler404 = 'analyticsdataserver.views.handle_missing_resource_error' # pylint: disable=invalid-name
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
from analyticsdataserver import views
from rest_framework.authtoken.views import obtain_auth_token
urlpatterns = [
url(r'^$', RedirectView.as_view(url='/docs')), # pylint: disable=no-value-for-parameter
url(r'^api-auth/', include('rest_framework.urls', 'rest_framework')),
url(r'^api-token-auth/', obtain_auth_token),
url(r'^api/', include('analytics_data_api.urls', 'api')),
url(r'^docs/', include('rest_framework_swagger.urls')),
url(r'^status/$', views.StatusView.as_view(), name='status'),
url(r'^authenticated/$', views.AuthenticationTestView.as_view(), name='authenticated'),
url(r'^health/$', views.HealthView.as_view(), name='health'),
]
if settings.ENABLE_ADMIN_SITE: # pragma: no cover
admin.autodiscover()
urlpatterns.append(url(r'^site/admin/', include(admin.site.urls)))
handler500 = 'analyticsdataserver.views.handle_internal_server_error' # pylint: disable=invalid-name
handler404 = 'analyticsdataserver.views.handle_missing_resource_error' # pylint: disable=invalid-name
|
Update string arg to url() to callable
|
Update string arg to url() to callable
|
Python
|
agpl-3.0
|
Stanford-Online/edx-analytics-data-api,edx/edx-analytics-data-api,Stanford-Online/edx-analytics-data-api,edx/edx-analytics-data-api,Stanford-Online/edx-analytics-data-api
|
ef516fb03db9bdaa0f0bea97526a65c319b8e43c
|
tohu/v3/utils.py
|
tohu/v3/utils.py
|
from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [str(next(gen)) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
|
from collections import namedtuple
__all__ = ['identity', 'print_generated_sequence']
def identity(x):
"""
Helper function which returns its argument unchanged.
That is, `identity(x)` returns `x` for any input `x`.
"""
return x
def print_generated_sequence(gen, num, *, sep=", ", fmt='', seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `gen`.
"""
if seed:
gen.reset(seed)
elems = [format(next(gen), fmt) for _ in range(num)]
sep_initial = "\n\n" if '\n' in sep else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
def make_dummy_tuples(chars='abcde'):
Quux = namedtuple('Quux', ['x', 'y'])
some_tuples = [Quux((c*2).upper(), c*2) for c in chars]
return some_tuples
|
Allow passing format option to helper function
|
Allow passing format option to helper function
|
Python
|
mit
|
maxalbert/tohu
|
f12beb5d2fbdc72c12f473c5cac04716f4893666
|
test/viz/test_volcano.py
|
test/viz/test_volcano.py
|
from sequana.viz import Volcano
def test1():
import numpy as np
fc = np.random.randn(1000)
pvalue = np.random.randn(1000)
v = Volcano(fc, -np.log10(pvalue**2), pvalue_threshold=3)
v.plot()
v.plot(logy=True)
|
from sequana.viz import Volcano
import pandas as pd
def test1():
import numpy as np
fc = np.random.randn(1000)
pvalue = np.random.randn(1000)
df = pd.DataFrame({"log2FoldChange": fc, "padj": pvalue ** 2})
v = Volcano(data=df, pvalue_threshold=3)
v.plot()
v.plot(logy=True)
|
Update test for volcano plot
|
Update test for volcano plot
|
Python
|
bsd-3-clause
|
sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana
|
3d1969ebf187ed0f0ee52e84e951f65b108ce4cf
|
l10n_br_coa_simple/hooks.py
|
l10n_br_coa_simple/hooks.py
|
# Copyright (C) 2020 - Gabriel Cardoso de Faria <gabriel.cardoso@kmee.com.br>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, tools, SUPERUSER_ID
def post_init_hook(cr, registry):
env = api.Environment(cr, SUPERUSER_ID, {})
coa_simple_tmpl = env.ref(
'l10n_br_coa_simple.l10n_br_coa_simple_chart_template')
if env['ir.module.module'].search_count([
('name', '=', 'l10n_br_account'),
('state', '=', 'installed'),
]):
from odoo.addons.l10n_br_account.hooks import load_fiscal_taxes
# Relate fiscal taxes to account taxes.
load_fiscal_taxes(env, coa_simple_tmpl)
# Load COA to Demo Company
if not tools.config.get('without_demo'):
env.user.company_id = env.ref(
'l10n_br_fiscal.empresa_simples_nacional')
coa_simple_tmpl.try_loading_for_current_company()
|
# Copyright (C) 2020 - Gabriel Cardoso de Faria <gabriel.cardoso@kmee.com.br>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, tools, SUPERUSER_ID
def post_init_hook(cr, registry):
env = api.Environment(cr, SUPERUSER_ID, {})
coa_simple_tmpl = env.ref(
'l10n_br_coa_simple.l10n_br_coa_simple_chart_template')
if env['ir.module.module'].search_count([
('name', '=', 'l10n_br_account'),
('state', '=', 'installed'),
]):
from odoo.addons.l10n_br_account.hooks import load_fiscal_taxes
# Relate fiscal taxes to account taxes.
load_fiscal_taxes(env, coa_simple_tmpl)
# Load COA to Demo Company
if not tools.config.get('without_demo'):
user_admin = env.ref('base.user_admin')
user_admin.company_id = env.ref(
'l10n_br_base.empresa_simples_nacional')
coa_simple_tmpl.sudo(
user=user_admin.id).try_loading_for_current_company()
user_admin.company_id = env.ref('base.main_company')
|
Use admin user to create COA
|
[FIX] l10n_br_coa_simple: Use admin user to create COA
|
Python
|
agpl-3.0
|
akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil
|
f43f71cb016bc71ea32e80c2fd86f05b6af38468
|
snoop/ipython.py
|
snoop/ipython.py
|
import ast
from snoop import snoop
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
@cell_magic
def snoop(self, _line, cell):
filename = self.shell.compile.cache(cell)
code = self.shell.compile(cell, filename, 'exec')
tracer = snoop()
tracer.variable_whitelist = set()
for node in ast.walk(ast.parse(cell)):
if isinstance(node, ast.Name):
tracer.variable_whitelist.add(node.id)
tracer.target_codes.add(code)
with tracer:
self.shell.ex(code)
|
import ast
from snoop import snoop
from IPython.core.magic import Magics, cell_magic, magics_class
@magics_class
class SnoopMagics(Magics):
@cell_magic
def snoop(self, _line, cell):
filename = self.shell.compile.cache(cell)
code = self.shell.compile(cell, filename, 'exec')
tracer = snoop()
tracer.variable_whitelist = set()
for node in ast.walk(ast.parse(cell)):
if isinstance(node, ast.Name):
name = node.id
if isinstance(
self.shell.user_global_ns.get(name),
type(ast),
):
# hide modules
continue
tracer.variable_whitelist.add(name)
tracer.target_codes.add(code)
with tracer:
self.shell.ex(code)
|
Hide modules from variables traced by %%snoop
|
Hide modules from variables traced by %%snoop
|
Python
|
mit
|
alexmojaki/snoop,alexmojaki/snoop
|
e63a914457fc10d895eb776a164939da3ddd9464
|
waftools/gogobject.py
|
waftools/gogobject.py
|
from waflib.Task import Task
from waflib.TaskGen import extension
class gogobject(Task):
run_str = '${GGG} ${GGGFLAGS} -o ${TGT[0].parent.abspath()} ${SRC}'
@extension('.go.in')
def gogobject_hook(self, node):
tg = self.bld.get_tgen_by_name('go-gobject-gen')
ggg = tg.link_task.outputs[0]
if not self.env.GGG:
self.env.GGG = ggg.abspath()
go_out = node.change_ext('')
c_out = go_out.change_ext('.gen.c')
h_out = go_out.change_ext('.gen.h')
task = self.create_task('gogobject', node, [go_out, c_out, h_out])
task.dep_nodes = [ggg]
return task
|
from waflib.Task import Task
from waflib.TaskGen import extension
class gogobject(Task):
run_str = '${GGG} ${GGGFLAGS} -o ${TGT[0].parent.abspath()} ${SRC}'
@extension('.go.in')
def gogobject_hook(self, node):
tg = self.bld.get_tgen_by_name('go-gobject-gen')
ggg = tg.link_task.outputs[0]
if not self.env.GGG:
self.env.GGG = ggg.abspath()
go_out = node.change_ext('')
c_out = go_out.change_ext('.gen.c')
h_out = go_out.change_ext('.gen.h')
task = self.create_task('gogobject', node, [go_out, c_out, h_out])
task.dep_nodes = [ggg, node.parent.find_node('config.json')]
return task
|
Use config.json as a go-gobject-gen dependency as well.
|
Use config.json as a go-gobject-gen dependency as well.
|
Python
|
mit
|
nsf/gogobject,nsf/gogobject,nsf/gogobject,nsf/gogobject
|
ef048131d586812c2d73edd6297dfae4305b6074
|
website/exceptions.py
|
website/exceptions.py
|
class OSFError(Exception):
"""Base class for exceptions raised by the Osf application"""
pass
class NodeError(OSFError):
"""Raised when an action cannot be performed on a Node model"""
pass
class NodeStateError(NodeError):
"""Raised when the Node's state is not suitable for the requested action
Example: Node.remove_node() is called, but the node has non-deleted children
"""
pass
class SanctionTokenError(NodeError):
"""Base class for errors arising from the user of a sanction token."""
pass
class InvalidSanctionRejectionToken(SanctionTokenError):
"""Raised if a embargo disapproval token is not found."""
message_short = "Invalid Token"
message_long = "This embargo disapproval link is invalid. Are you logged into the correct account?"
class InvalidSanctionApprovalToken(SanctionTokenError):
"""Raised if a embargo disapproval token is not found."""
message_short = "Invalid Token"
message_long = "This embargo disapproval link is invalid. Are you logged into the correct account?"
|
class OSFError(Exception):
"""Base class for exceptions raised by the Osf application"""
pass
class NodeError(OSFError):
"""Raised when an action cannot be performed on a Node model"""
pass
class NodeStateError(NodeError):
"""Raised when the Node's state is not suitable for the requested action
Example: Node.remove_node() is called, but the node has non-deleted children
"""
pass
class SanctionTokenError(NodeError):
"""Base class for errors arising from the user of a sanction token."""
pass
class InvalidSanctionRejectionToken(SanctionTokenError):
"""Raised if a Sanction subclass disapproval token submitted is invalid
or associated with another admin authorizer
"""
message_short = "Invalid Token"
message_long = "This disapproval link is invalid. Are you logged into the correct account?"
class InvalidSanctionApprovalToken(SanctionTokenError):
"""Raised if a Sanction subclass approval token submitted is invalid
or associated with another admin authorizer
"""
message_short = "Invalid Token"
message_long = "This disapproval link is invalid. Are you logged into the correct account?"
|
Update Sanction exception error message and docstrings
|
Update Sanction exception error message and docstrings
|
Python
|
apache-2.0
|
alexschiller/osf.io,amyshi188/osf.io,felliott/osf.io,cwisecarver/osf.io,ckc6cz/osf.io,TomBaxter/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,RomanZWang/osf.io,doublebits/osf.io,adlius/osf.io,ticklemepierce/osf.io,danielneis/osf.io,samchrisinger/osf.io,baylee-d/osf.io,sloria/osf.io,asanfilippo7/osf.io,RomanZWang/osf.io,erinspace/osf.io,hmoco/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,icereval/osf.io,mluo613/osf.io,Nesiehr/osf.io,mattclark/osf.io,felliott/osf.io,brandonPurvis/osf.io,hmoco/osf.io,amyshi188/osf.io,zachjanicki/osf.io,petermalcolm/osf.io,GageGaskins/osf.io,mluke93/osf.io,Nesiehr/osf.io,njantrania/osf.io,DanielSBrown/osf.io,chennan47/osf.io,doublebits/osf.io,ZobairAlijan/osf.io,DanielSBrown/osf.io,samchrisinger/osf.io,laurenrevere/osf.io,brandonPurvis/osf.io,cwisecarver/osf.io,doublebits/osf.io,KAsante95/osf.io,chrisseto/osf.io,petermalcolm/osf.io,cosenal/osf.io,leb2dg/osf.io,rdhyee/osf.io,aaxelb/osf.io,pattisdr/osf.io,jmcarp/osf.io,mfraezz/osf.io,zachjanicki/osf.io,emetsger/osf.io,cwisecarver/osf.io,cslzchen/osf.io,mattclark/osf.io,brianjgeiger/osf.io,samanehsan/osf.io,saradbowman/osf.io,wearpants/osf.io,crcresearch/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,ZobairAlijan/osf.io,ZobairAlijan/osf.io,chrisseto/osf.io,TomHeatwole/osf.io,zachjanicki/osf.io,jmcarp/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,acshi/osf.io,brandonPurvis/osf.io,mfraezz/osf.io,cosenal/osf.io,erinspace/osf.io,njantrania/osf.io,Ghalko/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,RomanZWang/osf.io,Ghalko/osf.io,hmoco/osf.io,aaxelb/osf.io,pattisdr/osf.io,asanfilippo7/osf.io,ticklemepierce/osf.io,haoyuchen1992/osf.io,caneruguz/osf.io,wearpants/osf.io,Ghalko/osf.io,caseyrollins/osf.io,abought/osf.io,sbt9uc/osf.io,caseyrollins/osf.io,amyshi188/osf.io,icereval/osf.io,jnayak1/osf.io,wearpants/osf.io,ticklemepierce/osf.io,lyndsysimon/osf.io,felliott/osf.io,caneruguz/osf.io,rdhyee/osf.io,abought/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,arpitar/osf.io,kch8qx/osf.io,abought/osf.io,samanehsan/osf.io,sbt9uc/osf.io,HalcyonChimera/osf.io,rdhyee/osf.io,saradbowman/osf.io,adlius/osf.io,arpitar/osf.io,KAsante95/osf.io,monikagrabowska/osf.io,cosenal/osf.io,mluo613/osf.io,brandonPurvis/osf.io,emetsger/osf.io,adlius/osf.io,MerlinZhang/osf.io,jolene-esposito/osf.io,kch8qx/osf.io,jnayak1/osf.io,GageGaskins/osf.io,leb2dg/osf.io,sloria/osf.io,zamattiac/osf.io,binoculars/osf.io,danielneis/osf.io,jnayak1/osf.io,mfraezz/osf.io,aaxelb/osf.io,jmcarp/osf.io,KAsante95/osf.io,leb2dg/osf.io,felliott/osf.io,leb2dg/osf.io,samchrisinger/osf.io,kwierman/osf.io,monikagrabowska/osf.io,binoculars/osf.io,baylee-d/osf.io,ZobairAlijan/osf.io,mluke93/osf.io,jnayak1/osf.io,CenterForOpenScience/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,mattclark/osf.io,caneruguz/osf.io,haoyuchen1992/osf.io,kwierman/osf.io,danielneis/osf.io,TomHeatwole/osf.io,jolene-esposito/osf.io,baylee-d/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,samanehsan/osf.io,SSJohns/osf.io,alexschiller/osf.io,laurenrevere/osf.io,cslzchen/osf.io,doublebits/osf.io,mfraezz/osf.io,alexschiller/osf.io,zamattiac/osf.io,GageGaskins/osf.io,abought/osf.io,billyhunt/osf.io,cwisecarver/osf.io,binoculars/osf.io,ticklemepierce/osf.io,MerlinZhang/osf.io,njantrania/osf.io,TomHeatwole/osf.io,sloria/osf.io,billyhunt/osf.io,arpitar/osf.io,chennan47/osf.io,rdhyee/osf.io,brandonPurvis/osf.io,petermalcolm/osf.io,caseyrollins/osf.io,ckc6cz/osf.io,asanfilippo7/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,jolene-esposito/osf.io,jolene-esposito/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,jmcarp/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,caseyrygt/osf.io,cslzchen/osf.io,cslzchen/osf.io,kch8qx/osf.io,danielneis/osf.io,amyshi188/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,zamattiac/osf.io,laurenrevere/osf.io,mluke93/osf.io,crcresearch/osf.io,SSJohns/osf.io,chrisseto/osf.io,acshi/osf.io,mluo613/osf.io,samchrisinger/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,Johnetordoff/osf.io,ckc6cz/osf.io,MerlinZhang/osf.io,chrisseto/osf.io,RomanZWang/osf.io,acshi/osf.io,arpitar/osf.io,icereval/osf.io,lyndsysimon/osf.io,mluo613/osf.io,adlius/osf.io,monikagrabowska/osf.io,kwierman/osf.io,pattisdr/osf.io,SSJohns/osf.io,caseyrygt/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,sbt9uc/osf.io,Johnetordoff/osf.io,doublebits/osf.io,acshi/osf.io,haoyuchen1992/osf.io,acshi/osf.io,haoyuchen1992/osf.io,zamattiac/osf.io,alexschiller/osf.io,sbt9uc/osf.io,aaxelb/osf.io,samanehsan/osf.io,mluo613/osf.io,zachjanicki/osf.io,KAsante95/osf.io,lyndsysimon/osf.io,Ghalko/osf.io,mluke93/osf.io,ckc6cz/osf.io,DanielSBrown/osf.io,GageGaskins/osf.io,kwierman/osf.io,caseyrygt/osf.io,lyndsysimon/osf.io,hmoco/osf.io,brianjgeiger/osf.io,billyhunt/osf.io,emetsger/osf.io,njantrania/osf.io,MerlinZhang/osf.io,caseyrygt/osf.io,billyhunt/osf.io,Nesiehr/osf.io,DanielSBrown/osf.io,alexschiller/osf.io,wearpants/osf.io,cosenal/osf.io,petermalcolm/osf.io
|
04807105282211ff4ad79e8b4e9b13442e083c86
|
planex_globals.py
|
planex_globals.py
|
import os.path
BUILD_ROOT_DIR = "planex-build-root"
[SPECS_DIR, SOURCES_DIR, SRPMS_DIR, RPMS_DIR, BUILD_DIR] = map(
lambda x: os.path.join(BUILD_ROOT_DIR, x),
['SPECS', 'SOURCES', 'SRPMS', 'RPMS', 'BUILD'])
SPECS_GLOB = os.path.join(SPECS_DIR, "*.spec")
|
import os.path
BUILD_ROOT_DIR = "planex-build-root"
[SPECS_DIR, SOURCES_DIR, SRPMS_DIR, RPMS_DIR, BUILD_DIR] = [
os.path.join(BUILD_ROOT_DIR, dir_name) for dir_name in
['SPECS', 'SOURCES', 'SRPMS', 'RPMS', 'BUILD']]
SPECS_GLOB = os.path.join(SPECS_DIR, "*.spec")
|
Replace deprecated 'map' builtin with list comprehension
|
globals: Replace deprecated 'map' builtin with list comprehension
Signed-off-by: Euan Harris <c3b6e83069c8e9e3af49a38fec6026be89559638@citrix.com>
|
Python
|
lgpl-2.1
|
djs55/planex,jonludlam/planex,djs55/planex,simonjbeaumont/planex,euanh/planex-cleanhistory,euanh/planex-cleanhistory,simonjbeaumont/planex,djs55/planex,jonludlam/planex,jonludlam/planex,simonjbeaumont/planex,euanh/planex-cleanhistory
|
42a4d5959524875fd39c190f6119eb06a97eabf2
|
build/setenv.py
|
build/setenv.py
|
import os,sys
#General vars
CURDIR=os.path.dirname(os.path.abspath(__file__))
TOPDIR=os.path.dirname(CURDIR)
DOWNLOAD_DIR=TOPDIR+'\\downloads'
#Default vars
PY_VER='Python27'
BIN_DIR=TOPDIR+'\\bin'
PY_DIR=BIN_DIR+'\\'+PY_VER #Don't mess with PYTHONHOME
############################################################
#Check environment settings in case they'e been overridden
env=os.environ
CURDIR=env.get('CURDIR',CURDIR)
TOPDIR=env.get('TOPDIR',os.path.dirname(CURDIR))
DOWNLOAD_DIR=env.get('DOWNLOAD_DIR',DOWNLOAD_DIR)
PY_VER=env.get('PY_VER',PY_VER)
BIN_DIR=env.get('BIN_DIR',BIN_DIR)
PY_DIR=env.get('PY_DIR',PY_DIR)
#Hide from autocomplete IDEs
del os
del sys
del env
|
import os,sys
#General vars
CURDIR=os.path.dirname(os.path.abspath(__file__))
TOPDIR=os.path.dirname(os.path.dirname(CURDIR))
DOWNLOAD_DIR=os.path.join(TOPDIR,'downloads')
#Default vars
PY_VER='Python27'
BIN_DIR=os.path.join(TOPDIR,'bin')
PY_DIR=os.path.join(BIN_DIR,PY_VER) #Don't mess with PYTHONHOME
############################################################
#Check environment settings in case they'e been overridden
env=os.environ
CURDIR=env.get('CURDIR',CURDIR)
TOPDIR=env.get('TOPDIR',os.path.dirname(os.path.dirname(CURDIR)))
DOWNLOAD_DIR=env.get('DOWNLOAD_DIR',DOWNLOAD_DIR)
PY_VER=env.get('PY_VER',PY_VER)
BIN_DIR=env.get('BIN_DIR',BIN_DIR)
PY_DIR=env.get('PY_DIR',PY_DIR)
#Hide from autocomplete IDEs
del os
del sys
del env
|
Change path following import of build folder
|
Change path following import of build folder
|
Python
|
mit
|
lpinner/metageta
|
cdc4f79210b69f131926374aff61be72ab573c46
|
scripts/import_queued_submissions.py
|
scripts/import_queued_submissions.py
|
#!/usr/bin/env python
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
from acoustid.script import run_script
from acoustid.data.submission import import_queued_submissions
def main(script, opts, args):
conn = script.engine.connect()
with conn.begin():
import_queued_submissions(conn)
run_script(main)
|
#!/usr/bin/env python
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
from acoustid.script import run_script
from acoustid.data.submission import import_queued_submissions
def main(script, opts, args):
conn = script.engine.connect()
with conn.begin():
import_queued_submissions(conn, limit=100)
run_script(main)
|
Raise the import batch size to 100
|
Raise the import batch size to 100
|
Python
|
mit
|
lalinsky/acoustid-server,lalinsky/acoustid-server,lalinsky/acoustid-server,lalinsky/acoustid-server
|
bebb1d9bc44300e7c65fba90d6c2eb76243ea372
|
scripts/cm2lut.py
|
scripts/cm2lut.py
|
#!/usr/bin/env python
"""
Script used to create lut lists used by mayavi from matplotlib colormaps.
This requires matlplotlib to be installed and should not be ran by the
user, but only once in a while to synchronize with MPL developpement.
"""
# Authors: Frederic Petit <fredmfp@gmail.com>,
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Copyright (c) 2007, Enthought, Inc.
# License: BSD Style.
from matplotlib.cm import datad, get_cmap
import numpy as np
from enthought.mayavi.core import lut as destination_module
import os
target_dir = os.path.dirname(destination_module.__file__)
values = np.linspace(0., 1., 256)
lut_dic = {}
for name in datad.keys():
if name.endswith('_r'):
continue
lut_dic[name] = get_cmap(name)(values.copy())
out_name = os.path.join(target_dir, 'pylab_luts.npz')
np.savez(out_name, **lut_dic)
|
#!/usr/bin/env python
"""
Script used to create lut lists used by mayavi from matplotlib colormaps.
This requires matlplotlib to be installed and should not be ran by the
user, but only once in a while to synchronize with MPL developpement.
"""
# Authors: Frederic Petit <fredmfp@gmail.com>,
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Copyright (c) 2007-2009, Enthought, Inc.
# License: BSD Style.
import os
import numpy as np
from matplotlib.cm import datad, get_cmap
from enthought.mayavi.core import lut as destination_module
from enthought.persistence import state_pickler
target_dir = os.path.dirname(destination_module.__file__)
values = np.linspace(0., 1., 256)
lut_dic = {}
for name in datad.keys():
if name.endswith('_r'):
continue
lut_dic[name] = get_cmap(name)(values.copy())
out_name = os.path.join(target_dir, 'pylab_luts.pkl')
state_pickler.dump(lut_dic, out_name)
|
Add a modified lut-data-generating script to use pickle, rather than npz
|
ENH: Add a modified lut-data-generating script to use pickle, rather than npz
|
Python
|
bsd-3-clause
|
dmsurti/mayavi,liulion/mayavi,alexandreleroux/mayavi,alexandreleroux/mayavi,dmsurti/mayavi,liulion/mayavi
|
fece0019a54534b56960a30785bb70edb5d205bf
|
example_base/forms.py
|
example_base/forms.py
|
# -*- encoding: utf-8 -*-
from base.form_utils import RequiredFieldForm
from .models import Document
from base.form_utils import FileDropInput
class DocumentForm(RequiredFieldForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
widgets = {'file': FileDropInput()}
|
# -*- encoding: utf-8 -*-
from django import forms
from base.form_utils import RequiredFieldForm, FileDropInput
from .models import Document
class DocumentForm(RequiredFieldForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
# Not required RequiredFieldForm uses FileDropInput for FileField
# widgets = {'file': FileDropInput()}
# this is an example of how to use in a basic ModelForm
class BasicDocumentModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
class Meta:
model = Document
fields = (
'file',
'description',
)
widgets = {'file': FileDropInput()}
# this is an example of how to use in a basic Form
class NonModelForm(forms.Form):
file = forms.FileField(widget=FileDropInput)
description = forms.CharField(max_length=200)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for name in ('file', 'description'):
self.fields[name].widget.attrs.update(
{'class': 'pure-input-2-3'}
)
|
Add examples of ways to use FileDropInput
|
Add examples of ways to use FileDropInput
|
Python
|
apache-2.0
|
pkimber/base,pkimber/base,pkimber/base,pkimber/base
|
1e17e868ff332003da959a397b8846c9386b35e8
|
API_to_backend.py
|
API_to_backend.py
|
from multiprocessing import Queue, Process
import time
import backend
command_queue = Queue()
response_queue = Queue()
def start_backend():
if handler:
handler.stop()
handler = Process(target=backend.start, args=(command_queue, response_queue))
handler.start()
def get_for(url, queue, timeout):
beginning = time.time()
result = queue.get(timeout=timeout)
if result["url"] == url:
return result["body"]
else:
queue.put(result)
return get_for(url, queue, timeout - (time.time()-beginning))
|
from multiprocessing import Queue, Process
import time
import backend
command_queue = Queue()
response_queue = Queue()
def start_backend():
handler = Process(target=backend.start, args=(command_queue, response_queue))
handler.start()
def get_for(url, queue, timeout):
beginning = time.time()
result = queue.get(timeout=timeout)
if result["url"] == url:
return result["body"]
else:
queue.put(result)
return get_for(url, queue, timeout - (time.time()-beginning))
|
Revert "Quit Backend If Running"
|
Revert "Quit Backend If Running"
This reverts commit a00432191e2575aba0f20ffb1a96a323699ae4fc.
|
Python
|
mit
|
IAPark/PITherm
|
7c18cbf6dced0435537fb4067dfa878ae9ccc6af
|
accounts/models.py
|
accounts/models.py
|
from django.db import models
from django.contrib.auth.models import User
class Profile(models.Model):
user = models.OneToOneField(User)
def __str__(self):
return self.user.get_full_name() or self.user.username
|
from django.db import models
from django.db.models.signals import post_save
from django.contrib.auth.models import User
class Profile(models.Model):
user = models.OneToOneField(User)
def __str__(self):
return self.user.get_full_name() or self.user.username
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
|
Create profile if user is created
|
Create profile if user is created
|
Python
|
mit
|
lockhawksp/beethoven,lockhawksp/beethoven
|
6034265dfdfb2a7e1e4881076cc0f011ff0e639d
|
netbox/extras/migrations/0022_custom_links.py
|
netbox/extras/migrations/0022_custom_links.py
|
# Generated by Django 2.2 on 2019-04-15 19:28
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('extras', '0021_add_color_comments_changelog_to_tag'),
]
operations = [
migrations.CreateModel(
name='CustomLink',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
('name', models.CharField(max_length=100, unique=True)),
('text', models.CharField(max_length=200)),
('url', models.CharField(max_length=200)),
('weight', models.PositiveSmallIntegerField(default=100)),
('group_name', models.CharField(blank=True, max_length=50)),
('button_class', models.CharField(default='default', max_length=30)),
('new_window', models.BooleanField()),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
options={
'ordering': ['group_name', 'weight', 'name'],
},
),
]
|
# Generated by Django 2.2 on 2019-04-15 19:28
from django.db import migrations, models
import django.db.models.deletion
import extras.models
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('extras', '0021_add_color_comments_changelog_to_tag'),
]
operations = [
migrations.CreateModel(
name='CustomLink',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
('name', models.CharField(max_length=100, unique=True)),
('text', models.CharField(max_length=200)),
('url', models.CharField(max_length=200)),
('weight', models.PositiveSmallIntegerField(default=100)),
('group_name', models.CharField(blank=True, max_length=50)),
('button_class', models.CharField(default='default', max_length=30)),
('new_window', models.BooleanField()),
('content_type', models.ForeignKey(limit_choices_to=extras.models.get_custom_link_models, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
options={
'ordering': ['group_name', 'weight', 'name'],
},
),
]
|
Add limit_choices_to to CustomLink.content_type field
|
Add limit_choices_to to CustomLink.content_type field
|
Python
|
apache-2.0
|
lampwins/netbox,lampwins/netbox,digitalocean/netbox,digitalocean/netbox,digitalocean/netbox,lampwins/netbox,lampwins/netbox,digitalocean/netbox
|
32f38eb01c3a203ae35d70b485fcee7b13f1acde
|
tests/help_generation_test.py
|
tests/help_generation_test.py
|
# Copyright 2016 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that we can generate help for PKB."""
import os
import unittest
from perfkitbenchmarker import flags
# Import pkb to add all flag definitions to flags.FLAGS.
from perfkitbenchmarker import pkb # NOQA
class HelpTest(unittest.TestCase):
def testHelp(self):
# Test that help generation finishes without errors
flags.FLAGS.GetHelp()
class HelpXMLTest(unittest.TestCase):
def testHelpXML(self):
with open(os.devnull, 'w') as out:
flags.FLAGS.WriteHelpInXMLFormat(outfile=out)
if __name__ == '__main__':
unittest.main()
|
# Copyright 2016 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that we can generate help for PKB."""
import os
import unittest
from perfkitbenchmarker import flags
# Import pkb to add all flag definitions to flags.FLAGS.
from perfkitbenchmarker import pkb # NOQA
class HelpTest(unittest.TestCase):
def testHelp(self):
# Test that help generation finishes without errors
if hasattr(flags.FLAGS, 'get_help'):
flags.FLAGS.get_help()
else:
flags.FLAGS.GetHelp()
class HelpXMLTest(unittest.TestCase):
def testHelpXML(self):
with open(os.devnull, 'w') as out:
flags.FLAGS.WriteHelpInXMLFormat(outfile=out)
if __name__ == '__main__':
unittest.main()
|
Call FLAGS.get_help if it's available.
|
Call FLAGS.get_help if it's available.
|
Python
|
apache-2.0
|
GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,meteorfox/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,meteorfox/PerfKitBenchmarker
|
d8c15667e76ce6d0dfa96a16312e75b83c63479b
|
tests/test_response.py
|
tests/test_response.py
|
"""Unit test some basic response rendering functionality.
These tests use the unittest.mock mechanism to provide a simple Assistant
instance for the _Response initialization.
"""
from unittest.mock import patch
from flask import Flask
from flask_assistant import Assistant
from flask_assistant.response import _Response
@patch('flask_assistant.response.current_app')
def test_response_with_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foobar')
assert resp._response['speech'] == 'foobar'
@patch('flask_assistant.response.current_app')
def test_response_with_None_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response(None)
assert resp._response['speech'] is None
@patch('flask_assistant.response.current_app')
def test_response_speech_escaping(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foo & bar')
assert resp._response['speech'] == 'foo & bar'
|
"""Unit test some basic response rendering functionality.
These tests use the unittest.mock mechanism to provide a simple Assistant
instance for the _Response initialization.
"""
from flask import Flask
from flask_assistant import Assistant
from flask_assistant.response import _Response
import pytest
patch = pytest.importorskip('unittest.mock.patch')
@patch('flask_assistant.response.current_app')
def test_response_with_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foobar')
assert resp._response['speech'] == 'foobar'
@patch('flask_assistant.response.current_app')
def test_response_with_None_speech(mock):
mock = Assistant(Flask(__name__))
resp = _Response(None)
assert resp._response['speech'] is None
@patch('flask_assistant.response.current_app')
def test_response_speech_escaping(mock):
mock = Assistant(Flask(__name__))
resp = _Response('foo & bar')
assert resp._response['speech'] == 'foo & bar'
|
Disable test for py27 (mock not available)
|
Disable test for py27 (mock not available)
|
Python
|
apache-2.0
|
treethought/flask-assistant
|
4b578fb7683054727444f1ed5c2a7d9732a3d8e9
|
ci_scripts/installPandoc.py
|
ci_scripts/installPandoc.py
|
import os
from subprocess import call, check_output
import sys
from shutil import copy2
platform = sys.platform
def checkAndInstall():
try:
check_output('pandoc -v'.split())
except OSError:
cudir = os.path.abspath(os.curdir)
os.chdir(os.path.abspath(os.path.join(os.path.pardir, 'downloads')))
def getFile():
from requests import get
with open(pandocFile, "wb") as file:
response = get(source)
file.write(response.content)
if platform == 'win32':
pandocFile = 'pandoc-2.1.3-windows.msi'
source = 'https://github.com/jgm/pandoc/releases/download/2.1.3/' + pandocFile
getFile()
call('msiexec.exe /i "{}" /norestart'.format(pandocFile))
else:
pandocFile = 'pandoc-2.1.3-linux.tar.gz'
source = 'https://github.com/jgm/pandoc/releases/download/2.1.3/' + pandocFile
getFile()
call("tar -xvzf {}".format(pandocFile).split())
copy2('./pandoc-2.1.3/bin/pandoc', '/usr/local/bin')
copy2('./pandoc-2.1.3/bin/pandoc-citeproc', '/usr/local/bin')
os.chdir(cudir)
if __name__ == '__main__':
checkAndInstall()
|
import os
from subprocess import call, check_output
import sys
from shutil import copy2
platform = sys.platform
def checkAndInstall():
try:
check_output('pandoc -v'.split())
except OSError:
def getFile():
from requests import get
with open(pandocFile, "wb") as file:
response = get(source)
file.write(response.content)
if platform == 'win32':
pandocFile = 'pandoc-2.1.3-windows.msi'
source = 'https://github.com/jgm/pandoc/releases/download/2.1.3/' + pandocFile
getFile()
call('msiexec.exe /i "{}" /norestart'.format(pandocFile))
else:
pandocFile = 'pandoc-2.1.3-linux.tar.gz'
source = 'https://github.com/jgm/pandoc/releases/download/2.1.3/' + pandocFile
getFile()
call("tar -xvzf {}".format(pandocFile).split())
copy2('./pandoc-2.1.3/bin/pandoc', '/usr/local/bin')
copy2('./pandoc-2.1.3/bin/pandoc-citeproc', '/usr/local/bin')
if __name__ == '__main__':
checkAndInstall()
|
Fix build wheels with Pandoc 2.
|
Fix build wheels with Pandoc 2.
|
Python
|
bsd-3-clause
|
jr-garcia/AssimpCy,jr-garcia/AssimpCy
|
afa94ea297c6042f4444c0ce833c9b1ee02373c1
|
stowaway.py
|
stowaway.py
|
import time
import socket
import datetime
from ipaddress import ip_address
import zmq
import yaml
import quick2wire.i2c as i2c
from database import Writer
from database import Temperature, Base
if __name__ == '__main__':
context = zmq.Context()
publisher = context.socket(zmq.PUB)
database = context.socket(zmq.PUB)
server = yaml.load(open('config.yaml'))['server']
host = server['host']
try:
ip_address(host)
except ValueError:
host = socket.gethostbyname(host)
publisher.bind('tcp://{}:{}'.format(host, server['port']))
database.bind('inproc://dbwrite')
writer = Writer(context)
writer.start()
while True:
with i2c.I2CMaster() as bus:
data = bus.transaction(i2c.reading(8, 6))
now = datetime.datetime.utcnow()
temp = data[0][-2:]
temp = int.from_bytes(temp, byteorder='little', signed=True) / 100.
print(now, temp)
publisher.send_pyobj(('TEMP', now, temp))
database.send_pyobj(('TEMP', now, temp))
time.sleep(0.05)
|
import time
import socket
import datetime
from ipaddress import ip_address
import zmq
import yaml
import quick2wire.i2c as i2c
from database import Writer
from database import Temperature, Base
if __name__ == '__main__':
context = zmq.Context()
publisher = context.socket(zmq.PUB)
database = context.socket(zmq.PUB)
server = yaml.load(open('config.yaml'))['server']
host = server['host']
try:
ip_address(host)
except ValueError:
host = socket.gethostbyname(host)
publisher.bind('tcp://{}:{}'.format(host, server['port']))
database.bind('inproc://dbwrite')
writer = Writer(context)
writer.start()
while True:
with i2c.I2CMaster() as bus:
data = bus.transaction(i2c.reading(8, 6))
now = datetime.datetime.utcnow()
temp = data[0][-2:]
temp = int.from_bytes(temp, byteorder='little', signed=True) / 100.
print(now, temp)
publisher.send_pyobj(('TEMP', now.timestamp(), temp))
database.send_pyobj(('TEMP', now, temp))
time.sleep(0.05)
|
Send timestamp to the outside world
|
Send timestamp to the outside world
|
Python
|
bsd-3-clause
|
CojoCompany/stowaway
|
34369635a22bf05abbabe47e708a2ed80db258e5
|
MeetingMinutes.py
|
MeetingMinutes.py
|
import sublime, sublime_plugin
from .mistune import markdown
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>' + markdown(md_source) + '</body></html>'
print(html_source)
|
import sublime, sublime_plugin
import os
import re
from subprocess import call
from .mistune import markdown
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>' + markdown(md_source) + '</body></html>'
file_name = self.view.file_name()
html_file, extension = os.path.splitext(file_name)
html_file += ".html"
with open(html_file, 'w+') as file_:
file_.write(html_source)
print(file_name)
print(html_file)
|
Save the created html in a HTML file.
|
Save the created html in a HTML file.
|
Python
|
mit
|
Txarli/sublimetext-meeting-minutes,Txarli/sublimetext-meeting-minutes
|
fbad1649e9939a3be4194e0d508ff5889f48bb6f
|
unleash/plugins/utils_assign.py
|
unleash/plugins/utils_assign.py
|
import re
# regular expression for finding assignments
_quotes = "['|\"|\"\"\"]"
BASE_ASSIGN_PATTERN = r'({}\s*=\s*[ubr]?' + _quotes + r')(.*?)(' +\
_quotes + r')'
def find_assign(data, varname):
"""Finds a substring that looks like an assignment.
:param data: Source to search in.
:param varname: Name of the variable for which an assignment should be
found.
"""
ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname))
if len(ASSIGN_RE.findall(data)) > 1:
raise ValueError('Found multiple {}-strings.'.format(varname))
if len(ASSIGN_RE.findall(data)) < 1:
raise ValueError('No version assignment ("{}") found.'.format(varname))
return ASSIGN_RE.search(data).group(2)
def replace_assign(data, varname, new_value):
ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname))
def repl(m):
return m.group(1) + new_value + m.group(3)
return ASSIGN_RE.sub(repl, data)
|
from unleash.exc import PluginError
import re
# regular expression for finding assignments
_quotes = "['|\"|\"\"\"]"
BASE_ASSIGN_PATTERN = r'({}\s*=\s*[ubr]?' + _quotes + r')(.*?)(' +\
_quotes + r')'
def find_assign(data, varname):
"""Finds a substring that looks like an assignment.
:param data: Source to search in.
:param varname: Name of the variable for which an assignment should be
found.
"""
ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname))
if len(ASSIGN_RE.findall(data)) > 1:
raise PluginError('Found multiple {}-strings.'.format(varname))
if len(ASSIGN_RE.findall(data)) < 1:
raise PluginError('No version assignment ("{}") found.'
.format(varname))
return ASSIGN_RE.search(data).group(2)
def replace_assign(data, varname, new_value):
ASSIGN_RE = re.compile(BASE_ASSIGN_PATTERN.format(varname))
def repl(m):
return m.group(1) + new_value + m.group(3)
return ASSIGN_RE.sub(repl, data)
|
Raise PluginErrors instead of ValueErrors in versions.
|
Raise PluginErrors instead of ValueErrors in versions.
|
Python
|
mit
|
mbr/unleash
|
f339af2e48f0e485f13d368dad47f541264c4f58
|
web/processors/user.py
|
web/processors/user.py
|
from django.contrib.auth.models import User
from django_countries import countries
def get_user(user_id):
user = User.objects.get(id=user_id)
return user
def get_user_profile(user_id):
user = User.objects.get(id=user_id)
return user.profile
def get_ambassadors():
ambassadors = []
aambassadors = User.objects.filter(groups__name='ambassadors')
for ambassador in aambassadors:
ambassadors.append(ambassador.profile)
return ambassadors
def get_ambassadors_for_countries():
ambassadors = get_ambassadors()
countries_ambassadors = []
for code, name in list(countries):
readable_name = unicode(name)
found_ambassadors = []
for ambassador in ambassadors:
if ambassador.country == code:
found_ambassadors.append(ambassador)
countries_ambassadors.append((readable_name,found_ambassadors))
countries_ambassadors.sort()
return countries_ambassadors
def update_user_email(user_id, new_email):
user = User.objects.get(id=user_id)
user.email = new_email
user.save(update_fields=["email"])
return user
|
from django.contrib.auth.models import User
from django_countries import countries
def get_user(user_id):
user = User.objects.get(id=user_id)
return user
def get_user_profile(user_id):
user = User.objects.get(id=user_id)
return user.profile
def get_ambassadors():
ambassadors = []
aambassadors = User.objects.filter(groups__name='ambassadors').order_by('date_joined')
for ambassador in aambassadors:
ambassadors.append(ambassador.profile)
return ambassadors
def get_ambassadors_for_countries():
ambassadors = get_ambassadors()
countries_ambassadors = []
for code, name in list(countries):
readable_name = unicode(name)
found_ambassadors = []
for ambassador in ambassadors:
if ambassador.country == code:
found_ambassadors.append(ambassador)
countries_ambassadors.append((readable_name,found_ambassadors))
countries_ambassadors.sort()
return countries_ambassadors
def update_user_email(user_id, new_email):
user = User.objects.get(id=user_id)
user.email = new_email
user.save(update_fields=["email"])
return user
|
Sort listed ambassadors by date_joined
|
Sort listed ambassadors by date_joined
|
Python
|
mit
|
ercchy/coding-events,michelesr/coding-events,joseihf/coding-events,ioana-chiorean/coding-events,joseihf/coding-events,codeeu/coding-events,michelesr/coding-events,joseihf/coding-events,ercchy/coding-events,codeeu/coding-events,ercchy/coding-events,michelesr/coding-events,ioana-chiorean/coding-events,codeeu/coding-events,codeeu/coding-events,ioana-chiorean/coding-events,ioana-chiorean/coding-events,codeeu/coding-events,ercchy/coding-events,ercchy/coding-events,michelesr/coding-events,joseihf/coding-events,joseihf/coding-events,michelesr/coding-events,ioana-chiorean/coding-events
|
bb2234447039df6bee80842749b0ecdb19fb62fc
|
aerende/models.py
|
aerende/models.py
|
import uuid
class Note(object):
"""
A note.
Currently has a title, tags, texr and a priority."""
def __init__(self, title, tags, text, priority=1, unique_id=None):
if unique_id is None:
self.id = str(uuid.uuid4())
else:
self.id = unique_id
self.title = title
self.tags = tags
self.text = text
self.priority = priority
def __str__(self):
return str(self.to_dictionary)
def to_dictionary(self):
return {
self.id: {
'title': self.title,
'tags': str(self.tags),
'text': self.text,
'priority': self.priority,
}
}
class Tag(object):
"""A note tag, for categorisation/filtering"""
def __init__(self, type, frequency):
self.type = type
self.frequency = frequency
def __str__(self):
return "[{0}] {1}".format(self.frequency, self.type)
|
import uuid
class Note(object):
"""
A note.
Currently has a title, tags, texr and a priority."""
def __init__(self, title, tags, text, priority=1, unique_id=None):
if unique_id is None:
self.id = str(uuid.uuid4())
else:
self.id = unique_id
self.title = title
self.tags = self.__verify_tags(tags)
self.text = text
self.priority = priority
def __str__(self):
return str(self.to_dictionary)
def __verify_tags(self, tags):
return list(set(tags))
def to_dictionary(self):
return {
self.id: {
'title': self.title,
'tags': str(self.tags),
'text': self.text,
'priority': self.priority,
}
}
class Tag(object):
"""A note tag, for categorisation/filtering"""
def __init__(self, type, frequency):
self.type = type
self.frequency = frequency
def __str__(self):
return "[{0}] {1}".format(self.frequency, self.type)
|
Remove duplicate tags during init
|
Remove duplicate tags during init
|
Python
|
mit
|
Autophagy/aerende
|
83919e74b7d20688811a4f782d4fccaf3bc3c055
|
comics/comics/hijinksensue.py
|
comics/comics/hijinksensue.py
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "HijiNKS Ensue"
language = "en"
url = "http://hijinksensue.com/"
start_date = "2007-05-11"
rights = "Joel Watson"
class Crawler(CrawlerBase):
history_capable_days = 180
time_zone = "US/Central"
def crawl(self, pub_date):
feed = self.parse_feed("http://hijinksensue.com/feed/")
for entry in feed.for_date(pub_date):
if "/comic/" not in entry.link:
continue
url = entry.content0.src('img[src*="-300x120"]')
if not url:
continue
url = url.replace("-300x120", "")
title = entry.title
return CrawlerImage(url, title)
|
from comics.aggregator.crawler import CrawlerBase, CrawlerImage
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "HijiNKS Ensue"
language = "en"
url = "http://hijinksensue.com/"
start_date = "2007-05-11"
rights = "Joel Watson"
active = False
class Crawler(CrawlerBase):
history_capable_date = '2015-03-11'
time_zone = "US/Central"
def crawl(self, pub_date):
feed = self.parse_feed("http://hijinksensue.com/feed/")
for entry in feed.for_date(pub_date):
if "/comic/" not in entry.link:
continue
url = entry.content0.src('img[srcset*="-300x120"]')
if not url:
continue
url = url.replace("-300x120", "")
title = entry.title
return CrawlerImage(url, title)
|
Update "HijiNKS Ensue" after feed change
|
Update "HijiNKS Ensue" after feed change
|
Python
|
agpl-3.0
|
datagutten/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics
|
7e025a5fa40d5f7ba5721ad01951ad2020ed2485
|
phoxpy/tests/test_client.py
|
phoxpy/tests/test_client.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
import unittest
from phoxpy import client
from phoxpy.tests.lisserver import MockHttpSession
class SessionTestCase(unittest.TestCase):
def test_login(self):
session = client.Session(login='John', password='Doe',
client_id='foo-bar-baz')
self.assertFalse(session.is_active())
session.open('localhost', http_session=MockHttpSession())
self.assertTrue(session.is_active())
def test_logout(self):
session = client.Session(login='John', password='Doe',
client_id='foo-bar-baz')
self.assertFalse(session.is_active())
session.open('localhost', http_session=MockHttpSession())
self.assertTrue(session.is_active())
session.close()
self.assertFalse(session.is_active())
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
import unittest
from phoxpy import client
from phoxpy.server import MockHttpSession, SimpleLISServer
class SessionTestCase(unittest.TestCase):
def setUp(self):
self.server = SimpleLISServer('4.2', '31415')
self.server.ext_auth.add_license('foo-bar-baz')
self.server.ext_auth.add_user('John', 'Doe')
def test_login(self):
session = client.Session(login='John', password='Doe',
client_id='foo-bar-baz')
self.assertFalse(session.is_active())
session.open('localhost', http_session=MockHttpSession(self.server))
self.assertTrue(session.is_active())
def test_logout(self):
session = client.Session(login='John', password='Doe',
client_id='foo-bar-baz')
self.assertFalse(session.is_active())
session.open('localhost', http_session=MockHttpSession(self.server))
self.assertTrue(session.is_active())
session.close()
self.assertFalse(session.is_active())
if __name__ == '__main__':
unittest.main()
|
Update tests for new environment.
|
Update tests for new environment.
|
Python
|
bsd-3-clause
|
kxepal/phoxpy
|
20b13d500ea1cfa4b06413f0f02114bed60ca98f
|
apps/challenges/auth.py
|
apps/challenges/auth.py
|
OWNER_PERMISSIONS = ['challenges.%s_submission' % v for v in ['edit', 'delete']]
class SubmissionBackend(object):
"""Provide custom permission logic for submissions."""
supports_object_permissions = True
supports_anonymous_user = True
def authenticate(self):
"""This backend doesn't provide any authentication functionality."""
return None
def has_perm(self, user_obj, perm, obj=None):
if perm in OWNER_PERMISSIONS:
# Owners can edit and delete their own submissions
if obj is not None and user_obj == obj.created_by.user:
return True
if perm == 'challenges.view_submission' and obj is not None:
# Live, non-draft submissions are visible to anyone. Other
# submissions are visible only to admins and their owners
return ((obj.is_live and not obj.is_draft) or
user_obj == obj.created_by.user)
return False
|
OWNER_PERMISSIONS = ['challenges.%s_submission' % v for v in ['edit', 'delete']]
class SubmissionBackend(object):
"""Provide custom permission logic for submissions."""
supports_object_permissions = True
supports_anonymous_user = True
def authenticate(self):
"""This backend doesn't provide any authentication functionality."""
return None
def has_perm(self, user_obj, perm, obj=None):
if perm in OWNER_PERMISSIONS:
# Owners can edit and delete their own submissions
if obj is not None and user_obj == obj.created_by.user:
return True
if perm == 'challenges.view_submission' and obj is not None:
# Live, non-draft submissions are visible to anyone. Other
# submissions are visible only to admins and their owners
return ((not obj.is_draft) or user_obj == obj.created_by.user)
return False
|
Remove 'is_live' from draft visibility check.
|
Remove 'is_live' from draft visibility check.
|
Python
|
bsd-3-clause
|
mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite
|
ba4c2dc22aae5dd4f862aad7c388eecf36acfbd8
|
app/main/forms.py
|
app/main/forms.py
|
from flask_wtf import Form
from wtforms.validators import DataRequired, Email
from dmutils.forms import StripWhitespaceStringField
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
DataRequired(message="Email can not be empty"),
Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
DataRequired(message="Email can not be empty"),
Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
DataRequired(message="The domain field can not be empty.")
])
|
from flask_wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
|
Change import to indicate function of imported classes
|
Change import to indicate function of imported classes
|
Python
|
mit
|
alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
|
b167b1c8099dc184c366416dab9a7c6e5be7423a
|
api/base/exceptions.py
|
api/base/exceptions.py
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
value = value[0]
errors.append({'detail': value, 'meta': {'field': key}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'detail': reason, 'meta': {'field': key}})
else:
errors.append({'detail': value, 'meta': {'field': key}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
Handle cases where there are multiple values for a field.
|
Handle cases where there are multiple values for a field.
|
Python
|
apache-2.0
|
Ghalko/osf.io,caneruguz/osf.io,felliott/osf.io,binoculars/osf.io,icereval/osf.io,ticklemepierce/osf.io,caneruguz/osf.io,mattclark/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,aaxelb/osf.io,MerlinZhang/osf.io,caseyrygt/osf.io,doublebits/osf.io,Nesiehr/osf.io,kch8qx/osf.io,acshi/osf.io,sbt9uc/osf.io,sloria/osf.io,danielneis/osf.io,erinspace/osf.io,billyhunt/osf.io,icereval/osf.io,erinspace/osf.io,baylee-d/osf.io,emetsger/osf.io,alexschiller/osf.io,kwierman/osf.io,felliott/osf.io,saradbowman/osf.io,KAsante95/osf.io,adlius/osf.io,njantrania/osf.io,leb2dg/osf.io,cslzchen/osf.io,arpitar/osf.io,petermalcolm/osf.io,danielneis/osf.io,RomanZWang/osf.io,adlius/osf.io,KAsante95/osf.io,sbt9uc/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,abought/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,GageGaskins/osf.io,njantrania/osf.io,samanehsan/osf.io,samanehsan/osf.io,asanfilippo7/osf.io,DanielSBrown/osf.io,jnayak1/osf.io,mluke93/osf.io,ticklemepierce/osf.io,MerlinZhang/osf.io,doublebits/osf.io,felliott/osf.io,arpitar/osf.io,adlius/osf.io,KAsante95/osf.io,icereval/osf.io,Ghalko/osf.io,zachjanicki/osf.io,KAsante95/osf.io,acshi/osf.io,TomHeatwole/osf.io,ckc6cz/osf.io,jmcarp/osf.io,rdhyee/osf.io,sbt9uc/osf.io,hmoco/osf.io,mattclark/osf.io,RomanZWang/osf.io,zamattiac/osf.io,leb2dg/osf.io,ZobairAlijan/osf.io,TomBaxter/osf.io,Ghalko/osf.io,Nesiehr/osf.io,binoculars/osf.io,billyhunt/osf.io,petermalcolm/osf.io,rdhyee/osf.io,crcresearch/osf.io,ZobairAlijan/osf.io,mluo613/osf.io,cslzchen/osf.io,SSJohns/osf.io,adlius/osf.io,haoyuchen1992/osf.io,monikagrabowska/osf.io,amyshi188/osf.io,amyshi188/osf.io,kwierman/osf.io,kch8qx/osf.io,alexschiller/osf.io,Ghalko/osf.io,sbt9uc/osf.io,chrisseto/osf.io,brandonPurvis/osf.io,caseyrollins/osf.io,cwisecarver/osf.io,wearpants/osf.io,zamattiac/osf.io,DanielSBrown/osf.io,amyshi188/osf.io,mfraezz/osf.io,kwierman/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,asanfilippo7/osf.io,abought/osf.io,caseyrygt/osf.io,KAsante95/osf.io,abought/osf.io,GageGaskins/osf.io,mluke93/osf.io,caneruguz/osf.io,ZobairAlijan/osf.io,Nesiehr/osf.io,DanielSBrown/osf.io,mfraezz/osf.io,chennan47/osf.io,Nesiehr/osf.io,samanehsan/osf.io,TomHeatwole/osf.io,cslzchen/osf.io,alexschiller/osf.io,billyhunt/osf.io,jmcarp/osf.io,jnayak1/osf.io,acshi/osf.io,cosenal/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,kch8qx/osf.io,kch8qx/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,mluke93/osf.io,doublebits/osf.io,RomanZWang/osf.io,arpitar/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,Johnetordoff/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,samchrisinger/osf.io,leb2dg/osf.io,sloria/osf.io,RomanZWang/osf.io,jnayak1/osf.io,erinspace/osf.io,TomHeatwole/osf.io,SSJohns/osf.io,cwisecarver/osf.io,rdhyee/osf.io,sloria/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,alexschiller/osf.io,ckc6cz/osf.io,caseyrollins/osf.io,SSJohns/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,chrisseto/osf.io,cslzchen/osf.io,ckc6cz/osf.io,doublebits/osf.io,monikagrabowska/osf.io,MerlinZhang/osf.io,pattisdr/osf.io,aaxelb/osf.io,jmcarp/osf.io,CenterForOpenScience/osf.io,monikagrabowska/osf.io,jmcarp/osf.io,mluo613/osf.io,caseyrygt/osf.io,danielneis/osf.io,chennan47/osf.io,hmoco/osf.io,mluo613/osf.io,zamattiac/osf.io,samanehsan/osf.io,felliott/osf.io,abought/osf.io,njantrania/osf.io,SSJohns/osf.io,emetsger/osf.io,wearpants/osf.io,wearpants/osf.io,laurenrevere/osf.io,danielneis/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,GageGaskins/osf.io,emetsger/osf.io,asanfilippo7/osf.io,kch8qx/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,GageGaskins/osf.io,zachjanicki/osf.io,mattclark/osf.io,jnayak1/osf.io,mfraezz/osf.io,samchrisinger/osf.io,samchrisinger/osf.io,arpitar/osf.io,chennan47/osf.io,zachjanicki/osf.io,acshi/osf.io,amyshi188/osf.io,haoyuchen1992/osf.io,caneruguz/osf.io,MerlinZhang/osf.io,hmoco/osf.io,billyhunt/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,hmoco/osf.io,cosenal/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,aaxelb/osf.io,alexschiller/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,cosenal/osf.io,mluo613/osf.io,mluke93/osf.io,samchrisinger/osf.io,mfraezz/osf.io,brandonPurvis/osf.io,brianjgeiger/osf.io,petermalcolm/osf.io,laurenrevere/osf.io,aaxelb/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,wearpants/osf.io,ckc6cz/osf.io,doublebits/osf.io,emetsger/osf.io,asanfilippo7/osf.io,chrisseto/osf.io,crcresearch/osf.io,kwierman/osf.io,brandonPurvis/osf.io,rdhyee/osf.io,njantrania/osf.io,pattisdr/osf.io,cosenal/osf.io,saradbowman/osf.io,baylee-d/osf.io,DanielSBrown/osf.io,petermalcolm/osf.io
|
0155ed7c37fd4cafa2650911d4f902a3a8982761
|
test/test_bot.py
|
test/test_bot.py
|
import re
import unittest
from gather.bot import ListenerBot
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
action = unittest.mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
if __name__ == '__main__':
unittest.main()
|
import asyncio
import re
import unittest
from unittest import mock
from gather.bot import ListenerBot
def async_test(f):
# http://stackoverflow.com/a/23036785/304210
def wrapper(*args, **kwargs):
coro = asyncio.coroutine(f)
future = coro(*args, **kwargs)
loop = asyncio.get_event_loop()
loop.run_until_complete(future)
return wrapper
class TestGatherBot(unittest.TestCase):
def test_register(self):
bot = ListenerBot()
self.assertEqual({}, bot.actions)
regex = r'^test'
action = mock.Mock()
bot.register_action(regex, action)
self.assertEqual(
{regex: (re.compile(regex, re.IGNORECASE), action)},
bot.actions
)
@async_test
def test_on_message_from_bot(self):
bot = ListenerBot()
bot.username = 'testuser'
regex = r'^test'
action = mock.Mock()
bot.actions = {regex: (re.compile(regex, re.IGNORECASE), action)}
bot.on_message(mock.Mock(), mock.Mock, 'test')
action.assert_not_called()
if __name__ == '__main__':
unittest.main()
|
Add a test for on_message
|
Add a test for on_message
|
Python
|
mit
|
veryhappythings/discord-gather
|
158f1101c3c13db5df916329a66517c7bb85e132
|
plata/context_processors.py
|
plata/context_processors.py
|
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
* ``plata.price_includes_tax``: Whether prices include tax or not
"""
shop = plata.shop_instance()
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': (shop.contact_from_user(request.user)
if hasattr(request, 'user') else None),
}} if shop else {}
|
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
* ``plata.price_includes_tax``: Whether prices include tax or not
"""
shop = plata.shop_instance()
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': (shop.contact_from_user(request.user)
if hasattr(request, 'user') else None),
'price_includes_tax': shop.price_includes_tax(request),
}} if shop else {}
|
Add current value for price_includes_tax to context
|
Add current value for price_includes_tax to context
|
Python
|
bsd-3-clause
|
armicron/plata,armicron/plata,armicron/plata
|
22c7da6d3de76cf6c36b4206f204a9ee979ba5f7
|
strides/graphs.py
|
strides/graphs.py
|
import pandas as pd
import matplotlib
matplotlib.use("pdf")
import matplotlib.pyplot as plt
import sys
import os.path
figdir = "figures"
df = pd.read_csv(sys.stdin, " ", header=None, index_col=0,
names=[2**(i) for i in range(6)]+["rand"])
print(df)
#print(df["X2"]/2**df.index)
df.plot(logy=True)
plt.title("run time for array access")
plt.xlabel("scale")
plt.ylabel("seconds")
plt.savefig(os.path.join([figdir,"graph.pdf"]))
plt.figure()
sizes = 2**df.index
print(sizes)
petf = (df.T/sizes).T
print( petf )
petf.plot(logy=True)
plt.title("normalized running time")
plt.xlabel("scale")
plt.ylabel("nanoseconds per element")
plt.savefig(os.path.join([figdir,"perelement.pdf"]))
|
import pandas as pd
import matplotlib
matplotlib.use("pdf")
import matplotlib.pyplot as plt
import sys
df = pd.read_csv(sys.stdin, " ", header=None, index_col=0)
print(df)
print(df["X2"]/2**df.index)
df.plot(logy=True)
plt.savefig("graph.pdf")
|
Add perelement figures write figures into subdirectory
|
Add perelement figures write figures into subdirectory
|
Python
|
bsd-3-clause
|
jpfairbanks/cse6140,jpfairbanks/cse6140,jpfairbanks/cse6140
|
1579eb8d2de5aa49ad7012ab08350659a20725e1
|
basis/managers.py
|
basis/managers.py
|
from django.db import models
class BasisModelManager(models.Manager):
def get_query_set(self):
return super(BasisModelManager, self).get_query_set().filter(deleted=False)
|
from django.db import models
from .compat import DJANGO16
if DJANGO16:
class BasisModelManager(models.Manager):
def get_queryset(self):
return super(BasisModelManager, self).get_queryset().filter(deleted=False)
else:
class BasisModelManager(models.Manager):
def get_query_set(self):
return super(BasisModelManager, self).get_query_set().filter(deleted=False)
|
Fix deprecation warning for get_query_set
|
Fix deprecation warning for get_query_set
get_query_set was renamed get_queryset in django 1.6
|
Python
|
mit
|
frecar/django-basis
|
4b000960edc30d9917b80646a0374fb8bf99efcb
|
storage/tests/testtools.py
|
storage/tests/testtools.py
|
"""
Test tools for the storage service.
"""
import unittest
from storage.storage import app as storage_app, db
class InMemoryStorageTests(unittest.TestCase):
"""
Set up and tear down an application with an in memory database for testing.
"""
def setUp(self):
storage_app.config['TESTING'] = True
storage_app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'
self.storage_app = storage_app.test_client()
self.storage_url_map = storage_app.url_map
with storage_app.app_context():
db.create_all()
def tearDown(self):
with storage_app.app_context():
db.session.remove()
db.drop_all()
|
"""
Test tools for the storage service.
"""
import unittest
from storage.storage import app, db
class InMemoryStorageTests(unittest.TestCase):
"""
Set up and tear down an application with an in memory database for testing.
"""
def setUp(self):
app.config['TESTING'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'
self.storage_app = app.test_client()
self.storage_url_map = app.url_map
with app.app_context():
db.create_all()
def tearDown(self):
with app.app_context():
db.session.remove()
db.drop_all()
|
Remove unnecessary rename on input
|
Remove unnecessary rename on input
|
Python
|
mit
|
jenca-cloud/jenca-authentication
|
505456fed7bdbd6b2cd78eae10b3b64657cd377b
|
tests/unit/test_commands.py
|
tests/unit/test_commands.py
|
import pytest
from pip._internal.commands import commands_dict, create_command
def test_commands_dict__order():
"""
Check the ordering of commands_dict.
"""
names = list(commands_dict)
# A spot-check is sufficient to check that commands_dict encodes an
# ordering.
assert names[0] == 'install'
assert names[-1] == 'help'
@pytest.mark.parametrize('name', list(commands_dict))
def test_create_command(name):
"""Test creating an instance of each available command."""
command = create_command(name)
assert command.name == name
assert command.summary == commands_dict[name].summary
|
import pytest
from pip._internal.cli.req_command import (
IndexGroupCommand,
RequirementCommand,
SessionCommandMixin,
)
from pip._internal.commands import commands_dict, create_command
def check_commands(pred, expected):
"""
Check the commands satisfying a predicate.
"""
commands = [create_command(name) for name in sorted(commands_dict)]
actual = [command.name for command in commands if pred(command)]
assert actual == expected, 'actual: {}'.format(actual)
def test_commands_dict__order():
"""
Check the ordering of commands_dict.
"""
names = list(commands_dict)
# A spot-check is sufficient to check that commands_dict encodes an
# ordering.
assert names[0] == 'install'
assert names[-1] == 'help'
@pytest.mark.parametrize('name', list(commands_dict))
def test_create_command(name):
"""Test creating an instance of each available command."""
command = create_command(name)
assert command.name == name
assert command.summary == commands_dict[name].summary
def test_session_commands():
"""
Test which commands inherit from SessionCommandMixin.
"""
def is_session_command(command):
return isinstance(command, SessionCommandMixin)
expected = ['download', 'install', 'list', 'search', 'uninstall', 'wheel']
check_commands(is_session_command, expected)
def test_index_group_commands():
"""
Test the commands inheriting from IndexGroupCommand.
"""
expected = ['download', 'install', 'list', 'wheel']
def is_index_group_command(command):
return isinstance(command, IndexGroupCommand)
check_commands(is_index_group_command, expected)
# Also check that the commands inheriting from IndexGroupCommand are
# exactly the commands with the --no-index option.
def has_option_no_index(command):
return command.parser.has_option('--no-index')
check_commands(has_option_no_index, expected)
def test_requirement_commands():
"""
Test which commands inherit from RequirementCommand.
"""
def is_requirement_command(command):
return isinstance(command, RequirementCommand)
check_commands(is_requirement_command, ['download', 'install', 'wheel'])
|
Test the command class inheritance for each command.
|
Test the command class inheritance for each command.
|
Python
|
mit
|
pradyunsg/pip,xavfernandez/pip,pfmoore/pip,rouge8/pip,xavfernandez/pip,pypa/pip,sbidoul/pip,pfmoore/pip,pypa/pip,rouge8/pip,rouge8/pip,pradyunsg/pip,xavfernandez/pip,sbidoul/pip
|
e183578b6211d7311d62100ad643cbaf8408de99
|
tests/__init__.py
|
tests/__init__.py
|
import unittest.mock
def _test_module_init(module, main_name="main"):
with unittest.mock.patch.object(module, main_name, return_value=0):
with unittest.mock.patch.object(module, "__name__", "__main__"):
with unittest.mock.patch.object(module.sys, "exit") as exit:
module.module_init()
return exit.call_args[0][0] == 0
|
import unittest.mock
def _test_module_init(module, main_name="main"):
with unittest.mock.patch.object(
module, main_name, return_value=0
), unittest.mock.patch.object(
module, "__name__", "__main__"
), unittest.mock.patch.object(
module.sys, "exit"
) as exit:
module.module_init()
return exit.call_args[0][0] == 0
|
Use multiple context managers on one with statement (thanks Anna)
|
Use multiple context managers on one with statement (thanks Anna)
|
Python
|
mpl-2.0
|
rfinnie/2ping,rfinnie/2ping
|
d96e52c346314622afc904a2917416028c6784e3
|
swampdragon_live/models.py
|
swampdragon_live/models.py
|
# -*- coding: utf-8 -*-
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from django.dispatch import receiver
from .tasks import push_new_content
@receiver(post_save)
def post_save_handler(sender, instance, **kwargs):
instance_type = ContentType.objects.get_for_model(instance.__class__)
push_new_content.apply_async(countdown=1, kwargs={'instance_type_pk': instance_type.pk,
'instance_pk': instance.pk})
|
# -*- coding: utf-8 -*-
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from django.dispatch import receiver
from .tasks import push_new_content
@receiver(post_save)
def post_save_handler(sender, instance, **kwargs):
if ContentType.objects.exists():
instance_type = ContentType.objects.get_for_model(instance.__class__)
push_new_content.apply_async(countdown=1, kwargs={'instance_type_pk': instance_type.pk,
'instance_pk': instance.pk})
|
Fix initial migration until ContentType is available
|
Fix initial migration until ContentType is available
|
Python
|
mit
|
mback2k/swampdragon-live,mback2k/swampdragon-live
|
1c2b6c0daea1d04985ef6ddff35527ba207ec191
|
qual/tests/test_calendar.py
|
qual/tests/test_calendar.py
|
import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def check_valid_date(self, year, month, day):
d = self.calendar.date(year, month, day)
self.assertIsNotNone(d)
def test_leap_year_from_before_1582(self):
"""Pope Gregory introduced the calendar in 1582"""
self.check_valid_date(1200, 2, 29)
|
import unittest
from datetime import date
import qual
class TestProlepticGregorianCalendar(unittest.TestCase):
def setUp(self):
self.calendar = qual.ProlepticGregorianCalendar()
def check_valid_date(self, year, month, day):
d = self.calendar.date(year, month, day)
self.assertIsNotNone(d)
def check_invalid_date(self, year, month, day):
self.assertRaises(Exception, lambda : self.calendar(year, month, day))
def test_leap_year_from_before_1582(self):
"""Pope Gregory introduced the calendar in 1582"""
self.check_valid_date(1200, 2, 29)
def test_Julian_leap_day_is_not_a_valid_date(self):
"""This day /was/ a leap day contemporaneously,
but is not a valid date of the Gregorian calendar."""
self.check_invalid_date(1300, 2, 29)
|
Check that a certain date is invalid.
|
Check that a certain date is invalid.
This distinguishes correctly between the proleptic Gregorian calendar, and the historical or astronomical calendars, where this date would be valid.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
e50655479c0d3a96edd4005f834541889839fca3
|
binary_to_text.py
|
binary_to_text.py
|
#! /usr/bin/env python
import Gen.caffe_pb2 as pb2
import google.protobuf.text_format as pb2_text
import sys
def binary_to_text(binary_file, text_file):
msg = pb2.NetParameter()
with open(binary_file) as f:
msg.ParseFromString(f.read())
with open(text_file, "w") as f:
f.write(pb2_text.MessageToString(msg))
if __name__ == "__main__":
binary_file = sys.argv[1]
text_file = sys.argv[2]
binary_to_text(binary_file, text_file)
|
#! /usr/bin/env python
import Gen.caffe_pb2 as pb2
import google.protobuf.text_format as pb2_text
import sys
class ParameterTypeException(Exception): pass
def binary_to_text(binary_file, text_file, parameter_type):
if (parameter_type == "Net"):
msg = pb2.NetParameter()
elif (parameter_type == "Solver"):
msg = pb2.SolverParameter()
else:
raise ParameterTypeException("Unexpected Parameter Type: " + parameter_type)
with open(binary_file) as f:
msg.ParseFromString(f.read())
with open(text_file, "w") as f:
f.write(pb2_text.MessageToString(msg))
if __name__ == "__main__":
binary_file = sys.argv[1]
text_file = sys.argv[2]
try:
parameter_type = sys.argv[3]
except IndexError:
parameter_type = "Net"
binary_to_text(binary_file, text_file, parameter_type)
|
Add option to process SolverParameters.
|
Add option to process SolverParameters.
|
Python
|
bsd-3-clause
|
BeautifulDestinations/dnngraph,BeautifulDestinations/dnngraph
|
86ac48a3dcb71a4e504dcf04e30a00262d168e5f
|
test/parseJaguar.py
|
test/parseJaguar.py
|
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
os.chdir("eg01")
for file in ["dvb_gopt.out"]:
t = Jaguar(file)
t.parse()
print t.moenergies[0,:]
print t.homos[0]
print t.moenergies[0,t.homos[0]]
|
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
files = [ ["eg01","dvb_gopt.out"],
["eg02","dvb_sp.out"],
["eg03","dvb_ir.out"],
["eg06","dvb_un_sp.out"] ]
for f in files:
t = Jaguar(os.path.join(f[0],f[1]))
t.parse()
if f[0]!="eg03":
print t.scfvalues
|
Test the parsing of all of the uploaded Jaguar files
|
Test the parsing of all of the uploaded Jaguar files
|
Python
|
bsd-3-clause
|
jchodera/cclib,ben-albrecht/cclib,Schamnad/cclib,cclib/cclib,berquist/cclib,jchodera/cclib,berquist/cclib,gaursagar/cclib,ben-albrecht/cclib,Clyde-fare/cclib,andersx/cclib,berquist/cclib,andersx/cclib,gaursagar/cclib,ATenderholt/cclib,ghutchis/cclib,Schamnad/cclib,cclib/cclib,langner/cclib,cclib/cclib,Clyde-fare/cclib,langner/cclib,ATenderholt/cclib,ghutchis/cclib,langner/cclib
|
651663d2af72f46e7952d2835126f1512741f635
|
UserInput.py
|
UserInput.py
|
"""Like the raw_input built-in, but with bells and whistles."""
import getpass
def user_input(field, default='', choices=None, password=False, empty_ok=False, accept=False):
"""Prompt user for input until a value is retrieved or default
is accepted. Return the input.
Arguments:
field Description of the input being prompted for.
default Default value for the input accepted with a Return-key.
password Whether the user input should not be echoed to screen.
empty_ok Whether it's okay to accept an empty input.
accept Whether to skip getting actual user input and just accept
the default value, unless prevented by the combination of
arguments "empty_ok" and "default". That is, unless "default"
is an empty string and "empty_ok" is False.
"""
result = ''
while not result:
prompt = field
if default:
prompt += ' [{:}]'.format(default)
prompt += ': '
if accept and not (not default and not empty_ok):
print(prompt)
result = '{:}'.format(default)
else:
if password:
result = getpass.getpass(prompt)
else:
result = raw_input(prompt)
result = result.strip()
if not result:
result = default
if choices and result not in choices:
print('Must be one of {:}'.format(choices))
result = ''
if empty_ok:
break
return result
|
"""Like the input built-in, but with bells and whistles."""
import getpass
# Use raw_input for Python 2.x
try:
input = raw_input
except NameError:
pass
def user_input(field, default='', choices=None, password=False, empty_ok=False, accept=False):
"""Prompt user for input until a value is retrieved or default
is accepted. Return the input.
Arguments:
field Description of the input being prompted for.
default Default value for the input accepted with a Return-key.
password Whether the user input should not be echoed to screen.
empty_ok Whether it's okay to accept an empty input.
accept Whether to skip getting actual user input and just accept
the default value, unless prevented by the combination of
arguments "empty_ok" and "default". That is, unless "default"
is an empty string and "empty_ok" is False.
"""
result = ''
while not result:
prompt = field
if default:
prompt += ' [{0}]'.format(default)
prompt += ': '
if accept and not (not default and not empty_ok):
print(prompt)
result = '{0}'.format(default)
else:
if password:
result = getpass.getpass(prompt)
else:
result = input(prompt)
result = result.strip()
if not result:
result = default
if choices and result not in choices:
print('Must be one of {0}'.format(choices))
result = ''
if empty_ok:
break
return result
|
Fix for Python 2.6 and Python 3.
|
Fix for Python 2.6 and Python 3.
|
Python
|
mit
|
vmlaker/coils
|
58fc39ae95522ce152b4ff137071f74c5490e14e
|
chatterbot/constants.py
|
chatterbot/constants.py
|
"""
ChatterBot constants
"""
'''
The maximum length of characters that the text of a statement can contain.
This should be enforced on a per-model basis by the data model for each
storage adapter.
'''
STATEMENT_TEXT_MAX_LENGTH = 400
'''
The maximum length of characters that the text label of a conversation can contain.
The number 32 was chosen because that is the length of the string representation
of a UUID4 with no hyphens.
'''
CONVERSATION_LABEL_MAX_LENGTH = 32
'''
The maximum length of text that can be stored in the persona field of the statement model.
'''
PERSONA_MAX_LENGTH = 50
# The maximum length of characters that the name of a tag can contain
TAG_NAME_MAX_LENGTH = 50
DEFAULT_DJANGO_APP_NAME = 'django_chatterbot'
|
"""
ChatterBot constants
"""
'''
The maximum length of characters that the text of a statement can contain.
The number 255 is used because that is the maximum length of a char field
in most databases. This value should be enforced on a per-model basis by
the data model for each storage adapter.
'''
STATEMENT_TEXT_MAX_LENGTH = 255
'''
The maximum length of characters that the text label of a conversation can contain.
The number 32 was chosen because that is the length of the string representation
of a UUID4 with no hyphens.
'''
CONVERSATION_LABEL_MAX_LENGTH = 32
'''
The maximum length of text that can be stored in the persona field of the statement model.
'''
PERSONA_MAX_LENGTH = 50
# The maximum length of characters that the name of a tag can contain
TAG_NAME_MAX_LENGTH = 50
DEFAULT_DJANGO_APP_NAME = 'django_chatterbot'
|
Change statement text max-length to 255
|
Change statement text max-length to 255
|
Python
|
bsd-3-clause
|
vkosuri/ChatterBot,gunthercox/ChatterBot
|
27021bfa7062219a41ad29c40b97643ecf16f72b
|
doc/mkapidoc.py
|
doc/mkapidoc.py
|
#!/usr/bin/env python
# Generates the *public* API documentation.
# Remember to hide your private parts, people!
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.AccountManager',
'--exclude Exscript.HostAction',
'--exclude Exscript.Log',
'--exclude Exscript.Logfile',
'--exclude Exscript.QueueLogger',
'--exclude Exscript.QueueListener',
'--exclude Exscript.util.otp',
'--exclude Exscript.interpreter',
'--exclude Exscript.protocols.AbstractMethod',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.stdlib',
'--exclude Exscript.workqueue',
'--exclude Exscript.version',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
|
#!/usr/bin/env python
# Generates the *public* API documentation.
# Remember to hide your private parts, people!
import os, re, sys
project = 'Exscript'
base_dir = os.path.join('..', 'src', project)
doc_dir = 'api'
# Create the documentation directory.
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
# Generate the API documentation.
os.system('epydoc ' + ' '.join(['--name', project,
'--exclude Exscript.AbstractMethod',
'--exclude Exscript.AccountManager',
'--exclude Exscript.HostAction',
'--exclude Exscript.Log',
'--exclude Exscript.Logfile',
'--exclude Exscript.QueueLogger',
'--exclude Exscript.QueueListener',
'--exclude Exscript.util.otp',
'--exclude Exscript.interpreter',
'--exclude Exscript.protocols.AbstractMethod',
'--exclude Exscript.protocols.telnetlib',
'--exclude Exscript.stdlib',
'--exclude Exscript.workqueue',
'--exclude Exscript.version',
'--html',
'--no-private',
'--no-source',
'--no-frames',
'--inheritance=included',
'-v',
'-o %s' % doc_dir,
base_dir]))
|
Hide AbstractMethod class from the docs.
|
Hide AbstractMethod class from the docs.
|
Python
|
mit
|
knipknap/exscript,maximumG/exscript,knipknap/exscript,maximumG/exscript
|
208760340d3314f666d7e6437817cc96e0e16194
|
organizer/urls/tag.py
|
organizer/urls/tag.py
|
from django.conf.urls import url
from ..views import (
TagCreate, TagDelete, TagDetail, TagList,
TagUpdate)
urlpatterns = [
url(r'^$',
TagList.as_view(),
name='organizer_tag_list'),
url(r'^create/$',
TagCreate.as_view(),
name='organizer_tag_create'),
url(r'^(?P<slug>[\w\-]+)/$',
TagDetail.as_view(),
name='organizer_tag_detail'),
url(r'^(?P<slug>[\w-]+)/delete/$',
TagDelete.as_view(),
name='organizer_tag_delete'),
url(r'^(?P<slug>[\w\-]+)/update/$',
TagUpdate.as_view(),
name='organizer_tag_update'),
]
|
from django.conf.urls import url
from django.contrib.auth.decorators import \
login_required
from ..views import (
TagCreate, TagDelete, TagDetail, TagList,
TagUpdate)
urlpatterns = [
url(r'^$',
TagList.as_view(),
name='organizer_tag_list'),
url(r'^create/$',
login_required(
TagCreate.as_view()),
name='organizer_tag_create'),
url(r'^(?P<slug>[\w\-]+)/$',
TagDetail.as_view(),
name='organizer_tag_detail'),
url(r'^(?P<slug>[\w-]+)/delete/$',
TagDelete.as_view(),
name='organizer_tag_delete'),
url(r'^(?P<slug>[\w\-]+)/update/$',
TagUpdate.as_view(),
name='organizer_tag_update'),
]
|
Use login_required decorator in URL pattern.
|
Ch20: Use login_required decorator in URL pattern.
|
Python
|
bsd-2-clause
|
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
|
1f253c5bdf90055ff2d00a3b8d18c152c3b7031f
|
versions.py
|
versions.py
|
#!/usr/bin/env python
import os
import warnings
warnings.filterwarnings('ignore', category=DeprecationWarning)
def test_for_version(filename):
stdin, stdout = os.popen4('%s -V' % filename, 'r')
response = stdout.read()
if response.find('command not found') > 0:
return False
return '.'.join(response.strip().split(' ')[1].split('.')[:-1])
versions = ['python', 'python2.4', 'python2.5', 'python2.6']
valid = {}
for filename in versions:
version = test_for_version(filename)
if version and version not in valid:
valid[version] = filename
# Prefer the latest version of python
output = []
if '2.6' in valid:
output.append(valid['2.6'])
for version in valid.keys():
if valid[version] not in output:
output.append(valid[version])
print ' '.join(output)
|
#!/usr/bin/env python
import os
import warnings
warnings.filterwarnings('ignore', category=DeprecationWarning)
def test_for_version(filename):
stdin, stdout = os.popen4('%s -V' % filename, 'r')
response = stdout.read()
if response.find('command not found') > 0:
return False
return '.'.join(response.strip().split(' ')[1].split('.')[:-1])
versions = ['python', 'python2.4', 'python2.5', 'python2.6', 'python2.7']
valid = {}
for filename in versions:
version = test_for_version(filename)
if version and version not in valid:
valid[version] = filename
# Prefer 2.6 major version of python since that's my main development env
output = []
if '2.6' in valid:
output.append(valid['2.6'])
for version in valid.keys():
if valid[version] not in output:
output.append(valid[version])
print ' '.join(output)
|
Check for Python 2.7 as well
|
Check for Python 2.7 as well
|
Python
|
bsd-3-clause
|
hugoxia/pika,vrtsystems/pika,vitaly-krugl/pika,fkarb/pika-python3,Tarsbot/pika,Zephor5/pika,zixiliuyue/pika,renshawbay/pika-python3,reddec/pika,jstnlef/pika,skftn/pika,pika/pika,shinji-s/pika,knowsis/pika,benjamin9999/pika
|
539bd8a9df362f285bda375732ec71b3df1bcaae
|
orbeon_xml_api/tests/test_runner.py
|
orbeon_xml_api/tests/test_runner.py
|
from .test_common import CommonTestCase
from ..runner import Runner, RunnerForm
from ..utils import xml_from_file
class RunnerTestCase(CommonTestCase):
def setUp(self):
super(RunnerTestCase, self).setUp()
self.runner_xml = xml_from_file('tests/data', 'test_controls_runner.xml')
self.builder_xml = xml_from_file('tests/data', 'test_controls_builder.xml')
self.runner = Runner(self.runner_xml, None, self.builder_xml)
# TODO
def _test_constructor(self):
self.assertRaisesRegex(
Runner(self.runner_xml)
)
self.assertRaisesRegex(
Runner(self.runner_xml, self.builder_xml)
)
self.assertRaisesRegex(
Runner(self.runner_xml, self.builder)
)
self.assertRaisesRegex(
Runner(self.runner_xml, self.builder_xml, self.builder)
)
# Ok tests
runner = Runner(self.runner_xml, None, self.builder_xml)
self.assertIsInstance(runner, Runner)
runner = Runner(self.runner_xml, self.builder)
self.assertIsInstance(runner, Runner)
self.assertIsInstance(self.runner.form, RunnerForm)
|
from xmlunittest import XmlTestCase
from .test_common import CommonTestCase
from ..runner import Runner, RunnerForm
from ..utils import xml_from_file
class RunnerTestCase(CommonTestCase, XmlTestCase):
def setUp(self):
super(RunnerTestCase, self).setUp()
self.runner_xml = xml_from_file('tests/data', 'test_controls_runner_no-image-attachments-iteration.xml')
self.builder_xml = xml_from_file('tests/data', 'test_controls_builder_no-image-attachments-iteration.xml')
self.runner = Runner(self.runner_xml, None, self.builder_xml)
def test_constructor_validation_ok(self):
runner = Runner(self.runner_xml, None, self.builder_xml)
self.assertIsInstance(runner, Runner)
runner = Runner(self.runner_xml, self.builder)
self.assertIsInstance(runner, Runner)
self.assertIsInstance(self.runner.form, RunnerForm)
def test_constructor_validation_fails(self):
with self.assertRaisesRegexp(Exception, "Provide either the argument: builder or builder_xml."):
Runner(self.runner_xml)
with self.assertRaisesRegexp(Exception, "Constructor accepts either builder or builder_xml."):
Runner(self.runner_xml, self.builder, self.builder_xml)
|
Add Runner unit-tests: constructor with validation.
|
Add Runner unit-tests: constructor with validation.
|
Python
|
mit
|
bobslee/orbeon-xml-api
|
cde7dbd5a1bb83e85e15559120189d108f6f66aa
|
tortilla/utils.py
|
tortilla/utils.py
|
# -*- coding: utf-8 -*-
import six
from formats import FormatBank, discover_json, discover_yaml
formats = FormatBank()
discover_json(formats, content_type='application/json')
discover_yaml(formats, content_type='application/x-yaml')
def run_from_ipython():
return getattr(__builtins__, "__IPYTHON__", False)
class Bunch(dict):
def __init__(self, kwargs=None):
if kwargs is None:
kwargs = {}
for key, value in six.iteritems(kwargs):
kwargs[key] = bunchify(value)
super().__init__(kwargs)
self.__dict__ = self
def bunchify(obj):
if isinstance(obj, (list, tuple)):
return [bunchify(item) for item in obj]
if isinstance(obj, dict):
return Bunch(obj)
return obj
|
# -*- coding: utf-8 -*-
import six
from formats import FormatBank, discover_json, discover_yaml
formats = FormatBank()
discover_json(formats, content_type='application/json')
discover_yaml(formats, content_type='application/x-yaml')
def run_from_ipython():
return getattr(__builtins__, "__IPYTHON__", False)
class Bunch(dict):
def __init__(self, kwargs=None):
if kwargs is None:
kwargs = {}
for key, value in six.iteritems(kwargs):
kwargs[key] = bunchify(value)
super(Bunch, self).__init__(kwargs)
self.__dict__ = self
def bunchify(obj):
if isinstance(obj, (list, tuple)):
return [bunchify(item) for item in obj]
if isinstance(obj, dict):
return Bunch(obj)
return obj
|
Fix super() call for python <= 3.2
|
Fix super() call for python <= 3.2
|
Python
|
mit
|
redodo/tortilla
|
0de0818e5a0c52dde8c841d8e8254e2f4a3f9633
|
app/sense.py
|
app/sense.py
|
#!/usr/bin/env python3
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
import time
DEVICE = "PiSense"
DELAY = 0.0
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
display.show_properties(values, self.sensor.get_properties())
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
time.sleep(DELAY)
|
#!/usr/bin/env python3
from Sensor import SenseController
from KeyDispatcher import KeyDispatcher
from Display import Display
from DataLogger import SQLiteLogger
import time
DEVICE = "PiSense"
DELAY = 0.25
class Handler:
def __init__(self, display, logger, sensor):
self.display = display
self.logger = logger
self.sensor = sensor
self.recording = False
self.logger.log(DEVICE, "running", 1)
def read(self):
values = {}
if self.recording:
for reading in self.sensor.get_data():
values[reading[1]] = reading[2]
self.logger.log(DEVICE, reading[1], reading[2], reading[0])
display.show_properties(values, self.sensor.get_properties())
else:
values["recording"] = False
display.show_properties(values)
return True
def record(self):
self.recording = not self.recording
if self.recording:
self.logger.log(DEVICE, "recording", 1)
else:
self.logger.log(DEVICE, "recording", 0)
return True
def quit(self):
self.logger.log(DEVICE, "running", 0)
return False
with SenseController() as sensor, KeyDispatcher() as dispatcher, SQLiteLogger() as logger:
# setup display
display = Display("PiSense", "[r]ecord [q]uit")
# setup key handlers
handler = Handler(display, logger, sensor)
dispatcher.add("r", handler, "record")
dispatcher.add("q", handler, "quit")
# start processing key presses
while True:
if dispatcher.can_process_key():
if not dispatcher.process_key():
break
else:
handler.read()
time.sleep(DELAY)
|
Allow PiSense readings to be toggled on/off
|
Allow PiSense readings to be toggled on/off
|
Python
|
mit
|
gizmo-cda/g2x,thelonious/g2x,thelonious/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x
|
990f4f3ec850525ac4fcb78b33031b60dbe25ce4
|
versebot/verse.py
|
versebot/verse.py
|
"""
VerseBot for reddit
By Matthieu Grieger
verse.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
class Verse:
""" Class that holds the properties and methods of a Verse object. """
def __init__(self, book, chapter, verse, translation):
""" Initializes a Verse object with book, chapter, verse (if
exists), and translation (if exists). """
self.book = book
self.chapter = chapter
self.verse = verse
self.translation = translation
|
"""
VerseBot for reddit
By Matthieu Grieger
verse.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
class Verse:
""" Class that holds the properties and methods of a Verse object. """
def __init__(self, book, chapter, verse, translation):
""" Initializes a Verse object with book, chapter, verse (if
exists), and translation (if exists). """
self.book = book.lower().replace(" ", "")
self.chapter = int(chapter.replace(" ", ""))
self.verse = verse.replace(" ", "")
self.translation = translation.replace(" ", "")
|
Remove spaces and set to lowercase
|
Remove spaces and set to lowercase
|
Python
|
mit
|
Matthew-Arnold/slack-versebot,Matthew-Arnold/slack-versebot
|
fd87d09b03be003dcd13d778c175f796c4fdf7d6
|
test_http2_server.py
|
test_http2_server.py
|
from echo_client import client
def test_ok():
response = client('GET a_web_page.html HTTP/1.1').split('\r\n')
first_line = response[0]
assert first_line == 'HTTP/1.1 200 OK'
def test_body():
response = client('GET sample.txt HTTP/1.1').split('\r\n')
body = response[4]
assert 'This is a very simple text file.' in body
def test_directory():
response = client('GET / HTTP/1.1').split('\r\n')
body = response[4]
assert 'make_time.py' in body
def test_404():
response = client('GET does/not/exist.html HTTP/1.1').split('\r\n')
first_line = response[0]
assert first_line == 'HTTP/1.1 404 Not Found'
|
from echo_client import client
def test_ok():
response = client('GET a_web_page.html HTTP/1.1').split('\r\n')
first_line = response[0]
assert first_line == 'HTTP/1.1 200 OK'
def test_body():
response = client('GET sample.txt HTTP/1.1').split('\r\n')
body = response[4]
assert 'This is a very simple text file.' in body
def test_directory():
response = client('GET / HTTP/1.1').split('\r\n')
body = response[4]
assert "<a href='make_time.py'>make_time.py</a>" in body
def test_404():
response = client('GET does/not/exist.html HTTP/1.1').split('\r\n')
first_line = response[0]
assert first_line == 'HTTP/1.1 404 Not Found'
|
Change directory test to look for link, rather than just file name
|
Change directory test to look for link, rather than just file name
|
Python
|
mit
|
jwarren116/network-tools,jwarren116/network-tools
|
666d9c999ebf0cc388d8f045a04756424c2d9b62
|
gdemo/util.py
|
gdemo/util.py
|
"""Share utility functions."""
from urllib import parse
def get_route_value(environ, name):
value = environ['wsgiorg.routing_args'][1][name]
value = parse.unquote(value)
return value.replace('%2F', '/')
|
"""Share utility functions."""
try:
from urllib import parse
except ImportError:
import urllib as parse
def get_route_value(environ, name):
value = environ['wsgiorg.routing_args'][1][name]
value = parse.unquote(value)
return value.replace('%2F', '/')
|
Make it work for Python 2
|
Make it work for Python 2
Gabbi is designed to work with both Python 2.7 and 3.4.
|
Python
|
apache-2.0
|
cdent/gabbi-demo,cdent/gabbi-demo
|
f60363b3d24d2f4af5ddb894cc1f6494b371b18e
|
mass_mailing_switzerland/wizards/mailchimp_export_update_wizard.py
|
mass_mailing_switzerland/wizards/mailchimp_export_update_wizard.py
|
##############################################################################
#
# Copyright (C) 2020 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields, _
from odoo.exceptions import UserError
class ExportMailchimpWizard(models.TransientModel):
_inherit = "partner.export.mailchimp"
@api.multi
def get_mailing_contact_id(self, partner_id, force_create=False):
# Avoid exporting opt_out partner
if force_create:
partner = self.env["res.partner"].browse(partner_id)
if partner.opt_out:
return False
# Push the partner_id in mailing_contact creation
return super(
ExportMailchimpWizard, self.with_context(default_partner_id=partner_id)
).get_mailing_contact_id(partner_id, force_create)
|
##############################################################################
#
# Copyright (C) 2020 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields, _
from odoo.exceptions import UserError
class ExportMailchimpWizard(models.TransientModel):
_inherit = "partner.export.mailchimp"
@api.multi
def get_mailing_contact_id(self, partner_id, force_create=False):
# Avoid exporting opt_out partner
if force_create and partner_id.opt_out:
return False
# Push the partner_id in mailing_contact creation
return super(
ExportMailchimpWizard, self.with_context(default_partner_id=partner_id)
).get_mailing_contact_id(partner_id, force_create)
|
FIX opt_out prevention for mailchimp export
|
FIX opt_out prevention for mailchimp export
|
Python
|
agpl-3.0
|
CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland
|
3ef1531f6934055a416cdddc694f6ca75694d649
|
voltron/common.py
|
voltron/common.py
|
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = '~/.voltron/'
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
|
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = VOLTRON_DIR + 'config'
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
|
Make use of expanduser() more sane
|
Make use of expanduser() more sane
|
Python
|
mit
|
snare/voltron,snare/voltron,snare/voltron,snare/voltron
|
22207247c286ad3c656c3f6b550d869cf92f6e92
|
fs/sshfs/__init__.py
|
fs/sshfs/__init__.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from .sshfs import SSHFS
|
from __future__ import absolute_import
from __future__ import unicode_literals
from .sshfs import SSHFS
from ..opener import Opener, registry
@registry.install
class SSHOpener(Opener):
protocols = ['ssh']
def open_fs(self, fs_url, parse_result, writeable, create, cwd):
#from .sshfs import SSHFS
ssh_host, _, dir_path = parse_result.resource.partition('/')
ssh_host, _, ftp_port = ssh_host.partition(':')
ssh_port = int(ftp_port) if ftp_port.isdigit() else 22
ssh_fs = SSHFS(
ssh_host,
port=ssh_port,
user=parse_result.username,
passwd=parse_result.password,
)
return ssh_fs.opendir(dir_path) if dir_path else ssh_fs
|
Add fs Opener based on the builtin FTPFS opener
|
Add fs Opener based on the builtin FTPFS opener
|
Python
|
lgpl-2.1
|
althonos/fs.sshfs
|
d0bf235af3742a17c722488fe3679d5b73a0d945
|
thinc/neural/_classes/softmax.py
|
thinc/neural/_classes/softmax.py
|
from .affine import Affine
from ... import describe
from ...describe import Dimension, Synapses, Biases
from ...check import has_shape
from ... import check
@describe.attributes(
W=Synapses("Weights matrix",
lambda obj: (obj.nO, obj.nI),
lambda W, ops: None)
)
class Softmax(Affine):
name = 'softmax'
@check.arg(1, has_shape(('nB', 'nI')))
def predict(self, input__BI):
output__BO = self.ops.affine(self.W, self.b, input__BI)
output__BO = self.ops.softmax(output__BO, inplace=False)
return output__BO
@check.arg(1, has_shape(('nB', 'nI')))
def begin_update(self, input__BI, drop=0.):
output__BO = self.predict(input__BI)
@check.arg(0, has_shape(('nB', 'nO')))
def finish_update(grad__BO, sgd=None):
self.d_W += self.ops.batch_outer(grad__BO, input__BI)
self.d_b += grad__BO.sum(axis=0)
grad__BI = self.ops.dot(grad__BO, self.W)
if sgd is not None:
sgd(self._mem.weights, self._mem.gradient, key=self.id)
return grad__BI
return output__BO, finish_update
|
from .affine import Affine
from ... import describe
from ...describe import Dimension, Synapses, Biases
from ...check import has_shape
from ... import check
@describe.attributes(
W=Synapses("Weights matrix",
lambda obj: (obj.nO, obj.nI),
lambda W, ops: None)
)
class Softmax(Affine):
name = 'softmax'
@check.arg(1, has_shape(('nB', 'nI')))
def predict(self, input__BI):
output__BO = self.ops.affine(self.W, self.b, input__BI)
output__BO = self.ops.softmax(output__BO, inplace=False)
return output__BO
@check.arg(1, has_shape(('nB', 'nI')))
def begin_update(self, input__BI, drop=0.):
output__BO = self.predict(input__BI)
@check.arg(0, has_shape(('nB', 'nO')))
def finish_update(grad__BO, sgd=None):
self.d_W += self.ops.gemm(grad__BO, input__BI, trans1=True)
self.d_b += grad__BO.sum(axis=0)
grad__BI = self.ops.gemm(grad__BO, self.W)
if sgd is not None:
sgd(self._mem.weights, self._mem.gradient, key=self.id)
return grad__BI
return output__BO, finish_update
|
Fix gemm calls in Softmax
|
Fix gemm calls in Softmax
|
Python
|
mit
|
spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc
|
246971d8dd7d6c5fdc480c55e4e79ffd7a840b9b
|
Cura/View/View.py
|
Cura/View/View.py
|
#Abstract for all views
class View(object):
def __init__(self):
self._renderer = None
|
#Abstract for all views
class View(object):
def __init__(self):
self._renderer = None
def render(self, glcontext):
pass
|
Add a render method to view that should be reimplemented
|
Add a render method to view that should be reimplemented
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
ca674b743b6d48593f45d999335ae893cf2a90d6
|
base/config/production.py
|
base/config/production.py
|
" Production settings must be here. "
from .core import *
from os import path as op
SECRET_KEY = 'SecretKeyForSessionSigning'
ADMINS = frozenset([MAIL_USERNAME])
# flask.ext.collect
# -----------------
COLLECT_STATIC_ROOT = op.join(op.dirname(ROOTDIR), 'static')
# auth.oauth
# ----------
OAUTH_TWITTER = dict(
# flask-base-template app
consumer_key='ydcXz2pWyePfc3MX3nxJw',
consumer_secret='Pt1t2PjzKu8vsX5ixbFKu5gNEAekYrbpJrlsQMIwquc'
)
# dealer
DEALER_PARAMS = dict(
backends=('git', 'mercurial', 'simple', 'null')
)
# pymode:lint_ignore=W0614,W404
|
" Production settings must be here. "
from .core import *
from os import path as op
SECRET_KEY = 'SecretKeyForSessionSigning'
ADMINS = frozenset([MAIL_USERNAME])
# flask.ext.collect
# -----------------
COLLECT_STATIC_ROOT = op.join(op.dirname(ROOTDIR), 'static')
# auth.oauth
# ----------
OAUTH_TWITTER = dict(
consumer_key='750sRyKzvdGPJjPd96yfgw',
consumer_secret='UGcyjDCUOb1q44w1nUk8FA7aXxvwwj1BCbiFvYYI',
)
OAUTH_FACEBOOK = dict(
consumer_key='413457268707622',
consumer_secret='48e9be9f4e8abccd3fb916a3f646dd3f',
)
OAUTH_GITHUB = dict(
consumer_key='8bdb217c5df1c20fe632',
consumer_secret='a3aa972b2e66e3fac488b4544d55eda2aa2768b6',
)
# dealer
DEALER_PARAMS = dict(
backends=('git', 'mercurial', 'simple', 'null')
)
# pymode:lint_ignore=W0614,W404
|
Add github and facebook oauth credentials.
|
Add github and facebook oauth credentials.
|
Python
|
bsd-3-clause
|
klen/Flask-Foundation,klen/fquest,klen/tweetchi
|
6722e16aef43f9cfe03e7e76fc578582139721f6
|
vint/linting/env.py
|
vint/linting/env.py
|
import os
import os.path
import re
import logging
from pathlib import Path
VIM_SCRIPT_FILE_NAME_PATTERNS = r'(?:[\._]g?vimrc|.*\.vim$)'
def build_environment(cmdargs):
return {
'cmdargs': cmdargs,
'home_path': _get_home_path(cmdargs),
'cwd': _get_cwd(cmdargs),
'file_paths': _get_file_paths(cmdargs)
}
def _get_cwd(cmdargs):
return Path(os.getcwd())
def _get_home_path(cmdargs):
return Path(os.path.expanduser('~'))
def _get_file_paths(cmdargs):
if 'files' not in cmdargs:
return []
found_files = _collect_files([Path(path) for path in cmdargs['files']])
return found_files
def _collect_files(paths):
result = set()
for path in paths:
if path.is_dir():
dir_path = path
result |= _collect_files(tuple(dir_path.iterdir()))
elif _is_vim_script(path):
file_path = path
result.add(file_path)
else:
logging.debug('ignore not Vim script file: `{file_path}`'.format(
file_path=str(path)))
return result
def _is_vim_script(path):
file_name = path.name
return bool(re.search(VIM_SCRIPT_FILE_NAME_PATTERNS, file_name))
|
import os
import os.path
from pathlib import Path
from vint.linting.file_filter import find_vim_script
def build_environment(cmdargs):
return {
'cmdargs': cmdargs,
'home_path': _get_home_path(cmdargs),
'cwd': _get_cwd(cmdargs),
'file_paths': _get_file_paths(cmdargs)
}
def _get_cwd(cmdargs):
return Path(os.getcwd())
def _get_home_path(cmdargs):
return Path(os.path.expanduser('~'))
def _get_file_paths(cmdargs):
if 'files' not in cmdargs:
return []
found_file_paths = find_vim_script(map(Path, cmdargs['files']))
return set(found_file_paths)
|
Split file collecting algorithm to FileFilter
|
Split file collecting algorithm to FileFilter
|
Python
|
mit
|
Kuniwak/vint,RianFuro/vint,RianFuro/vint,Kuniwak/vint
|
ba8e567592c96dacb697e067004dc71799e4e93f
|
ctypeslib/test/stdio.py
|
ctypeslib/test/stdio.py
|
import os
from ctypeslib.dynamic_module import include
from ctypes import *
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
persist=False)
|
import os
from ctypeslib.dynamic_module import include
from ctypes import *
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
_gen_basename = include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
/* Silly comment */
""",
persist=False)
|
Store the basename of the generated files, to allow the unittests to clean up in the tearDown method.
|
Store the basename of the generated files, to allow the unittests to
clean up in the tearDown method.
|
Python
|
mit
|
sugarmanz/ctypeslib
|
1ff4dab34d4aa6935d4d1b54aa354882790b9b44
|
astroquery/astrometry_net/__init__.py
|
astroquery/astrometry_net/__init__.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
<Put Your Tool Name Here>
-------------------------
:author: <your name> (<your email>)
"""
# Make the URL of the server, timeout and other items configurable
# See <http://docs.astropy.org/en/latest/config/index.html#developer-usage>
# for docs and examples on how to do this
# Below is a common use case
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
<Put Your Tool Name Here>
-------------------------
:author: <your name> (<your email>)
"""
# Make the URL of the server, timeout and other items configurable
# See <http://docs.astropy.org/en/latest/config/index.html#developer-usage>
# for docs and examples on how to do this
# Below is a common use case
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(60,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
Add config items for server, timeout
|
Add config items for server, timeout
|
Python
|
bsd-3-clause
|
imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery,imbasimba/astroquery
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.