commit
stringlengths
40
40
old_file
stringlengths
4
236
new_file
stringlengths
4
236
old_contents
stringlengths
1
3.26k
new_contents
stringlengths
16
4.43k
subject
stringlengths
16
624
message
stringlengths
17
3.29k
lang
stringclasses
5 values
license
stringclasses
13 values
repos
stringlengths
5
91.5k
2e5ec8483930ad328b0a212ccc4b746f73b18c4c
pinax/ratings/tests/tests.py
pinax/ratings/tests/tests.py
from django.test import TestCase from django.contrib.auth.models import User from pinax.ratings.models import Rating from .models import Car class Tests(TestCase): def setUp(self): self.paltman = User.objects.create(username="paltman") self.jtauber = User.objects.create(username="jtauber") self.bronco = Car.objects.create(name="Ford Bronco") self.blazer = Car.objects.create(name="Cheverolet Blazer") self.expedition = Car.objects.create(name="Ford Expedition") def test_rating(self): overall = Rating.update(self.bronco, self.paltman, rating=5) self.assertEquals(overall, 5) overall = Rating.update(self.bronco, self.jtauber, rating=2) self.assertEquals(overall, 3.5)
from decimal import Decimal from django.test import TestCase from django.contrib.auth.models import User from pinax.ratings.models import Rating from .models import Car class Tests(TestCase): def setUp(self): self.paltman = User.objects.create(username="paltman") self.jtauber = User.objects.create(username="jtauber") self.bronco = Car.objects.create(name="Ford Bronco") self.blazer = Car.objects.create(name="Cheverolet Blazer") self.expedition = Car.objects.create(name="Ford Expedition") def test_rating(self): overall = Rating.update(self.bronco, self.paltman, rating=5) self.assertEquals(overall, Decimal("5")) overall = Rating.update(self.bronco, self.jtauber, rating=2) self.assertEquals(overall, Decimal("3.5"))
Use explicit Decimal in test
Use explicit Decimal in test
Python
mit
rizumu/pinax-ratings,pinax/pinax-ratings,arthur-wsw/pinax-ratings,arthur-wsw/pinax-ratings,pinax/pinax-ratings,arthur-wsw/pinax-ratings,pinax/pinax-ratings,rizumu/pinax-ratings,rizumu/pinax-ratings
41a0fa6412427dadfb33c77da45bc88c576fa67c
rdo/drivers/base.py
rdo/drivers/base.py
from subprocess import call class BaseDriver(object): def __init__(self, config): self.config = config def do(self, cmd): cmd = self.command(cmd) call(cmd) def command(self): raise NotImplementedError()
from subprocess import call class BaseDriver(object): def __init__(self, config): self.config = config def working_dir(self, cmd): command = ' '.join(cmd) working_dir = self.config.get('directory') if working_dir: command = 'cd %s && %s' % (working_dir, command) return command def do(self, cmd): cmd = self.command(cmd) call(cmd) def command(self): raise NotImplementedError()
Add a common function for deriving the working dir.
Add a common function for deriving the working dir.
Python
bsd-3-clause
ionrock/rdo
3940fd8b58b6a21627ef0ff62f7480593e5108eb
remedy/radremedy.py
remedy/radremedy.py
#!/usr/bin/env python """ radremedy.py Main web application file. Contains initial setup of database, API, and other components. Also contains the setup of the routes. """ from flask import Flask, url_for, request, abort from flask.ext.script import Manager from flask.ext.migrate import Migrate, MigrateCommand from rad.models import db, Resource def create_app(config, models=()): from remedyblueprint import remedy, url_for_other_page app = Flask(__name__) app.config.from_object(config) app.register_blueprint(remedy) # searching configurations app.jinja_env.trim_blocks = True # Register the paging helper method with Jinja2 app.jinja_env.globals['url_for_other_page'] = url_for_other_page db.init_app(app) Migrate(app, db, directory=app.config['MIGRATIONS_DIR']) manager = Manager(app) manager.add_command('db', MigrateCommand) # turning API off for now # from api_manager import init_api_manager # api_manager = init_api_manager(app, db) # map(lambda m: api_manager.create_api(m), models) return app, manager if __name__ == '__main__': app, manager = create_app('config.BaseConfig', (Resource, )) with app.app_context(): manager.run()
#!/usr/bin/env python """ radremedy.py Main web application file. Contains initial setup of database, API, and other components. Also contains the setup of the routes. """ from flask import Flask, url_for, request, abort from flask.ext.script import Manager from flask.ext.migrate import Migrate, MigrateCommand from flask.ext.login import current_user from rad.models import db, Resource def create_app(config, models=()): app = Flask(__name__) app.config.from_object(config) from remedyblueprint import remedy, url_for_other_page app.register_blueprint(remedy) from auth.user_auth import auth, login_manager app.register_blueprint(auth) login_manager.init_app(app) # searching configurations app.jinja_env.trim_blocks = True # Register the paging helper method with Jinja2 app.jinja_env.globals['url_for_other_page'] = url_for_other_page app.jinja_env.globals['logged_in'] = lambda : not current_user.is_anonymous() db.init_app(app) Migrate(app, db, directory=app.config['MIGRATIONS_DIR']) manager = Manager(app) manager.add_command('db', MigrateCommand) # turning API off for now # from api_manager import init_api_manager # api_manager = init_api_manager(app, db) # map(lambda m: api_manager.create_api(m), models) return app, manager if __name__ == '__main__': application, manager = create_app('config.BaseConfig', (Resource, )) with application.app_context(): manager.run()
Move around imports and not shadow app
Move around imports and not shadow app
Python
mpl-2.0
radremedy/radremedy,radioprotector/radremedy,radioprotector/radremedy,AllieDeford/radremedy,radremedy/radremedy,radremedy/radremedy,radioprotector/radremedy,radremedy/radremedy,AllieDeford/radremedy,AllieDeford/radremedy,radioprotector/radremedy
e985163d189883a2419e34021971709c9c7498c0
request/__init__.py
request/__init__.py
__version__ = 0.23 __copyright__ = 'Copyright (c) 2009, Kyle Fuller' __licence__ = 'BSD' __author__ = 'Kyle Fuller <inbox@kylefuller.co.uk>, krisje8 <krisje8@gmail.com>' __URL__ = 'http://kylefuller.co.uk/project/django-request/'
__version__ = 0.23 __copyright__ = 'Copyright (c) 2009, Kyle Fuller' __licence__ = 'BSD' __author__ = 'Kyle Fuller <inbox@kylefuller.co.uk>, Jannis Leidel (jezdez), krisje8 <krisje8@gmail.com>' __URL__ = 'http://kylefuller.co.uk/project/django-request/'
Add jezdez to the authors
Add jezdez to the authors
Python
bsd-2-clause
gnublade/django-request,kylef/django-request,kylef/django-request,kylef/django-request,gnublade/django-request,gnublade/django-request
5881436bea688ee49175192452dec18fad4ba9b2
airflow/executors/__init__.py
airflow/executors/__init__.py
import logging from airflow import configuration from airflow.executors.base_executor import BaseExecutor from airflow.executors.local_executor import LocalExecutor from airflow.executors.sequential_executor import SequentialExecutor # TODO Fix this emergency fix try: from airflow.executors.celery_executor import CeleryExecutor except: pass try: from airflow.contrib.executors.mesos_executor import MesosExecutor except: pass from airflow.utils import AirflowException _EXECUTOR = configuration.get('core', 'EXECUTOR') if _EXECUTOR == 'LocalExecutor': DEFAULT_EXECUTOR = LocalExecutor() elif _EXECUTOR == 'CeleryExecutor': DEFAULT_EXECUTOR = CeleryExecutor() elif _EXECUTOR == 'SequentialExecutor': DEFAULT_EXECUTOR = SequentialExecutor() elif _EXECUTOR == 'MesosExecutor': DEFAULT_EXECUTOR = MesosExecutor() else: # Loading plugins from airflow.plugins_manager import executors as _executors for _executor in _executors: globals()[_executor.__name__] = _executor if _EXECUTOR in globals(): DEFAULT_EXECUTOR = globals()[_EXECUTOR]() else: raise AirflowException("Executor {0} not supported.".format(_EXECUTOR)) logging.info("Using executor " + _EXECUTOR)
import logging from airflow import configuration from airflow.executors.base_executor import BaseExecutor from airflow.executors.local_executor import LocalExecutor from airflow.executors.sequential_executor import SequentialExecutor from airflow.utils import AirflowException _EXECUTOR = configuration.get('core', 'EXECUTOR') if _EXECUTOR == 'LocalExecutor': DEFAULT_EXECUTOR = LocalExecutor() elif _EXECUTOR == 'CeleryExecutor': from airflow.executors.celery_executor import CeleryExecutor DEFAULT_EXECUTOR = CeleryExecutor() elif _EXECUTOR == 'SequentialExecutor': DEFAULT_EXECUTOR = SequentialExecutor() elif _EXECUTOR == 'MesosExecutor': from airflow.contrib.executors.mesos_executor import MesosExecutor DEFAULT_EXECUTOR = MesosExecutor() else: # Loading plugins from airflow.plugins_manager import executors as _executors for _executor in _executors: globals()[_executor.__name__] = _executor if _EXECUTOR in globals(): DEFAULT_EXECUTOR = globals()[_EXECUTOR]() else: raise AirflowException("Executor {0} not supported.".format(_EXECUTOR)) logging.info("Using executor " + _EXECUTOR)
Remove hack by only importing when configured
Remove hack by only importing when configured
Python
apache-2.0
asnir/airflow,DEVELByte/incubator-airflow,yati-sagade/incubator-airflow,OpringaoDoTurno/airflow,yk5/incubator-airflow,spektom/incubator-airflow,owlabs/incubator-airflow,preete-dixit-ck/incubator-airflow,malmiron/incubator-airflow,alexvanboxel/airflow,wndhydrnt/airflow,bolkedebruin/airflow,dhuang/incubator-airflow,ledsusop/airflow,mylons/incubator-airflow,easytaxibr/airflow,lxneng/incubator-airflow,hgrif/incubator-airflow,modsy/incubator-airflow,vineet-rh/incubator-airflow,sergiohgz/incubator-airflow,andyxhadji/incubator-airflow,wooga/airflow,ProstoMaxim/incubator-airflow,artwr/airflow,jesusfcr/airflow,yoziru-desu/airflow,adrpar/incubator-airflow,Acehaidrey/incubator-airflow,adrpar/incubator-airflow,hgrif/incubator-airflow,sdiazb/airflow,forevernull/incubator-airflow,skudriashev/incubator-airflow,mtustin-handy/airflow,moritzpein/airflow,bolkedebruin/airflow,stverhae/incubator-airflow,dgies/incubator-airflow,apache/incubator-airflow,CloverHealth/airflow,mtdewulf/incubator-airflow,vineet-rh/incubator-airflow,brandsoulmates/incubator-airflow,andrewmchen/incubator-airflow,nathanielvarona/airflow,mrares/incubator-airflow,dud225/incubator-airflow,rishibarve/incubator-airflow,btallman/incubator-airflow,kerzhner/airflow,vijaysbhat/incubator-airflow,juvoinc/airflow,Twistbioscience/incubator-airflow,sdiazb/airflow,mistercrunch/airflow,malmiron/incubator-airflow,ledsusop/airflow,ronfung/incubator-airflow,d-lee/airflow,danielvdende/incubator-airflow,mtustin-handy/airflow,mylons/incubator-airflow,caseyching/incubator-airflow,asnir/airflow,DEVELByte/incubator-airflow,jfantom/incubator-airflow,moritzpein/airflow,holygits/incubator-airflow,skudriashev/incubator-airflow,asnir/airflow,lyft/incubator-airflow,ronfung/incubator-airflow,spektom/incubator-airflow,opensignal/airflow,bolkedebruin/airflow,owlabs/incubator-airflow,jwi078/incubator-airflow,jiwang576/incubator-airflow,andrewmchen/incubator-airflow,ProstoMaxim/incubator-airflow,jlowin/airflow,sid88in/incubator-airflow,OpringaoDoTurno/airflow,modsy/incubator-airflow,modsy/incubator-airflow,NielsZeilemaker/incubator-airflow,yk5/incubator-airflow,RealImpactAnalytics/airflow,wolfier/incubator-airflow,dgies/incubator-airflow,hamedhsn/incubator-airflow,N3da/incubator-airflow,janczak10/incubator-airflow,Chedi/airflow,apache/airflow,DinoCow/airflow,andyxhadji/incubator-airflow,apache/airflow,easytaxibr/airflow,mistercrunch/airflow,hamedhsn/incubator-airflow,danielvdende/incubator-airflow,hamedhsn/incubator-airflow,ronfung/incubator-airflow,wndhydrnt/airflow,gilt/incubator-airflow,Acehaidrey/incubator-airflow,plypaul/airflow,RealImpactAnalytics/airflow,yk5/incubator-airflow,yiqingj/airflow,janczak10/incubator-airflow,ProstoMaxim/incubator-airflow,mrkm4ntr/incubator-airflow,nathanielvarona/airflow,yoziru-desu/airflow,griffinqiu/airflow,dud225/incubator-airflow,sekikn/incubator-airflow,gtoonstra/airflow,alexvanboxel/airflow,jhsenjaliya/incubator-airflow,nathanielvarona/airflow,fenglu-g/incubator-airflow,DinoCow/airflow,Acehaidrey/incubator-airflow,N3da/incubator-airflow,danielvdende/incubator-airflow,Fokko/incubator-airflow,neovintage/airflow,Acehaidrey/incubator-airflow,cjqian/incubator-airflow,mylons/incubator-airflow,cfei18/incubator-airflow,ty707/airflow,ty707/airflow,mattuuh7/incubator-airflow,d-lee/airflow,Twistbioscience/incubator-airflow,wileeam/airflow,janczak10/incubator-airflow,N3da/incubator-airflow,NielsZeilemaker/incubator-airflow,yoziru-desu/airflow,apache/incubator-airflow,jwi078/incubator-airflow,vineet-rh/incubator-airflow,airbnb/airflow,danielvdende/incubator-airflow,opensignal/airflow,cjqian/incubator-airflow,fenglu-g/incubator-airflow,sergiohgz/incubator-airflow,wndhydrnt/airflow,mistercrunch/airflow,jgao54/airflow,gritlogic/incubator-airflow,fenglu-g/incubator-airflow,dgies/incubator-airflow,r39132/airflow,btallman/incubator-airflow,mattuuh7/incubator-airflow,dmitry-r/incubator-airflow,kerzhner/airflow,holygits/incubator-airflow,hamedhsn/incubator-airflow,MortalViews/incubator-airflow,btallman/incubator-airflow,mrkm4ntr/incubator-airflow,zack3241/incubator-airflow,owlabs/incubator-airflow,cfei18/incubator-airflow,AllisonWang/incubator-airflow,adamhaney/airflow,dgies/incubator-airflow,wolfier/incubator-airflow,sergiohgz/incubator-airflow,r39132/airflow,jesusfcr/airflow,cfei18/incubator-airflow,MetrodataTeam/incubator-airflow,airbnb/airflow,andyxhadji/incubator-airflow,yiqingj/airflow,yati-sagade/incubator-airflow,preete-dixit-ck/incubator-airflow,cfei18/incubator-airflow,holygits/incubator-airflow,sid88in/incubator-airflow,holygits/incubator-airflow,forevernull/incubator-airflow,CloverHealth/airflow,yati-sagade/incubator-airflow,fenglu-g/incubator-airflow,ronfung/incubator-airflow,zoyahav/incubator-airflow,cjqian/incubator-airflow,cjqian/incubator-airflow,lxneng/incubator-airflow,forevernull/incubator-airflow,sergiohgz/incubator-airflow,DinoCow/airflow,akosel/incubator-airflow,bolkedebruin/airflow,nathanielvarona/airflow,AllisonWang/incubator-airflow,wndhydrnt/airflow,rishibarve/incubator-airflow,edgarRd/incubator-airflow,jhsenjaliya/incubator-airflow,jesusfcr/airflow,lxneng/incubator-airflow,neovintage/airflow,adamhaney/airflow,jgao54/airflow,sdiazb/airflow,cfei18/incubator-airflow,MortalViews/incubator-airflow,brandsoulmates/incubator-airflow,gritlogic/incubator-airflow,mtustin-handy/airflow,stverhae/incubator-airflow,saguziel/incubator-airflow,jfantom/incubator-airflow,malmiron/incubator-airflow,Chedi/airflow,saguziel/incubator-airflow,neovintage/airflow,KL-WLCR/incubator-airflow,OpringaoDoTurno/airflow,wileeam/airflow,aminghadersohi/airflow,jwi078/incubator-airflow,DinoCow/airflow,mrkm4ntr/incubator-airflow,jfantom/incubator-airflow,Tagar/incubator-airflow,wxiang7/airflow,kerzhner/airflow,dmitry-r/incubator-airflow,Fokko/incubator-airflow,jiwang576/incubator-airflow,andrewmchen/incubator-airflow,N3da/incubator-airflow,mrkm4ntr/incubator-airflow,mattuuh7/incubator-airflow,Chedi/airflow,jhsenjaliya/incubator-airflow,NielsZeilemaker/incubator-airflow,d-lee/airflow,zack3241/incubator-airflow,vijaysbhat/incubator-airflow,nathanielvarona/airflow,wooga/airflow,preete-dixit-ck/incubator-airflow,skudriashev/incubator-airflow,wolfier/incubator-airflow,preete-dixit-ck/incubator-airflow,zodiac/incubator-airflow,MetrodataTeam/incubator-airflow,wooga/airflow,easytaxibr/airflow,caseyching/incubator-airflow,malmiron/incubator-airflow,dmitry-r/incubator-airflow,jhsenjaliya/incubator-airflow,ledsusop/airflow,KL-WLCR/incubator-airflow,DEVELByte/incubator-airflow,jiwang576/incubator-airflow,d-lee/airflow,bolkedebruin/airflow,griffinqiu/airflow,wxiang7/airflow,MetrodataTeam/incubator-airflow,sekikn/incubator-airflow,criccomini/airflow,akosel/incubator-airflow,Tagar/incubator-airflow,DEVELByte/incubator-airflow,airbnb/airflow,opensignal/airflow,mtustin-handy/airflow,danielvdende/incubator-airflow,subodhchhabra/airflow,yiqingj/airflow,plypaul/airflow,mtagle/airflow,rishibarve/incubator-airflow,caseyching/incubator-airflow,rishibarve/incubator-airflow,gtoonstra/airflow,wileeam/airflow,wxiang7/airflow,apache/airflow,plypaul/airflow,CloverHealth/airflow,biln/airflow,hgrif/incubator-airflow,AllisonWang/incubator-airflow,dhuang/incubator-airflow,gilt/incubator-airflow,sid88in/incubator-airflow,ty707/airflow,edgarRd/incubator-airflow,mrares/incubator-airflow,adrpar/incubator-airflow,KL-WLCR/incubator-airflow,zack3241/incubator-airflow,Tagar/incubator-airflow,Acehaidrey/incubator-airflow,ProstoMaxim/incubator-airflow,wolfier/incubator-airflow,edgarRd/incubator-airflow,mistercrunch/airflow,dud225/incubator-airflow,griffinqiu/airflow,zoyahav/incubator-airflow,spektom/incubator-airflow,mtdewulf/incubator-airflow,andrewmchen/incubator-airflow,sekikn/incubator-airflow,alexvanboxel/airflow,Fokko/incubator-airflow,jbhsieh/incubator-airflow,r39132/airflow,jwi078/incubator-airflow,mtagle/airflow,mrares/incubator-airflow,hgrif/incubator-airflow,btallman/incubator-airflow,yati-sagade/incubator-airflow,stverhae/incubator-airflow,apache/airflow,Acehaidrey/incubator-airflow,dhuang/incubator-airflow,jbhsieh/incubator-airflow,modsy/incubator-airflow,AllisonWang/incubator-airflow,Tagar/incubator-airflow,jgao54/airflow,gritlogic/incubator-airflow,opensignal/airflow,sekikn/incubator-airflow,yiqingj/airflow,zodiac/incubator-airflow,artwr/airflow,ledsusop/airflow,vijaysbhat/incubator-airflow,vineet-rh/incubator-airflow,caseyching/incubator-airflow,Twistbioscience/incubator-airflow,artwr/airflow,artwr/airflow,mattuuh7/incubator-airflow,easytaxibr/airflow,andyxhadji/incubator-airflow,adamhaney/airflow,jbhsieh/incubator-airflow,Twistbioscience/incubator-airflow,owlabs/incubator-airflow,lxneng/incubator-airflow,apache/airflow,vijaysbhat/incubator-airflow,adrpar/incubator-airflow,MetrodataTeam/incubator-airflow,sid88in/incubator-airflow,jfantom/incubator-airflow,dud225/incubator-airflow,brandsoulmates/incubator-airflow,nathanielvarona/airflow,gilt/incubator-airflow,KL-WLCR/incubator-airflow,subodhchhabra/airflow,cademarkegard/airflow,Fokko/incubator-airflow,stverhae/incubator-airflow,jlowin/airflow,gtoonstra/airflow,ty707/airflow,apache/incubator-airflow,criccomini/airflow,adamhaney/airflow,jesusfcr/airflow,MortalViews/incubator-airflow,yk5/incubator-airflow,mtagle/airflow,mtdewulf/incubator-airflow,airbnb/airflow,aminghadersohi/airflow,aminghadersohi/airflow,dhuang/incubator-airflow,lyft/incubator-airflow,juvoinc/airflow,gritlogic/incubator-airflow,juvoinc/airflow,wooga/airflow,sdiazb/airflow,zoyahav/incubator-airflow,juvoinc/airflow,mrares/incubator-airflow,subodhchhabra/airflow,brandsoulmates/incubator-airflow,cademarkegard/airflow,cademarkegard/airflow,saguziel/incubator-airflow,skudriashev/incubator-airflow,alexvanboxel/airflow,jiwang576/incubator-airflow,dmitry-r/incubator-airflow,subodhchhabra/airflow,zodiac/incubator-airflow,wxiang7/airflow,biln/airflow,cfei18/incubator-airflow,gtoonstra/airflow,mtdewulf/incubator-airflow,biln/airflow,lyft/incubator-airflow,spektom/incubator-airflow,gilt/incubator-airflow,akosel/incubator-airflow,mtagle/airflow,saguziel/incubator-airflow,yoziru-desu/airflow,kerzhner/airflow,jlowin/airflow,griffinqiu/airflow,RealImpactAnalytics/airflow,asnir/airflow,wileeam/airflow,jbhsieh/incubator-airflow,cademarkegard/airflow,criccomini/airflow,apache/incubator-airflow,zodiac/incubator-airflow,plypaul/airflow,RealImpactAnalytics/airflow,akosel/incubator-airflow,mylons/incubator-airflow,aminghadersohi/airflow,r39132/airflow,NielsZeilemaker/incubator-airflow,apache/airflow,lyft/incubator-airflow,criccomini/airflow,zoyahav/incubator-airflow,moritzpein/airflow,MortalViews/incubator-airflow,forevernull/incubator-airflow,jgao54/airflow,OpringaoDoTurno/airflow,janczak10/incubator-airflow,edgarRd/incubator-airflow,neovintage/airflow,Chedi/airflow,jlowin/airflow,CloverHealth/airflow,biln/airflow,moritzpein/airflow,danielvdende/incubator-airflow,zack3241/incubator-airflow
5ac310b7c5cee4a8c5f247ae117fda17fc4cb61a
pypocketexplore/jobs.py
pypocketexplore/jobs.py
from datetime import datetime import requests as req from pymongo import MongoClient from pypocketexplore.config import MONGO_URI from time import sleep def extract_topic_items(topic): db = MongoClient(MONGO_URI).get_default_database() resp = req.get('http://localhost:5000/api/topic/{}'.format(topic)) data = resp.json() related_topics = data.get('related_topics') items = data.get('items') if items: res = db['items'].insert(items) db['topics'].update_many({'topic': topic}, {'$set': {'topic': topic, 'is_scraped': True, 'datetime_scraped': datetime.utcnow(), 'queued': True}}, upsert=True) for related_topic in related_topics: req.get('http://localhost:5000/api/topic/{}?async=true'.format(related_topic)).json() print("Rate limit! Going to sleep for 2 mins!") sleep(2 * 60) print("Wakey wakey eggs and bakey!") return res elif resp.ok and not items: return else: raise Exception if __name__ == '__main__': extract_topic_items('finance')
from datetime import datetime import requests as req from pymongo import MongoClient from pypocketexplore.config import MONGO_URI from time import sleep from redis import StrictRedis import rq def extract_topic_items(topic): r = StrictRedis() def topic_in_queue(topic): q = rq.Queue('topics', connection=StrictRedis()) if any(job.kwargs.get('topic') for job in q.get_jobs()): return True else: return False db = MongoClient(MONGO_URI).get_default_database() resp = req.get('http://localhost:5000/api/topic/{}'.format(topic)) data = resp.json() related_topics = data.get('related_topics') items = data.get('items') if resp.ok: print('Inserting {} items for topic {}'.format(len(items)), topic) res = db['items'].insert(items) r.sadd('scraped_topics', topic) for related_topic in related_topics: if not topic_in_queue(related_topic) and not r.sismember('scraped_topics', related_topic): print('Enqueuing related topic'.format(related_topic)) req.get('http://localhost:5000/api/topic/{}?async=true'.format(related_topic)).json() print("Rate limit! Going to sleep for 2 mins!") sleep(2 * 60) print("Wakey wakey eggs and bakey!") return res else: raise Exception('Something went wrong with topic {}. /api/explore/{} returned {}'.format(topic, topic, resp)) if __name__ == '__main__': extract_topic_items('finance')
Fix bug to avoid duplicating topics
Fix bug to avoid duplicating topics
Python
mit
Florents-Tselai/PyPocketExplore
edec2186f5a83789a5d6a5dbd112c9ff716c3d46
src/python/datamodels/output_models.py
src/python/datamodels/output_models.py
import hashlib class Store(object): def __init__(self): self.id = None self.name = None self.location = None def __repr__(self): return "%s,%s,%s" % (self.name, self.location.zipcode, self.location.coords) class Customer(object): def __init__(self): self.id = None self.name = None self.location = None def __repr__(self): return "(%s, %s, %s)" % \ (self.id, self.name, self.location.zipcode) class Transaction(object): def __init__(self, customer=None, trans_time=None, purchased_items=None, store=None, trans_count=None): self.store = store self.customer = customer self.trans_time = trans_time self.purchased_items = purchased_items self.trans_count = trans_count def transaction_id(self): return hashlib.md5(repr(self)).hexdigest() def __repr__(self): return "(%s, %s, %s, %s)" % (self.store.id, self.customer.id, self.trans_time, self.trans_count)
import hashlib class Store(object): """ Record for stores. id -- integer name -- string location -- ZipcodeRecord """ def __init__(self): self.id = None self.name = None self.location = None def __repr__(self): return "%s,%s,%s" % (self.name, self.location.zipcode, self.location.coords) class Customer(object): """ Record for customers. id -- integer name -- string location -- ZipcodeRecord """ def __init__(self): self.id = None self.name = None self.location = None def __repr__(self): return "(%s, %s, %s)" % \ (self.id, self.name, self.location.zipcode) class Transaction(object): """ Record for transactions store -- Store customer -- Customer trans_time -- transaction time in days since start of simulation. int or long purchased_items -- list of products purchased trans_count -- hidden transaction id """ def __init__(self, customer=None, trans_time=None, purchased_items=None, store=None, trans_count=None): self.store = store self.customer = customer self.trans_time = trans_time self.purchased_items = purchased_items self.trans_count = trans_count def transaction_id(self): """ Compute transaction id as a hash of the transaction. Returns a string """ return hashlib.md5(repr(self)).hexdigest() def __repr__(self): return "(%s, %s, %s, %s)" % (self.store.id, self.customer.id, self.trans_time, self.trans_count)
Add docstrings to output models
Add docstrings to output models
Python
apache-2.0
rnowling/bigpetstore-data-generator,rnowling/bigpetstore-data-generator,rnowling/bigpetstore-data-generator
fb4aa211f64ed6fdc0443d03dd02dc52fc882978
server/dummy/dummy_server.py
server/dummy/dummy_server.py
#!/usr/bin/env python import BaseHTTPServer ServerClass = BaseHTTPServer.HTTPServer RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler SERVER_NAME = '' SERVER_PORT = 9000 class JsonPostResponder(RequestHandlerClass): def _get_content_from_stream(self, length, stream): return stream.read(length) def do_POST(self): content_length = int(self.headers['Content-Length']) content = self._get_content_from_stream(content_length, self.rfile) print('\n--- %s%s\n%s' % (self.command, self.path, self.headers)) print content, '\n' self.send_response(200) self.end_headers() server_address = (SERVER_NAME, SERVER_PORT) httpd = ServerClass(server_address, JsonPostResponder) httpd.serve_forever()
#!/usr/bin/env python import BaseHTTPServer ServerClass = BaseHTTPServer.HTTPServer RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler SERVER_NAME = '' SERVER_PORT = 9000 class JsonPostResponder(RequestHandlerClass): def _get_content_from_stream(self, length, stream): return stream.read(length) def _transaction_string(self, command, path, headers, content): return '%s %s\n%s%s\n' % (command, path, headers, content) def _print_request(self, *request): print('--> %s' % self._transaction_string(*request)) def do_POST(self): content_length = int(self.headers['Content-Length']) content = self._get_content_from_stream(content_length, self.rfile) self._print_request(self.command, self.path, self.headers, content) self.send_response(200) self.end_headers() server_address = (SERVER_NAME, SERVER_PORT) httpd = ServerClass(server_address, JsonPostResponder) httpd.serve_forever()
Clean up and refactor printing of request
Clean up and refactor printing of request
Python
mit
jonspeicher/Puddle,jonspeicher/Puddle,jonspeicher/Puddle
3e3f7b827e226146ec7d3efe523f1f900ac4e99a
sjconfparts/type.py
sjconfparts/type.py
class Type: @classmethod def str_to_list(xcls, str_object): list = map(str.strip, str_object.split(',')) try: list.remove('') except ValueError: pass return list @classmethod def list_to_str(xcls, list_object): return ', '.join(list_object) @classmethod def str_to_bool(xcls, str_object): if str_object == "yes" or str_object == "on" or str_object == "true": return True elif str_object == "no" or str_object == "off" or str_object == "false": return False else: raise TypeError @classmethod def bool_to_str(xcls, bool_object): if bool_object: return "yes" else: return "no"
class Type: @classmethod def str_to_list(xcls, str_object): list = map(str.strip, str_object.split(',')) try: list.remove('') except ValueError: pass return list @classmethod def list_to_str(xcls, list_object): return ', '.join(list_object) @classmethod def str_to_bool(xcls, str_object): if str_object == "yes" or str_object == "on" or str_object == "true" or str_object == "enabled" or str_object == "enable": return True elif str_object == "no" or str_object == "off" or str_object == "false" or str_object == "disabled" or str_object == "disable": return False else: raise TypeError @classmethod def bool_to_str(xcls, bool_object): if bool_object: return "yes" else: return "no"
Allow “enabled“, “enable”, “disabled“, “disable” as boolean values
Allow “enabled“, “enable”, “disabled“, “disable” as boolean values
Python
lgpl-2.1
SmartJog/sjconf,SmartJog/sjconf
ba0a4aff1ea21670712b35061570805e62bb4159
Instanssi/admin_blog/forms.py
Instanssi/admin_blog/forms.py
# -*- coding: utf-8 -*- from django import forms from uni_form.helper import FormHelper from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder from Instanssi.ext_blog.models import BlogEntry class BlogEntryForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(BlogEntryForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Blogientry', 'title', 'text', 'public', ButtonHolder ( Submit('submit', 'Tallenna') ) ) ) class Meta: model = BlogEntry fields = ('title','text','public')
# -*- coding: utf-8 -*- from django import forms from uni_form.helper import FormHelper from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder from Instanssi.ext_blog.models import BlogEntry class BlogEntryForm(forms.ModelForm): def __init__(self, *args, **kwargs): super(BlogEntryForm, self).__init__(*args, **kwargs) self.helper = FormHelper() self.helper.layout = Layout( Fieldset( u'Blogientry', 'title', 'text', 'date', 'public', ButtonHolder ( Submit('submit', 'Tallenna') ) ) ) class Meta: model = BlogEntry fields = ('title','text','public','date')
Add date field to edit form.
admin_blog: Add date field to edit form.
Python
mit
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
1c2e17e31c00a52661706a3c90efbb3c93d6fbef
app/initialization.py
app/initialization.py
import sys import os import shutil import composer import configuration import downloader def run(): project_dir = os.getcwd()+'/' execution_dir = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0]+'/' if len(sys.argv) == 2: project_dir = sys.argv[1] os.chdir(execution_dir) print '>>> Execution dir: '+execution_dir print '>>> Project dir: '+project_dir build_dir = project_dir+'build/' configuration.load(project_dir) configuration.add('project-dir', project_dir) configuration.add('build-dir', build_dir) composer.initialization() downloader.initialization() def update(): php_bin = 'php' if len(sys.argv) == 2: php_bin = sys.argv[1] print '>>> PHP version is: '+php_bin configuration.add('php', php_bin) composer.initialization() composer.update() downloader.update() def prepare_dir(path): if os.path.isdir(path): shutil.rmtree(path) os.makedirs(path)
import sys import os import shutil import composer import configuration import downloader def run(): try: project_dir = configuration.get_value('project-dir') except: project_dir = os.getcwd()+'/' execution_dir = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0]+'/' if len(sys.argv) == 2: project_dir = sys.argv[1] os.chdir(execution_dir) print '>>> Execution dir: '+execution_dir print '>>> Project dir: '+project_dir build_dir = project_dir+'build/' configuration.load(project_dir) configuration.add('project-dir', project_dir) configuration.add('build-dir', build_dir) composer.initialization() downloader.initialization() def update(): php_bin = 'php' if len(sys.argv) == 2: php_bin = sys.argv[1] print '>>> PHP version is: '+php_bin configuration.add('php', php_bin) composer.initialization() composer.update() downloader.update() def prepare_dir(path): if os.path.isdir(path): shutil.rmtree(path) os.makedirs(path)
Fix issue with duplicated call
Fix issue with duplicated call
Python
mit
mi-schi/php-code-checker
b9fcd270f520f49fcbe85bcbc53940326f556fdf
Lib/test/test_import.py
Lib/test/test_import.py
from test_support import TESTFN import os import random source = TESTFN + ".py" pyc = TESTFN + ".pyc" pyo = TESTFN + ".pyo" f = open(source, "w") print >> f, "# This will test Python's ability to import a .py file" a = random.randrange(1000) b = random.randrange(1000) print >> f, "a =", a print >> f, "b =", b f.close() try: try: mod = __import__(TESTFN) except ImportError, err: raise ValueError, "import from .py failed: %s" % err if mod.a != a or mod.b != b: print a, "!=", mod.a print b, "!=", mod.b raise ValueError, "module loaded (%s) but contents invalid" % mod finally: os.unlink(source) try: try: reload(mod) except ImportError, err: raise ValueError, "import from .pyc/.pyo failed: %s" % err finally: try: os.unlink(pyc) except os.error: pass try: os.unlink(pyo) except os.error: pass
from test_support import TESTFN import os import random import sys sys.path.insert(0, os.curdir) source = TESTFN + ".py" pyc = TESTFN + ".pyc" pyo = TESTFN + ".pyo" f = open(source, "w") print >> f, "# This will test Python's ability to import a .py file" a = random.randrange(1000) b = random.randrange(1000) print >> f, "a =", a print >> f, "b =", b f.close() try: try: mod = __import__(TESTFN) except ImportError, err: raise ValueError, "import from .py failed: %s" % err if mod.a != a or mod.b != b: print a, "!=", mod.a print b, "!=", mod.b raise ValueError, "module loaded (%s) but contents invalid" % mod finally: os.unlink(source) try: try: reload(mod) except ImportError, err: raise ValueError, "import from .pyc/.pyo failed: %s" % err finally: try: os.unlink(pyc) except os.error: pass try: os.unlink(pyo) except os.error: pass del sys.path[0]
Insert the current directory to the front of sys.path -- and remove it at the end. This fixes a problem where
Insert the current directory to the front of sys.path -- and remove it at the end. This fixes a problem where python Lib/test/test_import.py failed while "make test" succeeded.
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
2951520fab9f213322584327c9e5841fe13fc993
tests/run.py
tests/run.py
#! /usr/bin/env python3 import sys from colour_runner.django_runner import ColourRunnerMixin from django.conf import settings settings.configure( INSTALLED_APPS=( # Put contenttypes before auth to work around test issue. # See: https://code.djangoproject.com/ticket/10827#comment:12 'django.contrib.contenttypes', 'django.contrib.auth', 'django.contrib.sessions', 'django.contrib.admin', 'django-admin-sso', 'django-crispy-forms', 'incuna_auth', ), PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',), AUTH_USER_MODEL='tests.User', ROOT_URLCONF='incuna_auth.urls', REST_FRAMEWORK={ 'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',), 'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',), }, ) from django.test.runner import DiscoverRunner class Runner(ColourRunnerMixin, DiscoverRunner): pass test_runner = Runner(verbosity=1) failures = test_runner.run_tests(['tests']) if failures: sys.exit(1)
import sys from colour_runner.django_runner import ColourRunnerMixin from django.conf import settings settings.configure( INSTALLED_APPS=( # Put contenttypes before auth to work around test issue. # See: https://code.djangoproject.com/ticket/10827#comment:12 'django.contrib.contenttypes', 'django.contrib.auth', 'django.contrib.sessions', 'django.contrib.admin', 'django-admin-sso', 'django-crispy-forms', 'incuna_auth', ), PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',), AUTH_USER_MODEL='tests.User', ROOT_URLCONF='incuna_auth.urls', REST_FRAMEWORK={ 'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',), 'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',), }, ) from django.test.runner import DiscoverRunner class Runner(ColourRunnerMixin, DiscoverRunner): pass test_runner = Runner(verbosity=1) failures = test_runner.run_tests(['tests']) if failures: sys.exit(1)
Remove unnecessary Python 3 declaration.
Remove unnecessary Python 3 declaration.
Python
bsd-2-clause
incuna/incuna-auth,incuna/incuna-auth,ghickman/incuna-auth,ghickman/incuna-auth
00aad9bc179aa4a090f703db9669e8ba49ff8f3c
bibliopixel/main/arguments.py
bibliopixel/main/arguments.py
from .. project import project """Common command line arguments for run and demo.""" def add_to_parser(parser): parser.add_argument( '-d', '--driver', default='simpixel', help='Default driver type if no driver is specified') parser.add_argument( '-l', '--layout', default='matrix', help='Default layout class if no layout is specified') parser.add_argument( '-t', '--ledtype', default=None, help='Default LED type if no LED type is specified') parser.add_argument( '-a', '--animation', default=None, help='Default animation type if no animation is specified') parser.add_argument( '-s', action='store_true', help='Run SimPixel at the default URL') parser.add_argument('--simpixel', help='Run SimPixel at a specific URL') def get_dict(args): result = {} for name in 'driver', 'layout', 'animation': value = args and getattr(args, name) result[name] = {'typename': value} if value else {} if args and args.ledtype: result['driver']['ledtype'] = args.ledtype return result def make_animation(args, desc): return project.project_to_animation(desc, get_dict(args))
import json from .. project import project """Common command line arguments for run and demo.""" COMPONENTS = 'driver', 'layout', 'animation' def add_to_parser(parser): parser.add_argument( '-d', '--driver', default='simpixel', help='Default driver type if no driver is specified') parser.add_argument( '-l', '--layout', default='matrix', help='Default layout class if no layout is specified') parser.add_argument( '-t', '--ledtype', default=None, help='Default LED type if no LED type is specified') parser.add_argument( '-a', '--animation', default=None, help='Default animation type if no animation is specified') parser.add_argument( '-s', action='store_true', help='Run SimPixel at the default URL') parser.add_argument('--simpixel', help='Run SimPixel at a specific URL') def get_dict(args): def get_value(name): value = args and getattr(args, name) if not value: return {} if '{' in value: return json.loads(value) return {'typename': value} result = {name: get_value(name) for name in COMPONENTS} if args and args.ledtype: result['driver']['ledtype'] = args.ledtype return result def make_animation(args, desc): return project.project_to_animation(desc, get_dict(args))
Allow json in component flags.
Allow json in component flags.
Python
mit
ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel
5b9e2849c6ee49d68968fdc2588fefd5a25e7bac
contrib/migrateticketmodel.py
contrib/migrateticketmodel.py
#!/usr/bin/env python # # This script completely migrates a <= 0.8.x Trac environment to use the new # default ticket model introduced in Trac 0.9. # # In particular, this means that the severity field is removed (or rather # disabled by removing all possible values), and the priority values are # changed to the more meaningful new defaults. # # Make sure to make a backup of the Trac environment before running this! import sys from trac.env import open_environment from trac.ticket.model import Priority, Severity priority_mapping = { 'highest': 'blocker', 'high': 'critical', 'normal': 'major', 'low': 'minor', 'lowest': 'trivial' } def main(): if len(sys.argv) < 2: print >> sys.stderr, 'usage: %s /path/to/projenv' \ % os.path.basename(sys.argv[0]) sys.exit(2) env = open_environment(sys.argv[1]) db = env.get_db_cnx() for oldprio, newprio in priority_mapping.items(): priority = Priority(env, oldprio, db) priority.name = newprio priority.update(db) for severity in list(Severity.select(env, db)): severity.delete(db) db.commit() if __name__ == '__main__': main()
#!/usr/bin/env python # # This script completely migrates a <= 0.8.x Trac environment to use the new # default ticket model introduced in Trac 0.9. # # In particular, this means that the severity field is removed (or rather # disabled by removing all possible values), and the priority values are # changed to the more meaningful new defaults. # # Make sure to make a backup of the Trac environment before running this! import os import sys from trac.env import open_environment from trac.ticket.model import Priority, Severity priority_mapping = { 'highest': 'blocker', 'high': 'critical', 'normal': 'major', 'low': 'minor', 'lowest': 'trivial' } def main(): if len(sys.argv) < 2: print >> sys.stderr, 'usage: %s /path/to/projenv' \ % os.path.basename(sys.argv[0]) sys.exit(2) env = open_environment(sys.argv[1]) db = env.get_db_cnx() for oldprio, newprio in priority_mapping.items(): priority = Priority(env, oldprio, db) priority.name = newprio priority.update(db) for severity in list(Severity.select(env, db)): severity.delete(db) db.commit() if __name__ == '__main__': main()
Fix missing import in contrib script added in [2630].
Fix missing import in contrib script added in [2630]. git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@2631 af82e41b-90c4-0310-8c96-b1721e28e2e2
Python
bsd-3-clause
rbaumg/trac,rbaumg/trac,rbaumg/trac,rbaumg/trac
97ffd9f5271ffb93b04da06866591f6e6650d76b
bluebottle/settings/travis.py
bluebottle/settings/travis.py
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q==' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, }
# NOTE: local.py must be an empty file when using this configuration. from .defaults import * # Put the travis-ci environment specific overrides below. SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q==' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, }
Fix Travis config so that the test run.
Fix Travis config so that the test run.
Python
bsd-3-clause
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
802d030087d7f15add5ccfa5d305555632575642
changes/jobs/cleanup_tasks.py
changes/jobs/cleanup_tasks.py
from __future__ import absolute_import from datetime import datetime, timedelta from changes.config import queue from changes.constants import Status from changes.experimental.stats import RCount from changes.models import Task from changes.queue.task import TrackedTask, tracked_task CHECK_TIME = timedelta(minutes=60) @tracked_task def cleanup_tasks(): with RCount('cleanup_tasks'): """ Find any tasks which haven't checked in within a reasonable time period and requeue them if necessary. """ now = datetime.utcnow() cutoff = now - CHECK_TIME pending_tasks = Task.query.filter( Task.status != Status.finished, Task.date_modified < cutoff, ) for task in pending_tasks: task_func = TrackedTask(queue.get_task(task.task_name)) task_func.delay( task_id=task.task_id.hex, parent_task_id=task.parent_id.hex if task.parent_id else None, **task.data['kwargs'] )
from __future__ import absolute_import from datetime import datetime, timedelta from changes.config import queue from changes.constants import Status from changes.experimental.stats import RCount, incr from changes.models import Task from changes.queue.task import TrackedTask, tracked_task CHECK_TIME = timedelta(minutes=60) @tracked_task def cleanup_tasks(): with RCount('cleanup_tasks'): """ Find any tasks which haven't checked in within a reasonable time period and requeue them if necessary. """ now = datetime.utcnow() cutoff = now - CHECK_TIME pending_tasks = Task.query.filter( Task.status != Status.finished, Task.date_modified < cutoff, ) for task in pending_tasks: incr('cleanup_unfinished') task_func = TrackedTask(queue.get_task(task.task_name)) task_func.delay( task_id=task.task_id.hex, parent_task_id=task.parent_id.hex if task.parent_id else None, **task.data['kwargs'] )
Add counter for cleanup tasks not following the decorator
Add counter for cleanup tasks not following the decorator
Python
apache-2.0
bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,dropbox/changes
c69b9519c2984154dd15d31395d9590e00d689b5
allauth/socialaccount/providers/trello/provider.py
allauth/socialaccount/providers/trello/provider.py
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
from allauth.socialaccount.providers.base import ProviderAccount from allauth.socialaccount.providers.oauth.provider import OAuthProvider class TrelloAccount(ProviderAccount): def get_profile_url(self): return None def get_avatar_url(self): return None class TrelloProvider(OAuthProvider): id = 'trello' name = 'Trello' account_class = TrelloAccount def get_default_scope(self): return ['read'] def extract_uid(self, data): return data['id'] def get_auth_params(self, request, action): data = super(TrelloProvider, self).get_auth_params(request, action) app = self.get_app(request) data['type'] = 'web_server' data['name'] = app.name data['scope'] = self.get_scope(request) # define here for how long it will be, this can be configured on the # social app data['expiration'] = 'never' return data provider_classes = [TrelloProvider]
Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
feat(TrelloProvider): Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
Python
mit
lukeburden/django-allauth,rsalmaso/django-allauth,lukeburden/django-allauth,bittner/django-allauth,rsalmaso/django-allauth,pennersr/django-allauth,bittner/django-allauth,bittner/django-allauth,pennersr/django-allauth,lukeburden/django-allauth,pennersr/django-allauth,rsalmaso/django-allauth
b6db7abfd59a1b97fbb4d1b867e3316c029c94ff
spec/Report_S06_spec.py
spec/Report_S06_spec.py
from expects import expect, equal from primestg.report import Report from ast import literal_eval with description('Report S06 example'): with before.all: self.data_filenames = [ 'spec/data/S06.xml', # 'spec/data/S06_empty.xml' ] self.report = [] for data_filename in self.data_filenames: with open(data_filename) as data_file: self.report.append(Report(data_file)) with it('generates the expected results for the whole report'): result_filenames = [] for data_filename in self.data_filenames: result_filenames.append('{}_result.txt'.format(data_filename)) for key, result_filename in enumerate(result_filenames): with open(result_filename) as result_file: result_string = result_file.read() expected_result = literal_eval(result_string) result = self.report[key].values expect(result).to(equal(expected_result)) # result_filename = '{}_result.txt'.format(self.data_filename) # # with open(result_filename) as result_file: # result_string = result_file.read() # self.expected_result = literal_eval(result_string) # # result = self.report.values # # expect(result).to(equal(self.expected_result))
from expects import expect, equal from primestg.report import Report from ast import literal_eval with description('Report S06 example'): with before.all: self.data_filenames = [ 'spec/data/S06.xml', 'spec/data/S06_with_error.xml', # 'spec/data/S06_empty.xml' ] self.report = [] for data_filename in self.data_filenames: with open(data_filename) as data_file: self.report.append(Report(data_file)) with it('generates the expected results for the whole report'): result_filenames = [] warnings = [] for data_filename in self.data_filenames: result_filenames.append('{}_result.txt'.format(data_filename)) for key, result_filename in enumerate(result_filenames): result = [] with open(result_filename) as result_file: result_string = result_file.read() expected_result = literal_eval(result_string) for cnc in self.report[key].concentrators: if cnc.meters: for meter in cnc.meters: for value in meter.values: result.append(value) warnings.append(meter.warnings) print('Result: {} \n Expected result: {} \n Warnings: {}'.format( result, expected_result, warnings)) expect(result).to(equal(expected_result)) expected_warnings = [[], ["ERROR: Cnc(CIR4621704174), " "Meter(ZIV42553686). Thrown exception: " "object of type 'NoneType' has no len()"], []] expect(warnings).to(equal(expected_warnings))
TEST for correct an with errors S06 report
TEST for correct an with errors S06 report
Python
agpl-3.0
gisce/primestg
d7ea1e9c7728b5e98e6c798ab3d5ef5b9066463c
barrage/basetestcases.py
barrage/basetestcases.py
from .baselauncher import BaseLauncher class BaseTestCases(BaseLauncher): def handle_problem_set(self, name, problems): for i, prob in enumerate(problems): answer_got = self.get_answer(prob, name, i, len(problems)) if not answer_got: return False if not prob.validate(answer_got): try: answer_expected = prob.Answer().for_problem(prob) except NotImplementedError: print("\nFAILED. STDIN:\n{}\nGOT:\n{}" .format(prob.to_stdin(), stdout)) else: print("\nFAILED. STDIN:\n{}\nEXPECTED:\n{}\nGOT:\n{}" .format(prob.to_stdin(), answer_expected.to_stdout(), stdout)) return False print("") return True
from .baselauncher import BaseLauncher class BaseTestCases(BaseLauncher): def handle_problem_set(self, name, problems): for i, prob in enumerate(problems): answer_got = self.get_answer(prob, name, i, len(problems)) if not answer_got: return False if not prob.validate(answer_got): try: answer_expected = prob.Answer().for_problem(prob) except NotImplementedError: print("\nFAILED. STDIN:\n{}\nGOT:\n{}" .format(prob.to_stdin(), answer_got.to_stdout())) else: print("\nFAILED. STDIN:\n{}\nEXPECTED:\n{}\nGOT:\n{}" .format(prob.to_stdin(), answer_expected.to_stdout(), answer_got.to_stdout())) return False print("") return True
Fix a bug with application stdout print
Fix a bug with application stdout print
Python
mit
vnetserg/barrage
8a6bc4a46141b42d4457fdc4d63df234f788253d
django_nose/plugin.py
django_nose/plugin.py
class ResultPlugin(object): """ Captures the TestResult object for later inspection. nose doesn't return the full test result object from any of its runner methods. Pass an instance of this plugin to the TestProgram and use ``result`` after running the tests to get the TestResult object. """ name = "result" enabled = True def finalize(self, result): self.result = result class DjangoSetUpPlugin(object): """ Configures Django to setup and tear down the environment. This allows coverage to report on all code imported and used during the initialisation of the test runner. """ name = "django setup" enabled = True # We need this to run before the coverage plugin (which has a score # of 500), so that we still have a stdout for the user interaction # Django sometimes wants to do during test database setup. score = 700 def __init__(self, runner): super(DjangoSetUpPlugin, self).__init__() self.runner = runner def begin(self): """Setup the environment""" self.runner.setup_test_environment() self.old_names = self.runner.setup_databases() def finalize(self, result): """Destroy the environment""" self.runner.teardown_databases(self.old_names) self.runner.teardown_test_environment()
import sys class ResultPlugin(object): """ Captures the TestResult object for later inspection. nose doesn't return the full test result object from any of its runner methods. Pass an instance of this plugin to the TestProgram and use ``result`` after running the tests to get the TestResult object. """ name = "result" enabled = True def finalize(self, result): self.result = result class DjangoSetUpPlugin(object): """ Configures Django to setup and tear down the environment. This allows coverage to report on all code imported and used during the initialisation of the test runner. """ name = "django setup" enabled = True def __init__(self, runner): super(DjangoSetUpPlugin, self).__init__() self.runner = runner self.sys_stdout = sys.stdout def begin(self): """Setup the environment""" sys_stdout = sys.stdout sys.stdout = self.sys_stdout self.runner.setup_test_environment() self.old_names = self.runner.setup_databases() sys.stdout = sys_stdout def finalize(self, result): """Destroy the environment""" self.runner.teardown_databases(self.old_names) self.runner.teardown_test_environment()
Allow coverage to work and keep stdout and be activated before initial imports.
Allow coverage to work and keep stdout and be activated before initial imports.
Python
bsd-3-clause
aristiden7o/django-nose,harukaeru/django-nose,disqus/django-nose,dgladkov/django-nose,mzdaniel/django-nose,sociateru/django-nose,krinart/django-nose,alexhayes/django-nose,daineX/django-nose,harukaeru/django-nose,mzdaniel/django-nose,Deepomatic/django-nose,krinart/django-nose,fabiosantoscode/django-nose-123-fix,alexhayes/django-nose,daineX/django-nose,dgladkov/django-nose,sociateru/django-nose,aristiden7o/django-nose,millerdev/django-nose,Deepomatic/django-nose,franciscoruiz/django-nose,360youlun/django-nose,disqus/django-nose,franciscoruiz/django-nose,fabiosantoscode/django-nose-123-fix,millerdev/django-nose,brilliant-org/django-nose,360youlun/django-nose,brilliant-org/django-nose
9c037ed3ebe7353b419562311bbc1f07875ab358
django_su/forms.py
django_su/forms.py
# -*- coding: utf-8 -*- from django import forms from django.conf import settings from django.utils.translation import ugettext_lazy as _ from . import get_user_model class UserSuForm(forms.Form): user = forms.ModelChoiceField( label=_('Users'), queryset=get_user_model()._default_manager.order_by( 'username'), required=True) # pylint: disable=W0212 use_ajax_select = False def __init__(self, *args, **kwargs): super(UserSuForm, self).__init__(*args, **kwargs) if 'ajax_select' in settings.INSTALLED_APPS and getattr( settings, 'AJAX_LOOKUP_CHANNELS', None): from ajax_select.fields import AutoCompleteSelectField lookup = settings.AJAX_LOOKUP_CHANNELS.get('django_su', None) if lookup is not None: old_field = self.fields['user'] self.fields['user'] = AutoCompleteSelectField( 'django_su', required=old_field.required, label=old_field.label, ) self.use_ajax_select = True def get_user(self): return self.cleaned_data.get('user', None) def __str__(self): if 'formadmin' in settings.INSTALLED_APPS: try: from formadmin.forms import as_django_admin return as_django_admin(self) except ImportError: pass return super(UserSuForm, self).__str__()
# -*- coding: utf-8 -*- from django import forms from django.conf import settings from django.utils.translation import ugettext_lazy as _ from . import get_user_model class UserSuForm(forms.Form): username_field = get_user_model().USERNAME_FIELD user = forms.ModelChoiceField( label=_('Users'), queryset=get_user_model()._default_manager.order_by( username_field), required=True) # pylint: disable=W0212 use_ajax_select = False def __init__(self, *args, **kwargs): super(UserSuForm, self).__init__(*args, **kwargs) if 'ajax_select' in settings.INSTALLED_APPS and getattr( settings, 'AJAX_LOOKUP_CHANNELS', None): from ajax_select.fields import AutoCompleteSelectField lookup = settings.AJAX_LOOKUP_CHANNELS.get('django_su', None) if lookup is not None: old_field = self.fields['user'] self.fields['user'] = AutoCompleteSelectField( 'django_su', required=old_field.required, label=old_field.label, ) self.use_ajax_select = True def get_user(self): return self.cleaned_data.get('user', None) def __str__(self): if 'formadmin' in settings.INSTALLED_APPS: try: from formadmin.forms import as_django_admin return as_django_admin(self) except ImportError: pass return super(UserSuForm, self).__str__()
Update UserSuForm to enhance compatibility with custom user models.
Update UserSuForm to enhance compatibility with custom user models. In custom user models, we cannot rely on there being a 'username' field. Instead, we should use whichever field has been specified as the username field.
Python
mit
adamcharnock/django-su,PetrDlouhy/django-su,adamcharnock/django-su,PetrDlouhy/django-su
f100faade749d86597e1c8c52b88d55261e7a4dc
suorganizer/wsgi.py
suorganizer/wsgi.py
""" WSGI config for suorganizer project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "suorganizer.settings") application = get_wsgi_application()
""" WSGI config for suorganizer project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application from whitenoise.django import DjangoWhiteNoise os.environ.setdefault("DJANGO_SETTINGS_MODULE", "suorganizer.settings") application = get_wsgi_application() application = DjangoWhiteNoise(application)
Use WhiteNoise for static content.
Ch29: Use WhiteNoise for static content.
Python
bsd-2-clause
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
4412a59bfe8228698e5b5bbe8bb21c8e8a70d357
test/functional/feature_shutdown.py
test/functional/feature_shutdown.py
#!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test bitcoind shutdown.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, get_rpc_proxy from threading import Thread def test_long_call(node): block = node.waitfornewblock() assert_equal(block['height'], 0) class ShutdownTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def run_test(self): node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir) Thread(target=test_long_call, args=(node,)).start() # wait 1 second to ensure event loop waits for current connections to close self.stop_node(0, wait=1000) if __name__ == '__main__': ShutdownTest().main()
#!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test bitcoind shutdown.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, get_rpc_proxy, wait_until from threading import Thread def test_long_call(node): block = node.waitfornewblock() assert_equal(block['height'], 0) class ShutdownTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def run_test(self): node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir) # Force connection establishment by executing a dummy command. node.getblockcount() Thread(target=test_long_call, args=(node,)).start() # Wait until the server is executing the above `waitfornewblock`. wait_until(lambda: len(self.nodes[0].getrpcinfo()['active_commands']) == 2) # Wait 1 second after requesting shutdown but not before the `stop` call # finishes. This is to ensure event loop waits for current connections # to close. self.stop_node(0, wait=1000) if __name__ == '__main__': ShutdownTest().main()
Remove race between connecting and shutdown on separate connections
qa: Remove race between connecting and shutdown on separate connections
Python
mit
fujicoin/fujicoin,myriadteam/myriadcoin,apoelstra/bitcoin,prusnak/bitcoin,namecoin/namecore,midnightmagic/bitcoin,jamesob/bitcoin,fujicoin/fujicoin,pataquets/namecoin-core,r8921039/bitcoin,lateminer/bitcoin,DigitalPandacoin/pandacoin,Sjors/bitcoin,sipsorcery/bitcoin,bitcoin/bitcoin,AkioNak/bitcoin,bespike/litecoin,particl/particl-core,EthanHeilman/bitcoin,ajtowns/bitcoin,ahmedbodi/vertcoin,namecoin/namecoin-core,dscotese/bitcoin,GroestlCoin/GroestlCoin,domob1812/bitcoin,qtumproject/qtum,sipsorcery/bitcoin,monacoinproject/monacoin,instagibbs/bitcoin,litecoin-project/litecoin,jonasschnelli/bitcoin,jambolo/bitcoin,rnicoll/bitcoin,DigitalPandacoin/pandacoin,prusnak/bitcoin,droark/bitcoin,vertcoin/vertcoin,myriadcoin/myriadcoin,rnicoll/bitcoin,rnicoll/bitcoin,mruddy/bitcoin,ElementsProject/elements,namecoin/namecore,GroestlCoin/GroestlCoin,bespike/litecoin,droark/bitcoin,wellenreiter01/Feathercoin,jtimon/bitcoin,jonasschnelli/bitcoin,bitcoin/bitcoin,vertcoin/vertcoin,vertcoin/vertcoin,dscotese/bitcoin,jonasschnelli/bitcoin,monacoinproject/monacoin,nikkitan/bitcoin,ElementsProject/elements,DigitalPandacoin/pandacoin,domob1812/namecore,MarcoFalke/bitcoin,Sjors/bitcoin,FeatherCoin/Feathercoin,rnicoll/dogecoin,EthanHeilman/bitcoin,andreaskern/bitcoin,dscotese/bitcoin,CryptArc/bitcoin,droark/bitcoin,afk11/bitcoin,jamesob/bitcoin,litecoin-project/litecoin,gjhiggins/vcoincore,apoelstra/bitcoin,instagibbs/bitcoin,bitcoinsSG/bitcoin,sstone/bitcoin,GroestlCoin/bitcoin,GroestlCoin/bitcoin,mitchellcash/bitcoin,MarcoFalke/bitcoin,lateminer/bitcoin,afk11/bitcoin,kallewoof/bitcoin,anditto/bitcoin,andreaskern/bitcoin,AkioNak/bitcoin,domob1812/namecore,MeshCollider/bitcoin,achow101/bitcoin,tjps/bitcoin,pataquets/namecoin-core,CryptArc/bitcoin,peercoin/peercoin,bitcoin/bitcoin,ajtowns/bitcoin,namecoin/namecoin-core,myriadcoin/myriadcoin,alecalve/bitcoin,achow101/bitcoin,jamesob/bitcoin,CryptArc/bitcoin,domob1812/bitcoin,jamesob/bitcoin,wellenreiter01/Feathercoin,wellenreiter01/Feathercoin,JeremyRubin/bitcoin,pstratem/bitcoin,CryptArc/bitcoin,tjps/bitcoin,achow101/bitcoin,prusnak/bitcoin,dscotese/bitcoin,pstratem/bitcoin,Xekyo/bitcoin,bitcoinknots/bitcoin,kallewoof/bitcoin,jtimon/bitcoin,jtimon/bitcoin,bitcoinsSG/bitcoin,untrustbank/litecoin,MarcoFalke/bitcoin,cdecker/bitcoin,MeshCollider/bitcoin,GroestlCoin/GroestlCoin,untrustbank/litecoin,myriadcoin/myriadcoin,apoelstra/bitcoin,pstratem/bitcoin,Sjors/bitcoin,nikkitan/bitcoin,ElementsProject/elements,monacoinproject/monacoin,rnicoll/bitcoin,Xekyo/bitcoin,mm-s/bitcoin,cdecker/bitcoin,OmniLayer/omnicore,MeshCollider/bitcoin,pataquets/namecoin-core,jlopp/statoshi,litecoin-project/litecoin,bitcoinsSG/bitcoin,mitchellcash/bitcoin,anditto/bitcoin,midnightmagic/bitcoin,anditto/bitcoin,bespike/litecoin,jambolo/bitcoin,qtumproject/qtum,yenliangl/bitcoin,FeatherCoin/Feathercoin,n1bor/bitcoin,ahmedbodi/vertcoin,MarcoFalke/bitcoin,OmniLayer/omnicore,afk11/bitcoin,tjps/bitcoin,bitcoinknots/bitcoin,jlopp/statoshi,namecoin/namecore,alecalve/bitcoin,kallewoof/bitcoin,MarcoFalke/bitcoin,jlopp/statoshi,yenliangl/bitcoin,bitcoinknots/bitcoin,FeatherCoin/Feathercoin,pstratem/bitcoin,qtumproject/qtum,jnewbery/bitcoin,ElementsProject/elements,bitcoinknots/bitcoin,MeshCollider/bitcoin,monacoinproject/monacoin,gjhiggins/vcoincore,myriadcoin/myriadcoin,instagibbs/bitcoin,EthanHeilman/bitcoin,n1bor/bitcoin,pstratem/bitcoin,peercoin/peercoin,anditto/bitcoin,ajtowns/bitcoin,andreaskern/bitcoin,mitchellcash/bitcoin,CryptArc/bitcoin,tecnovert/particl-core,n1bor/bitcoin,bespike/litecoin,OmniLayer/omnicore,untrustbank/litecoin,DigitalPandacoin/pandacoin,tecnovert/particl-core,particl/particl-core,ahmedbodi/vertcoin,andreaskern/bitcoin,OmniLayer/omnicore,DigitalPandacoin/pandacoin,JeremyRubin/bitcoin,midnightmagic/bitcoin,sipsorcery/bitcoin,jambolo/bitcoin,lateminer/bitcoin,qtumproject/qtum,GroestlCoin/bitcoin,namecoin/namecoin-core,pataquets/namecoin-core,bespike/litecoin,jamesob/bitcoin,practicalswift/bitcoin,MeshCollider/bitcoin,myriadteam/myriadcoin,mruddy/bitcoin,OmniLayer/omnicore,instagibbs/bitcoin,jnewbery/bitcoin,FeatherCoin/Feathercoin,jnewbery/bitcoin,jlopp/statoshi,dscotese/bitcoin,midnightmagic/bitcoin,AkioNak/bitcoin,n1bor/bitcoin,cdecker/bitcoin,apoelstra/bitcoin,fanquake/bitcoin,rnicoll/dogecoin,instagibbs/bitcoin,wellenreiter01/Feathercoin,mitchellcash/bitcoin,afk11/bitcoin,alecalve/bitcoin,prusnak/bitcoin,qtumproject/qtum,ajtowns/bitcoin,sstone/bitcoin,rnicoll/bitcoin,peercoin/peercoin,namecoin/namecore,gjhiggins/vcoincore,prusnak/bitcoin,untrustbank/litecoin,rnicoll/dogecoin,bitcoin/bitcoin,apoelstra/bitcoin,GroestlCoin/GroestlCoin,achow101/bitcoin,jambolo/bitcoin,jtimon/bitcoin,bitcoin/bitcoin,domob1812/namecore,namecoin/namecoin-core,myriadteam/myriadcoin,prusnak/bitcoin,sstone/bitcoin,DigitalPandacoin/pandacoin,JeremyRubin/bitcoin,mm-s/bitcoin,practicalswift/bitcoin,practicalswift/bitcoin,namecoin/namecore,qtumproject/qtum,jonasschnelli/bitcoin,bitcoin/bitcoin,afk11/bitcoin,ElementsProject/elements,sipsorcery/bitcoin,sstone/bitcoin,rnicoll/dogecoin,cdecker/bitcoin,ahmedbodi/vertcoin,domob1812/bitcoin,jonasschnelli/bitcoin,apoelstra/bitcoin,fujicoin/fujicoin,jambolo/bitcoin,sstone/bitcoin,jtimon/bitcoin,tjps/bitcoin,GroestlCoin/bitcoin,jnewbery/bitcoin,litecoin-project/litecoin,peercoin/peercoin,pataquets/namecoin-core,ahmedbodi/vertcoin,mruddy/bitcoin,fujicoin/fujicoin,namecoin/namecore,JeremyRubin/bitcoin,wellenreiter01/Feathercoin,andreaskern/bitcoin,mm-s/bitcoin,fanquake/bitcoin,nikkitan/bitcoin,bitcoinsSG/bitcoin,lateminer/bitcoin,jnewbery/bitcoin,lateminer/bitcoin,CryptArc/bitcoin,JeremyRubin/bitcoin,myriadteam/myriadcoin,domob1812/bitcoin,Xekyo/bitcoin,myriadteam/myriadcoin,fanquake/bitcoin,vertcoin/vertcoin,midnightmagic/bitcoin,alecalve/bitcoin,Sjors/bitcoin,droark/bitcoin,FeatherCoin/Feathercoin,gjhiggins/vcoincore,mitchellcash/bitcoin,midnightmagic/bitcoin,monacoinproject/monacoin,particl/particl-core,AkioNak/bitcoin,FeatherCoin/Feathercoin,afk11/bitcoin,andreaskern/bitcoin,tecnovert/particl-core,GroestlCoin/bitcoin,practicalswift/bitcoin,domob1812/namecore,sstone/bitcoin,practicalswift/bitcoin,myriadcoin/myriadcoin,n1bor/bitcoin,tecnovert/particl-core,jambolo/bitcoin,bitcoinsSG/bitcoin,Sjors/bitcoin,domob1812/namecore,tecnovert/particl-core,qtumproject/qtum,cdecker/bitcoin,yenliangl/bitcoin,nikkitan/bitcoin,sipsorcery/bitcoin,particl/particl-core,mruddy/bitcoin,nikkitan/bitcoin,untrustbank/litecoin,mm-s/bitcoin,anditto/bitcoin,instagibbs/bitcoin,rnicoll/dogecoin,namecoin/namecoin-core,ahmedbodi/vertcoin,jlopp/statoshi,monacoinproject/monacoin,n1bor/bitcoin,achow101/bitcoin,myriadteam/myriadcoin,pstratem/bitcoin,jamesob/bitcoin,MeshCollider/bitcoin,bitcoinsSG/bitcoin,fanquake/bitcoin,r8921039/bitcoin,Xekyo/bitcoin,ElementsProject/elements,AkioNak/bitcoin,jtimon/bitcoin,untrustbank/litecoin,GroestlCoin/GroestlCoin,fanquake/bitcoin,particl/particl-core,kallewoof/bitcoin,EthanHeilman/bitcoin,JeremyRubin/bitcoin,achow101/bitcoin,cdecker/bitcoin,mm-s/bitcoin,tecnovert/particl-core,namecoin/namecoin-core,sipsorcery/bitcoin,droark/bitcoin,gjhiggins/vcoincore,tjps/bitcoin,bitcoinknots/bitcoin,domob1812/bitcoin,peercoin/peercoin,kallewoof/bitcoin,litecoin-project/litecoin,yenliangl/bitcoin,lateminer/bitcoin,mruddy/bitcoin,alecalve/bitcoin,anditto/bitcoin,nikkitan/bitcoin,gjhiggins/vcoincore,particl/particl-core,r8921039/bitcoin,fanquake/bitcoin,peercoin/peercoin,fujicoin/fujicoin,r8921039/bitcoin,vertcoin/vertcoin,yenliangl/bitcoin,myriadcoin/myriadcoin,rnicoll/bitcoin,Xekyo/bitcoin,yenliangl/bitcoin,r8921039/bitcoin,domob1812/namecore,bespike/litecoin,domob1812/bitcoin,GroestlCoin/GroestlCoin,mm-s/bitcoin,OmniLayer/omnicore,kallewoof/bitcoin,vertcoin/vertcoin,fujicoin/fujicoin,droark/bitcoin,alecalve/bitcoin,mitchellcash/bitcoin,Xekyo/bitcoin,tjps/bitcoin,wellenreiter01/Feathercoin,dscotese/bitcoin,pataquets/namecoin-core,mruddy/bitcoin,MarcoFalke/bitcoin,jlopp/statoshi,ajtowns/bitcoin,litecoin-project/litecoin,GroestlCoin/bitcoin,EthanHeilman/bitcoin,r8921039/bitcoin,EthanHeilman/bitcoin,practicalswift/bitcoin,ajtowns/bitcoin,AkioNak/bitcoin
8ae27080b8ff9fe124733005a8006261a3d22266
migrate/crud/versions/001_create_initial_tables.py
migrate/crud/versions/001_create_initial_tables.py
from sqlalchemy import * from migrate import * metadata = MetaData() table = Table('crud_versions', metadata, Column('id', Integer, primary_key=True), Column('object_type', Text, nullable=False), Column('object_id', Integer, nullable=False), Column('commit_time', DateTime, nullable=False), Column('data', Blob, nullable=False), Column('blame', Text, nullable=False), Column('comment', Text, nullable=False), ) def upgrade(migrate_engine): # Upgrade operations go here. Don't create your own engine; bind migrate_engine # to your metadata metadata.bind = migrate_engine table.create() def downgrade(migrate_engine): # Operations to reverse the above upgrade go here. table.drop()
from sqlalchemy import * from migrate import * metadata = MetaData() table = Table('crud_versions', metadata, Column('id', Integer, primary_key=True), Column('object_type', Text, nullable=False), Column('object_id', Integer, nullable=False), Column('commit_time', DateTime, nullable=False), Column('data', LargeBinary, nullable=False), Column('blame', Text, nullable=False), Column('comment', Text, nullable=False), ) def upgrade(migrate_engine): # Upgrade operations go here. Don't create your own engine; bind migrate_engine # to your metadata metadata.bind = migrate_engine table.create() def downgrade(migrate_engine): # Operations to reverse the above upgrade go here. table.drop()
Fix some of the schema.
Fix some of the schema.
Python
bsd-3-clause
mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen
c535d9e105284bb469d10003ee0f5533b8d8d5db
auditlog/__openerp__.py
auditlog/__openerp__.py
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2013 ABF OSIELL (<http://osiell.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': "Audit Log", 'version': "1.0", 'author': "ABF OSIELL", 'website': "http://www.osiell.com", 'category': "Tools", 'depends': [ 'base', ], 'data': [ 'security/ir.model.access.csv', 'views/auditlog_view.xml', ], 'application': True, 'installable': True, 'pre_init_hook': 'pre_init_hook', }
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2013 ABF OSIELL (<http://osiell.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': "Audit Log", 'version': "1.0", 'author': "ABF OSIELL,Odoo Community Association (OCA)", 'website': "http://www.osiell.com", 'category': "Tools", 'depends': [ 'base', ], 'data': [ 'security/ir.model.access.csv', 'views/auditlog_view.xml', ], 'application': True, 'installable': True, 'pre_init_hook': 'pre_init_hook', }
Add OCA as author of OCA addons
Add OCA as author of OCA addons In order to get visibility on https://www.odoo.com/apps the OCA board has decided to add the OCA as author of all the addons maintained as part of the association.
Python
agpl-3.0
brain-tec/server-tools,bmya/server-tools,bmya/server-tools,brain-tec/server-tools,brain-tec/server-tools,bmya/server-tools
5b94ce3796eb37301f2ac6928bfe0a0426bcf31e
docs/config/all.py
docs/config/all.py
# Global configuration information used across all the # translations of documentation. # # Import the base theme configuration from cakephpsphinx.config.all import * # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The full version, including alpha/beta/rc tags. release = '1.x' # The search index version. search_version = 'chronos-1' # The marketing display name for the book. version_name = '' # Project name shown in the black header bar project = 'Chronos' # Other versions that display in the version picker menu. version_list = [ {'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x', 'current': True}, ] # Languages available. languages = ['en', 'fr', 'ja', 'pt'] # The GitHub branch name for this version of the docs # for edit links to point at. branch = 'master' # Current version being built version = '1.x' # Language in use for this directory. language = 'en' show_root_link = True repository = 'cakephp/chronos' source_path = 'docs/'
# Global configuration information used across all the # translations of documentation. # # Import the base theme configuration from cakephpsphinx.config.all import * # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The full version, including alpha/beta/rc tags. release = '2.x' # The search index version. search_version = 'chronos-2' # The marketing display name for the book. version_name = '' # Project name shown in the black header bar project = 'Chronos' # Other versions that display in the version picker menu. version_list = [ {'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x'}, {'name': '2.x', 'number': '/chronos/2.x', 'title': '2.x', 'current': True}, ] # Languages available. languages = ['en', 'fr', 'ja', 'pt'] # The GitHub branch name for this version of the docs # for edit links to point at. branch = '2.x' # Current version being built version = '2.x' # Language in use for this directory. language = 'en' show_root_link = True repository = 'cakephp/chronos' source_path = 'docs/' is_prerelease = True
Update docs versions for 2.x
Update docs versions for 2.x
Python
mit
cakephp/chronos
91acec032abeb942bf90d6522a4d9d38ad624d46
tests/test_buffs.py
tests/test_buffs.py
import unittest from buffs import * class StatusEffectTests(unittest.TestCase): """ StatusEffect is the base class for buffs """ def test_init(self): test_name = 'testman' test_duration = 10 st_ef = StatusEffect(name=test_name, duration=test_duration) self.assertEqual(st_ef.name, test_name) self.assertEqual(st_ef.duration, test_duration) def test_str(self): test_name = 'testman' test_duration = 10 st_ef = StatusEffect(name=test_name, duration=test_duration) expected_str = "Default Status Effect" self.assertEqual(str(st_ef), "Default Status Effect") if __name__ == '__main__': unittest.main()
import unittest from buffs import * class StatusEffectTests(unittest.TestCase): """ StatusEffect is the base class for buffs """ def test_init(self): test_name = 'testman' test_duration = 10 st_ef = StatusEffect(name=test_name, duration=test_duration) self.assertEqual(st_ef.name, test_name) self.assertEqual(st_ef.duration, test_duration) def test_str(self): test_name = 'testman' test_duration = 10 st_ef = StatusEffect(name=test_name, duration=test_duration) expected_str = "Default Status Effect" self.assertEqual(str(st_ef), "Default Status Effect") class BeneficialBuffTests(unittest.TestCase): def test_init(self): name = 'BMW' stats_amounts = [('strength', 10), ('armor', 20), ('health', 30)] duration = 10 buff = BeneficialBuff(name=name, buff_stats_and_amounts=stats_amounts, duration=duration) self.assertEqual(buff.name, name) self.assertEqual(buff.buff_stats_and_amounts, stats_amounts) self.assertEqual(buff.duration, duration) if __name__ == '__main__': unittest.main()
Test for the BeneficialBuff class
Test for the BeneficialBuff class
Python
mit
Enether/python_wow
c90fd7d026cdeeff7d073c1d15ff550cc937f961
dusty/daemon.py
dusty/daemon.py
import sys import logging from .preflight import preflight_check from .notifier import notify def configure_logging(): logging.basicConfig(stream=sys.stdout, level=logging.INFO) logging.captureWarnings(True) def main(): notify('Dusty initializing...') configure_logging() preflight_check() if __name__ == '__main__': main()
import os import sys import logging import socket from .preflight import preflight_check from .notifier import notify SOCKET_PATH = '/var/run/dusty/dusty.sock' def _configure_logging(): logging.basicConfig(stream=sys.stdout, level=logging.INFO) logging.captureWarnings(True) def _clean_up_existing_socket(): try: os.unlink(SOCKET_PATH) except OSError: if os.path.exists(SOCKET_PATH): raise def _listen_on_socket(): _clean_up_existing_socket() sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock.bind(SOCKET_PATH) sock.listen(1) notify('Dusty is listening for commands') while True: connection, client_address = sock.accept() try: while True: data = connection.recv(1024) if not data: break print data finally: connection.close() def main(): notify('Dusty initializing...') _configure_logging() preflight_check() _listen_on_socket() if __name__ == '__main__': main()
Set up a Unix socket we can use for input
Set up a Unix socket we can use for input
Python
mit
gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty
9a74f11d4adfafbddec2e86251ecef17c4196bf2
tests/test_suite.py
tests/test_suite.py
#! /usr/bin/env python from __future__ import absolute_import import unittest from . import unittest_neos from . import unittest_sedumi_writer def main(): """ The main function. """ loader = unittest.TestLoader() suite = unittest.TestSuite() suite.addTest(loader.loadTestsFromModule(unittest_neos)) suite.addTest(loader.loadTestsFromModule(unittest_sedumi_writer)) unittest.TextTestRunner(verbosity=2).run(suite) if __name__ == "__main__": try: main() except KeyboardInterrupt: print "Test canceled."
#! /usr/bin/env python """ Test suite. """ from __future__ import absolute_import import sys import unittest from . import unittest_neos from . import unittest_sedumi_writer def main(): """ The main function. Returns: True if all tests are successful. """ loader = unittest.TestLoader() suite = unittest.TestSuite() suite.addTest(loader.loadTestsFromModule(unittest_neos)) suite.addTest(loader.loadTestsFromModule(unittest_sedumi_writer)) res = unittest.TextTestRunner(verbosity=2).run(suite) return res.wasSuccessful() if __name__ == "__main__": try: sys.exit(0 if main() else 1) except KeyboardInterrupt: print "Test canceled." sys.exit(-1)
Fix a bug to return error status code when tests are failed.
Fix a bug to return error status code when tests are failed.
Python
mit
TrishGillett/pysdpt3glue,discardthree/PySDPT3glue,TrishGillett/pysdpt3glue,discardthree/PySDPT3glue,TrishGillett/pysdpt3glue
6430785e60fcef9bbac3cf4e7c70981f5af6affa
fluent_contents/plugins/sharedcontent/models.py
fluent_contents/plugins/sharedcontent/models.py
from django.db import models from django.utils.translation import ugettext_lazy as _ from parler.models import TranslatableModel, TranslatedFields from fluent_contents.models import ContentItem, PlaceholderField class SharedContent(TranslatableModel): """ The parent hosting object for shared content """ translations = TranslatedFields( title = models.CharField(_("Title"), max_length=200) ) slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates.")) contents = PlaceholderField("shared_content", verbose_name=_("Contents")) # NOTE: settings such as "template_name", and which plugins are allowed can be added later. class Meta: verbose_name = _("Shared content") verbose_name_plural = _("Shared content") def __unicode__(self): return self.title class SharedContentItem(ContentItem): """ The contentitem to include in a page. """ shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items') class Meta: verbose_name = _('Shared content') verbose_name_plural = _('Shared content') def __unicode__(self): return unicode(self.shared_content)
from django.db import models from django.utils.translation import ugettext_lazy as _ from parler.models import TranslatableModel, TranslatedFields from fluent_contents.models import ContentItem, PlaceholderField, ContentItemRelation class SharedContent(TranslatableModel): """ The parent hosting object for shared content """ translations = TranslatedFields( title = models.CharField(_("Title"), max_length=200) ) slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates.")) contents = PlaceholderField("shared_content", verbose_name=_("Contents")) # NOTE: settings such as "template_name", and which plugins are allowed can be added later. # Adding the reverse relation for ContentItem objects # causes the admin to list these objects when moving the shared content contentitem_set = ContentItemRelation() class Meta: verbose_name = _("Shared content") verbose_name_plural = _("Shared content") def __unicode__(self): return self.title class SharedContentItem(ContentItem): """ The contentitem to include in a page. """ shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items') class Meta: verbose_name = _('Shared content') verbose_name_plural = _('Shared content') def __unicode__(self): return unicode(self.shared_content)
Add ContentItemRelation to SharedContent model
Add ContentItemRelation to SharedContent model Displays objects in the admin delete screen.
Python
apache-2.0
jpotterm/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents,ixc/django-fluent-contents,edoburu/django-fluent-contents,jpotterm/django-fluent-contents,django-fluent/django-fluent-contents,pombredanne/django-fluent-contents,jpotterm/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents
fc21802b68cf9a907218dab5b0e22cd8f1dc75d0
djcelery/backends/database.py
djcelery/backends/database.py
from celery.backends.base import BaseDictBackend from djcelery.models import TaskMeta, TaskSetMeta class DatabaseBackend(BaseDictBackend): """The database backends. Using Django models to store task metadata.""" def _store_result(self, task_id, result, status, traceback=None): """Store return value and status of an executed task.""" TaskMeta.objects.store_result(task_id, result, status, traceback=traceback) return result def _save_taskset(self, taskset_id, result): """Store the result of an executed taskset.""" TaskSetMeta.objects.store_result(taskset_id, result) return result def _get_task_meta_for(self, task_id): """Get task metadata for a task by id.""" meta = TaskMeta.objects.get_task(task_id) if meta: return meta.to_dict() def _restore_taskset(self, taskset_id): """Get taskset metadata for a taskset by id.""" meta = TaskSetMeta.objects.restore_taskset(taskset_id) if meta: return meta.to_dict() def cleanup(self): """Delete expired metadata.""" TaskMeta.objects.delete_expired() TaskSetMeta.objects.delete_expired()
from celery.backends.base import BaseDictBackend from djcelery.models import TaskMeta, TaskSetMeta class DatabaseBackend(BaseDictBackend): """The database backends. Using Django models to store task metadata.""" TaskModel = TaskMeta TaskSetModel = TaskSetMeta def _store_result(self, task_id, result, status, traceback=None): """Store return value and status of an executed task.""" self.TaskModel._default_manager.store_result(task_id, result, status, traceback=traceback) return result def _save_taskset(self, taskset_id, result): """Store the result of an executed taskset.""" self.TaskModel._default_manager.store_result(taskset_id, result) return result def _get_task_meta_for(self, task_id): """Get task metadata for a task by id.""" meta = self.TaskModel._default_manager.get_task(task_id) if meta: return meta.to_dict() def _restore_taskset(self, taskset_id): """Get taskset metadata for a taskset by id.""" meta = self.TaskSetModel._default_manager.restore_taskset(taskset_id) if meta: return meta.to_dict() def cleanup(self): """Delete expired metadata.""" for model in self.TaskModel, self.TaskSetModel: model._default_manager.delete_expired()
Make it possible to override the models used to store task/taskset state
DatabaseBackend: Make it possible to override the models used to store task/taskset state
Python
bsd-3-clause
Amanit/django-celery,kanemra/django-celery,axiom-data-science/django-celery,celery/django-celery,alexhayes/django-celery,digimarc/django-celery,tkanemoto/django-celery,iris-edu-int/django-celery,CloudNcodeInc/django-celery,Amanit/django-celery,CloudNcodeInc/django-celery,iris-edu-int/django-celery,CloudNcodeInc/django-celery,celery/django-celery,planorama/django-celery,nadios/django-celery,ask/django-celery,nadios/django-celery,georgewhewell/django-celery,iris-edu-int/django-celery,celery/django-celery,digimarc/django-celery,planorama/django-celery,georgewhewell/django-celery,ask/django-celery,digimarc/django-celery,tkanemoto/django-celery,georgewhewell/django-celery,alexhayes/django-celery,Amanit/django-celery,tkanemoto/django-celery,kanemra/django-celery,axiom-data-science/django-celery,axiom-data-science/django-celery,kanemra/django-celery
97535245f7da3d7e54d64dc384d6cd81caa9a689
tests/test_story.py
tests/test_story.py
from py101 import Story from py101 import variables from py101 import lists import unittest class TestStory(unittest.TestCase): def test_name(self): self.assertEqual(Story().name, 'py101', "name should be py101") class TestAdventureVariables(unittest.TestCase): good_solution = """ myinteger = 4 mystring = 'Python String Here' print(myinteger) print(mystring) """ def test_solution(self): test = variables.TestOutput(self.good_solution) test.setUp() try: test.runTest() finally: test.tearDown() class TestAdventureLists(unittest.TestCase): good_solution = """ languages = ["ADA", "Pascal", "Fortran", "Smalltalk"] print(languages) """ def test_solution(self): test = lists.TestOutput(self.good_solution) test.setUp() try: test.runTest() finally: test.tearDown()
import py101 import py101.boilerplate import py101.introduction import py101.lists import py101.variables import unittest class TestStory(unittest.TestCase): def test_name(self): self.assertEqual(py101.Story().name, 'py101', "name should be py101") class AdventureData(object): def __init__(self, test_module, good_solution): self.module = test_module self.good_solution = good_solution class TestAdventures(unittest.TestCase): adventures = [ AdventureData( py101.boilerplate, "" ), AdventureData( py101.introduction, """print('Hello World')""" ), AdventureData( py101.variables, """myinteger = 4; mystring = 'Python String Here'; print(myinteger); print(mystring)""" ), AdventureData( py101.lists, """languages = ["ADA", "Pascal", "Fortran", "Smalltalk"]; print(languages)""" ) ] def test_solution(self): for adventure in self.adventures: with self.subTest(adventure=adventure.module.__name__): test = adventure.module.TestOutput(adventure.good_solution) test.setUp() try: test.runTest() finally: test.tearDown()
Refactor tests to remove duplicate code
Refactor tests to remove duplicate code
Python
mit
sophilabs/py101
510afd0c93c333e86511fb6f6b9e96a434d54d00
zerver/migrations/0174_userprofile_delivery_email.py
zerver/migrations/0174_userprofile_delivery_email.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-07-05 17:57 from __future__ import unicode_literals from django.db import migrations, models from django.apps import apps from django.db.models import F from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps def copy_email_field(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: UserProfile = apps.get_model('zerver', 'UserProfile') UserProfile.objects.all().update(delivery_email=F('email')) class Migration(migrations.Migration): dependencies = [ ('zerver', '0173_support_seat_based_plans'), ] operations = [ migrations.AddField( model_name='userprofile', name='delivery_email', field=models.EmailField(db_index=True, default='', max_length=254), preserve_default=False, ), migrations.RunPython(copy_email_field, reverse_code=migrations.RunPython.noop), ]
# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-07-05 17:57 from __future__ import unicode_literals from django.db import migrations, models from django.apps import apps from django.db.models import F from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps def copy_email_field(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: UserProfile = apps.get_model('zerver', 'UserProfile') UserProfile.objects.all().update(delivery_email=F('email')) class Migration(migrations.Migration): atomic = False dependencies = [ ('zerver', '0173_support_seat_based_plans'), ] operations = [ migrations.AddField( model_name='userprofile', name='delivery_email', field=models.EmailField(db_index=True, default='', max_length=254), preserve_default=False, ), migrations.RunPython(copy_email_field, reverse_code=migrations.RunPython.noop), ]
Disable atomic for delivery_email migration.
migrations: Disable atomic for delivery_email migration. I'm not sure theoretically why this should be required only for some installations, but these articles all suggest the root problem is doing these two migrations together atomically (creating the field and setting a value for it), so the right answer is to declare the migration as not atomic: https://stackoverflow.com/questions/12838111/django-db-migrations-cannot-alter-table-because-it-has-pending-trigger-events https://confluence.atlassian.com/confkb/upgrade-failed-with-the-error-message-error-cannot-alter-table-content-because-it-has-pending-trigger-events-747606853.html
Python
apache-2.0
dhcrzf/zulip,zulip/zulip,zulip/zulip,showell/zulip,dhcrzf/zulip,hackerkid/zulip,jackrzhang/zulip,eeshangarg/zulip,tommyip/zulip,brainwane/zulip,tommyip/zulip,synicalsyntax/zulip,tommyip/zulip,shubhamdhama/zulip,rht/zulip,dhcrzf/zulip,timabbott/zulip,shubhamdhama/zulip,rht/zulip,brainwane/zulip,hackerkid/zulip,synicalsyntax/zulip,andersk/zulip,dhcrzf/zulip,brainwane/zulip,timabbott/zulip,dhcrzf/zulip,punchagan/zulip,rht/zulip,zulip/zulip,shubhamdhama/zulip,rishig/zulip,kou/zulip,showell/zulip,rht/zulip,jackrzhang/zulip,zulip/zulip,hackerkid/zulip,punchagan/zulip,andersk/zulip,rishig/zulip,kou/zulip,punchagan/zulip,brainwane/zulip,dhcrzf/zulip,rishig/zulip,synicalsyntax/zulip,timabbott/zulip,hackerkid/zulip,jackrzhang/zulip,timabbott/zulip,timabbott/zulip,zulip/zulip,jackrzhang/zulip,hackerkid/zulip,kou/zulip,kou/zulip,shubhamdhama/zulip,brainwane/zulip,kou/zulip,rishig/zulip,zulip/zulip,synicalsyntax/zulip,zulip/zulip,eeshangarg/zulip,shubhamdhama/zulip,andersk/zulip,timabbott/zulip,rishig/zulip,brainwane/zulip,shubhamdhama/zulip,rht/zulip,punchagan/zulip,punchagan/zulip,timabbott/zulip,brainwane/zulip,eeshangarg/zulip,showell/zulip,showell/zulip,tommyip/zulip,rishig/zulip,punchagan/zulip,jackrzhang/zulip,eeshangarg/zulip,punchagan/zulip,rht/zulip,jackrzhang/zulip,andersk/zulip,eeshangarg/zulip,tommyip/zulip,hackerkid/zulip,tommyip/zulip,synicalsyntax/zulip,rishig/zulip,synicalsyntax/zulip,kou/zulip,andersk/zulip,andersk/zulip,kou/zulip,andersk/zulip,synicalsyntax/zulip,eeshangarg/zulip,showell/zulip,dhcrzf/zulip,showell/zulip,eeshangarg/zulip,tommyip/zulip,shubhamdhama/zulip,hackerkid/zulip,showell/zulip,jackrzhang/zulip,rht/zulip
ad477285f4458145bca378b74dcb8cfe3abeaf06
froide/bounce/apps.py
froide/bounce/apps.py
import json from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class BounceConfig(AppConfig): name = 'froide.bounce' verbose_name = _('Bounce') def ready(self): from froide.account import account_canceled from froide.account.export import registry account_canceled.connect(cancel_user) registry.register(export_user_data) def cancel_user(sender, user=None, **kwargs): from .models import Bounce if user is None: return Bounce.objects.filter(user=user).delete() def export_user_data(user): from .models import Bounce bounces = Bounce.objects.filter(user=user) if not bounces: return yield ('bounces.json', json.dumps([ { 'last_update': ( b.last_update.isoformat() if b.last_update else None ), 'bounces': b.bounces, 'email': b.email, } for b in bounces]).encode('utf-8') )
import json from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class BounceConfig(AppConfig): name = 'froide.bounce' verbose_name = _('Bounce') def ready(self): from froide.account import account_canceled from froide.account.export import registry from froide.helper.email_sending import mail_middleware_registry account_canceled.connect(cancel_user) registry.register(export_user_data) mail_middleware_registry.register(UnsubscribeReferenceMailMiddleware()) def cancel_user(sender, user=None, **kwargs): from .models import Bounce if user is None: return Bounce.objects.filter(user=user).delete() def export_user_data(user): from .models import Bounce bounces = Bounce.objects.filter(user=user) if not bounces: return yield ('bounces.json', json.dumps([ { 'last_update': ( b.last_update.isoformat() if b.last_update else None ), 'bounces': b.bounces, 'email': b.email, } for b in bounces]).encode('utf-8') ) class UnsubscribeReferenceMailMiddleware: ''' Moves unsubscribe_reference from mail render context to email sending kwargs ''' def enhance_email_kwargs(self, mail_intent, context, email_kwargs): unsubscribe_reference = context.get('unsubscribe_reference') if unsubscribe_reference is None: return return { 'unsubscribe_reference': unsubscribe_reference }
Add unsubscribe reference to mails through context
Add unsubscribe reference to mails through context
Python
mit
stefanw/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide
5d67def658f0b1bd206fdefe100d32269f1eb34e
falcom/api/uri/api_querier.py
falcom/api/uri/api_querier.py
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. from time import sleep class APIQuerier: def __init__ (self, uri, url_opener, sleep_time=300, max_tries=0): self.uri = uri self.url_opener = url_opener self.sleep_time = sleep_time self.max_tries = max_tries def get (self, **kwargs): class SpecialNull: pass result = SpecialNull i = 1 while result is SpecialNull: try: result = self.__open_uri(kwargs) except ConnectionError: sleep(self.sleep_time) if i == self.max_tries: result = b"" else: i += 1 return result @staticmethod def utf8 (str_or_bytes): if isinstance(str_or_bytes, bytes): return str_or_bytes.decode("utf_8") else: return str_or_bytes def __open_uri (self, kwargs): with self.url_opener(self.uri(**kwargs)) as response: result = self.utf8(response.read()) return result
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. from time import sleep class APIQuerier: def __init__ (self, uri, url_opener, sleep_time=300, max_tries=0): self.uri = uri self.url_opener = url_opener self.sleep_time = sleep_time self.max_tries = max_tries def get (self, **kwargs): class SpecialNull: pass self.result = SpecialNull self.attempt_number = 1 while self.result is SpecialNull: try: self.result = self.__open_uri(kwargs) except ConnectionError: sleep(self.sleep_time) if self.attempt_number == self.max_tries: self.result = b"" else: self.attempt_number += 1 return self.result @staticmethod def utf8 (str_or_bytes): if isinstance(str_or_bytes, bytes): return str_or_bytes.decode("utf_8") else: return str_or_bytes def __open_uri (self, kwargs): with self.url_opener(self.uri(**kwargs)) as response: result = self.utf8(response.read()) return result
Replace local variables with class variables
Replace local variables with class variables
Python
bsd-3-clause
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
50fa164c4b09845bfa262c2f6959a3c5dfd6f76b
fluentcheck/classes/is_cls.py
fluentcheck/classes/is_cls.py
from typing import Any from ..assertions_is.booleans import __IsBool from ..assertions_is.collections import __IsCollections from ..assertions_is.dicts import __IsDicts from ..assertions_is.emptiness import __IsEmptiness from ..assertions_is.geo import __IsGeo from ..assertions_is.numbers import __IsNumbers from ..assertions_is.sequences import __IsSequences from ..assertions_is.strings import __IsStrings from ..assertions_is.types import __IsTypes from ..assertions_is.uuids import __IsUUIDs class Is(__IsBool, __IsCollections, __IsDicts, __IsEmptiness, __IsGeo, __IsNumbers, __IsSequences, __IsStrings, __IsTypes, __IsUUIDs): def __init__(self, object_under_test: Any): super().__init__(object_under_test)
from typing import Any from ..assertions_is.booleans import __IsBool from ..assertions_is.collections import __IsCollections from ..assertions_is.dicts import __IsDicts from ..assertions_is.emptiness import __IsEmptiness from ..assertions_is.geo import __IsGeo from ..assertions_is.numbers import __IsNumbers from ..assertions_is.sequences import __IsSequences from ..assertions_is.strings import __IsStrings from ..assertions_is.types import __IsTypes from ..assertions_is.uuids import __IsUUIDs class Is(__IsBool, __IsCollections, __IsDicts, __IsEmptiness, __IsGeo, __IsNumbers, __IsSequences, __IsStrings, __IsTypes, __IsUUIDs): pass
Remove methods with unnecessary super delegation.
Remove methods with unnecessary super delegation.
Python
mit
csparpa/check
a15d2956cfd48e0d46d5d4cf567af05641b4c8e6
yunity/api/utils.py
yunity/api/utils.py
from django.http import JsonResponse class ApiBase(object): @classmethod def success(cls, data, status=200): """ :type data: dict :type status: int :rtype JsonResponse """ return JsonResponse(data, status=status) @classmethod def error(cls, error, status=400): """ :type error: str :type status: int :rtype JsonResponse """ return JsonResponse({'error': error}, status=status)
from functools import wraps from json import loads as load_json from django.http import JsonResponse class ApiBase(object): @classmethod def validation_failure(cls, message, status=400): """ :type message: str :type status: int :rtype JsonResponse """ return JsonResponse({'validation_failure': message}, status=status) @classmethod def success(cls, data, status=200): """ :type data: dict :type status: int :rtype JsonResponse """ return JsonResponse(data, status=status) @classmethod def error(cls, error, status=400): """ :type error: str :type status: int :rtype JsonResponse """ return JsonResponse({'error': error}, status=status) def json_request(expected_keys=None): """Decorator to validate that a request is in JSON and (optionally) has some specific keys in the JSON object. """ expected_keys = expected_keys or [] def decorator(func): @wraps(func) def wrapper(cls, request, *args, **kwargs): data = load_json(request.body.decode('utf8')) for expected_key in expected_keys: value = data.get(expected_key) if not value: return ApiBase.validation_failure('missing key: {}'.format(expected_key)) return func(cls, data, request, *args, **kwargs) return wrapper return decorator
Implement JSON request validation decorator
Implement JSON request validation decorator with @NerdyProjects
Python
agpl-3.0
yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core
798bd79ddc2e9b212a82a7a8455428b3d32cfab4
bin/pymodules/apitest/jscomponent.py
bin/pymodules/apitest/jscomponent.py
import json import rexviewer as r import naali import urllib2 from componenthandler import DynamiccomponentHandler class JavascriptHandler(DynamiccomponentHandler): GUINAME = "Javascript Handler" def __init__(self): DynamiccomponentHandler.__init__(self) self.jsloaded = False def onChanged(self): print "-----------------------------------" ent = r.getEntity(self.comp.GetParentEntityId()) datastr = self.comp.GetAttribute() #print "GetAttr got:", datastr data = json.loads(datastr) js_src = data.get('js_src', None) if not self.jsloaded and js_src is not None: jscode = self.loadjs(js_src) print jscode ctx = { #'entity'/'this': self.entity 'component': self.comp } try: ent.touchable except AttributeError: pass else: ctx['touchable'] = ent.touchable naali.runjs(jscode, ctx) print "-- done with js" self.jsloaded = True def loadjs(self, srcurl): print "js source url:", srcurl f = urllib2.urlopen(srcurl) return f.read()
import json import rexviewer as r import naali import urllib2 from componenthandler import DynamiccomponentHandler class JavascriptHandler(DynamiccomponentHandler): GUINAME = "Javascript Handler" def __init__(self): DynamiccomponentHandler.__init__(self) self.jsloaded = False def onChanged(self): print "-----------------------------------" ent = r.getEntity(self.comp.GetParentEntityId()) datastr = self.comp.GetAttribute() #print "GetAttr got:", datastr data = json.loads(datastr) js_src = data.get('js_src', None) if not self.jsloaded and js_src is not None: jscode = self.loadjs(js_src) print jscode ctx = { #'entity'/'this': self.entity 'component': self.comp } try: ent.touchable except AttributeError: pass else: ctx['touchable'] = ent.touchable try: ent.placeable except: pass else: ctx['placeable'] = ent.placeable naali.runjs(jscode, ctx) print "-- done with js" self.jsloaded = True def loadjs(self, srcurl): print "js source url:", srcurl f = urllib2.urlopen(srcurl) return f.read()
Add placeable to javascript context
Add placeable to javascript context
Python
apache-2.0
BogusCurry/tundra,antont/tundra,pharos3d/tundra,antont/tundra,AlphaStaxLLC/tundra,jesterKing/naali,pharos3d/tundra,antont/tundra,pharos3d/tundra,BogusCurry/tundra,BogusCurry/tundra,antont/tundra,realXtend/tundra,BogusCurry/tundra,AlphaStaxLLC/tundra,jesterKing/naali,BogusCurry/tundra,pharos3d/tundra,antont/tundra,pharos3d/tundra,jesterKing/naali,AlphaStaxLLC/tundra,jesterKing/naali,jesterKing/naali,antont/tundra,realXtend/tundra,BogusCurry/tundra,jesterKing/naali,pharos3d/tundra,antont/tundra,AlphaStaxLLC/tundra,jesterKing/naali,AlphaStaxLLC/tundra,AlphaStaxLLC/tundra,realXtend/tundra,realXtend/tundra,realXtend/tundra,realXtend/tundra
31f887979d2129bec80311e94b91cf0f77772f26
zou/app/utils/fs.py
zou/app/utils/fs.py
import os import shutil import errno def mkdir_p(path): try: os.makedirs(path) except OSError as exception: if exception.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def rm_rf(path): if os.path.exists(path): shutil.rmtree(path)
import os import shutil import errno def mkdir_p(path): try: os.makedirs(path) except OSError as exception: if exception.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def rm_rf(path): if os.path.exists(path): shutil.rmtree(path) def copyfile(src, dest): shutil.copyfile(src, dest)
Add a new copy file util function
Add a new copy file util function
Python
agpl-3.0
cgwire/zou
463fa89c143cd4493ea3704f177c5aba0ebb2af7
idiokit/xmpp/_resolve.py
idiokit/xmpp/_resolve.py
from __future__ import absolute_import from .. import idiokit, dns DEFAULT_XMPP_PORT = 5222 @idiokit.stream def _add_port_and_count(port): count = 0 while True: try: family, ip = yield idiokit.next() except StopIteration: idiokit.stop(count) yield idiokit.send(family, ip, port) count += 1 def _resolve_host(host, port): return dns.host_lookup(host) | _add_port_and_count(port) @idiokit.stream def resolve(domain, forced_host=None, forced_port=None): if forced_host is not None: port = DEFAULT_XMPP_PORT if forced_port is None else forced_port yield _resolve_host(forced_host, port) return try: srv_records = yield dns.srv("_xmpp-client._tcp." + domain) except dns.ResponseError: srv_records = [] srv_count = 0 for srv_record in dns.ordered_srv_records(srv_records): port = srv_record.port if forced_port is None else forced_port srv_count += yield _resolve_host(srv_record.target, port) if srv_count == 0: port = DEFAULT_XMPP_PORT if forced_port is None else forced_port yield _resolve_host(domain, port)
from __future__ import absolute_import from .. import idiokit, dns DEFAULT_XMPP_PORT = 5222 @idiokit.stream def _add_port(port): while True: family, ip = yield idiokit.next() yield idiokit.send(family, ip, port) def _resolve_host(host, port): return dns.host_lookup(host) | _add_port(port) @idiokit.stream def resolve(domain, forced_host=None, forced_port=None): if forced_host is not None: port = DEFAULT_XMPP_PORT if forced_port is None else forced_port yield _resolve_host(forced_host, port) return try: srv_records = yield dns.srv("_xmpp-client._tcp." + domain) except (dns.ResponseError, dns.DNSTimeout): srv_records = [] if not srv_records: port = DEFAULT_XMPP_PORT if forced_port is None else forced_port yield _resolve_host(domain, port) return for srv_record in dns.ordered_srv_records(srv_records): port = srv_record.port if forced_port is None else forced_port yield _resolve_host(srv_record.target, port)
Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data.
idiokit.xmpp: Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data.
Python
mit
abusesa/idiokit
7e71e21734abb2b12e309ea37910c90f7b837651
go/base/tests/test_decorators.py
go/base/tests/test_decorators.py
"""Test for go.base.decorators.""" from go.vumitools.tests.helpers import djangotest_imports with djangotest_imports(globals()): from go.base.tests.helpers import GoDjangoTestCase from go.base.decorators import render_exception from django.template.response import TemplateResponse class CatchableDummyError(Exception): """Error that will be caught by DummyView.post.""" class UncatchableDummyError(Exception): """Error that will not be caught by DummyView.post.""" class DummyView(object): @render_exception(CatchableDummyError, 400, "Meep.") def post(self, request, err=None): if err is None: return "Success" raise err class TestRenderException(GoDjangoTestCase): def test_no_exception(self): d = DummyView() self.assertEqual(d.post("request"), "Success") def test_expected_exception(self): d = DummyView() self.assertRaises( UncatchableDummyError, d.post, "request", UncatchableDummyError()) def test_other_exception(self): d = DummyView() response = d.post("request", CatchableDummyError("foo")) self.assertTrue(isinstance(response, TemplateResponse)) self.assertEqual(response.template_name, 'error.html') self.assertEqual(response.status_code, 400)
"""Test for go.base.decorators.""" from go.vumitools.tests.helpers import djangotest_imports with djangotest_imports(globals()): from go.base.tests.helpers import GoDjangoTestCase from go.base.decorators import render_exception from django.template.response import TemplateResponse class CatchableDummyError(Exception): """Error that will be caught by DummyView.post.""" class UncatchableDummyError(Exception): """Error that will not be caught by DummyView.post.""" class DummyView(object): @render_exception(CatchableDummyError, 400, "Meep.") def post(self, request, err=None): if err is None: return "Success" raise err class TestRenderException(GoDjangoTestCase): def test_no_exception(self): d = DummyView() self.assertEqual(d.post("request"), "Success") def test_expected_exception(self): d = DummyView() self.assertRaises( UncatchableDummyError, d.post, "request", UncatchableDummyError()) def test_other_exception(self): d = DummyView() response = d.post("request", CatchableDummyError("foo")) self.assertTrue(isinstance(response, TemplateResponse)) self.assertEqual(response.template_name, 'error.html') self.assertEqual(response.status_code, 400)
Move Django-specific pieces into the django_imports block.
Move Django-specific pieces into the django_imports block.
Python
bsd-3-clause
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
a50aeb81a588f8297f194d793cb8f8cf0e15a411
lambda/list_member.py
lambda/list_member.py
from __future__ import print_function from enum import IntEnum import yaml MemberFlag = IntEnum('MemberFlag', [ 'digest', 'digest2', 'modPost', 'preapprove', 'noPost', 'diagnostic', 'moderator', 'myopic', 'superadmin', 'admin', 'protected', 'ccErrors', 'reports', 'vacation', 'ackPost', 'echoPost', 'hidden', ]) def member_flag_representer(dumper, data): return dumper.represent_scalar(u'!flag', data.name) yaml.add_representer(MemberFlag, member_flag_representer) def member_flag_constructor(loader, node): value = loader.construct_scalar(node) return MemberFlag[value] yaml.SafeLoader.add_constructor(u'!flag', member_flag_constructor) class ListMember(yaml.YAMLObject): yaml_tag = u'!Member' yaml_loader = yaml.SafeLoader def __init__(self, address, *args, **kwargs): self.address = address self.flags = set(a for a in args if isinstance(a, MemberFlag)) def __repr__(self): return u'{}({}, flags: {})'.format( self.__class__.__name__, self.address, ', '.join( map(lambda f: f.name, self.flags) ), )
from __future__ import print_function from enum import IntEnum import yaml MemberFlag = IntEnum('MemberFlag', [ 'digest', 'digest2', 'modPost', 'preapprove', 'noPost', 'diagnostic', 'moderator', 'myopic', 'superadmin', 'admin', 'protected', 'ccErrors', 'reports', 'vacation', 'ackPost', 'echoPost', 'hidden', ]) def member_flag_representer(dumper, data): return dumper.represent_scalar(u'!flag', data.name) yaml.add_representer(MemberFlag, member_flag_representer) def member_flag_constructor(loader, node): value = loader.construct_scalar(node) return MemberFlag[value] yaml.SafeLoader.add_constructor(u'!flag', member_flag_constructor) class ListMember(yaml.YAMLObject): yaml_tag = u'!Member' yaml_loader = yaml.SafeLoader def __init__(self, address, *args, **kwargs): if isinstance(address, unicode): # Attempt to down-convert unicode-string addresses to plain strings try: address = str(address) except UnicodeEncodeError: pass self.address = address self.flags = set(a for a in args if isinstance(a, MemberFlag)) def __repr__(self): return u'{}({}, flags: {})'.format( self.__class__.__name__, self.address, ', '.join( map(lambda f: f.name, self.flags) ), )
Convert list member addresses to non-unicode strings when possible.
Convert list member addresses to non-unicode strings when possible.
Python
mit
ilg/LambdaMLM
bd59db76bb81218d04224e44773eae9d3d9dfc21
rplugin/python3/denite/source/toc.py
rplugin/python3/denite/source/toc.py
# -*- coding: utf-8 -*- from .base import Base class Source(Base): def __init__(self, vim): super().__init__(vim) self.name = 'vimtex_toc' self.kind = 'file' @staticmethod def format_number(n): if not n or n['frontmatter'] or n['backmatter']: return '' num = [str(n[k]) for k in [ 'part', 'chapter', 'section', 'subsection', 'subsubsection', 'subsubsubsection'] if n[k] is not 0] if n['appendix']: num[0] = chr(int(num[0]) + 64) fnum = '.'.join(num) return fnum @staticmethod def create_candidate(e, depth): indent = (' ' * 2*(depth - e['level']) + e['title'])[:60] number = Source.format_number(e['number']) abbr = '{:65}{:10}'.format(indent, number) return {'word': e['title'], 'abbr': abbr, 'action__path': e['file'], 'action__line': e.get('line', 0)} def gather_candidates(self, context): entries = self.vim.eval('vimtex#toc#get_entries()') depth = max([e['level'] for e in entries]) return [Source.create_candidate(e, depth) for e in entries]
# -*- coding: utf-8 -*- from .base import Base class Source(Base): def __init__(self, vim): super().__init__(vim) self.name = 'vimtex_toc' self.kind = 'file' @staticmethod def format_number(n): if not n or not type(n) is dict or n['frontmatter'] or n['backmatter']: return '' num = [str(n[k]) for k in [ 'part', 'chapter', 'section', 'subsection', 'subsubsection', 'subsubsubsection'] if n[k] is not 0] if n['appendix']: num[0] = chr(int(num[0]) + 64) fnum = '.'.join(num) return fnum @staticmethod def create_candidate(e, depth): indent = (' ' * 2*(int(depth) - int(e['level'])) + e['title'])[:60] number = Source.format_number(e['number']) abbr = '{:65}{:10}'.format(indent, number) return {'word': e['title'], 'abbr': abbr, 'action__path': e['file'], 'action__line': e.get('line', 0)} def gather_candidates(self, context): entries = self.vim.eval('vimtex#toc#get_entries()') depth = max([int(e['level']) for e in entries]) return [Source.create_candidate(e, depth) for e in entries]
Fix Denite support for vim8.
Fix Denite support for vim8.
Python
mit
lervag/vimtex,Aster89/vimtex,Aster89/vimtex,kmarius/vimtex,lervag/vimtex,kmarius/vimtex
f4406d21546922363cd67f53d5697bc324306f2b
orders/views.py
orders/views.py
from django.http import HttpResponse from django.shortcuts import render from django.utils import timezone from orders.models import Order def order_details(request, order_pk): return HttpResponse("Hello, world!") def not_executed(request): orders = Order.objects.filter(valid_until__gt=timezone.now()) return render(request, 'orders/not_executed.html', {'orders': orders}) def outdated(request): orders = Order.objects.filter(valid_until__lte=timezone.now()) return render(request, 'orders/outdated.html', {'orders': orders}) def executed(request): return render(request, 'orders/executed.html')
from django.db.models import Sum from django.db.models.query import QuerySet from django.http import HttpResponse from django.shortcuts import render from django.utils import timezone from orders.models import Order def order_details(request, order_pk): return HttpResponse("Hello, world!") def not_executed(request): orders = get_orders().filter(valid_until__gt=timezone.now(), sold_count=0) return render(request, 'orders/not_executed.html', {'orders': orders}) def outdated(request): orders = get_orders().filter(valid_until__lte=timezone.now(), sold_count=0) return render(request, 'orders/outdated.html', {'orders': orders}) def executed(request): orders = get_orders().exclude(sold_count=0) return render(request, 'orders/executed.html', {'orders': orders}) def get_orders() -> QuerySet: """ The function returns QuerySet of Order model with all necessary values for displaying also selected/prefetched. :return: the QuerySet of Order model """ return Order.objects.select_related('user').prefetch_related('books').annotate(sold_count=Sum('books__sold'))
Implement actual filtering (not) executed Orders
Implement actual filtering (not) executed Orders
Python
agpl-3.0
m4tx/egielda,m4tx/egielda,m4tx/egielda
a4d2782ad902bde5229def1b3de35107a3918800
opps/article/views.py
opps/article/views.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.views.generic.detail import DetailView from django.views.generic.list import ListView from opps.article.models import Post class OppsList(ListView): context_object_name = "context" @property def template_name(self): return 'channel/{0}.html'.format(self.kwargs['channel__long_slug']) @property def queryset(self): return Post.objects.filter( channel__long_slug=self.kwargs['channel__long_slug']).all() class OppsDetail(DetailView): context_object_name = "context" @property def template_name(self): return 'article/{0}/{1}.html'.format( self.kwargs['channel__long_slug'], self.kwargs['slug']) @property def queryset(self): return Post.objects.filter( channel__long_slug=self.kwargs['channel__long_slug'], slug=self.kwargs['slug']).all()
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.views.generic.detail import DetailView from django.views.generic.list import ListView from opps.article.models import Post class OppsList(ListView): context_object_name = "context" @property def template_name(self): return 'channel/{0}.html'.format(self.kwargs['channel__long_slug']) @property def queryset(self): if not self.kwargs.get('channel__long_slug'): return Post.objects.filter(channel__homepage=True).all() return Post.objects.filter( channel__long_slug=self.kwargs['channel__long_slug']).all() class OppsDetail(DetailView): context_object_name = "context" @property def template_name(self): return 'article/{0}/{1}.html'.format( self.kwargs['channel__long_slug'], self.kwargs['slug']) @property def queryset(self): return Post.objects.filter( channel__long_slug=self.kwargs['channel__long_slug'], slug=self.kwargs['slug']).all()
Fix queryset on entry home page (/) on list page
Fix queryset on entry home page (/) on list page
Python
mit
YACOWS/opps,williamroot/opps,jeanmask/opps,williamroot/opps,opps/opps,opps/opps,YACOWS/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,opps/opps,opps/opps
888f6b07174943ba7f3b9d187348ceeebecc4a42
utils/00-cinspect.py
utils/00-cinspect.py
""" A startup script for IPython to patch it to 'inspect' using cinspect. """ # Place this file in ~/.ipython/<PROFILE_DIR>/startup to patch your IPython to # use cinspect for the code inspection. import inspect from cinspect import getsource, getfile import IPython.core.oinspect as OI from IPython.utils.py3compat import cast_unicode old_find_file = OI.find_file old_getsource = inspect.getsource inspect.getsource = getsource def patch_find_file(obj): fname = old_find_file(obj) if fname is None: try: fname = cast_unicode(getfile(obj)) except: pass return fname OI.find_file = patch_find_file ipy = get_ipython() old_format = ipy.inspector.format def c_format(raw, *args, **kwargs): return raw def my_format(raw, out = None, scheme = ''): try: output = old_format(raw, out, scheme) except: output = raw return output ipy.inspector.format = my_format
""" A startup script for IPython to patch it to 'inspect' using cinspect. """ # Place this file in ~/.ipython/<PROFILE_DIR>/startup to patch your IPython to # use cinspect for the code inspection. from cinspect import getsource, getfile import IPython.core.oinspect as OI from IPython.utils.py3compat import cast_unicode old_find_file = OI.find_file old_getsource = OI.getsource def patch_find_file(obj): fname = old_find_file(obj) if fname is None: try: fname = cast_unicode(getfile(obj)) except: pass return fname def patch_getsource(obj, is_binary=False): if is_binary: return cast_unicode(getsource(obj)) else: return old_getsource(obj, is_binary) OI.find_file = patch_find_file OI.getsource = patch_getsource
Update the IPython startup script for master.
Update the IPython startup script for master.
Python
bsd-3-clause
punchagan/cinspect,punchagan/cinspect
dc461956408ffa35e2391fccf4231d60144985f7
yunity/groups/api.py
yunity/groups/api.py
from rest_framework import filters from rest_framework import status, viewsets from rest_framework.decorators import detail_route from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly from rest_framework.response import Response from yunity.groups.serializers import GroupSerializer from yunity.groups.models import Group as GroupModel class GroupViewSet(viewsets.ModelViewSet): queryset = GroupModel.objects.all() serializer_class = GroupSerializer filter_fields = ('members',) filter_backends = (filters.SearchFilter,) search_fields = ('name', 'description') permission_classes = (IsAuthenticatedOrReadOnly,) @detail_route(methods=['POST', 'GET'], permission_classes=(IsAuthenticated,)) def join(self, request, pk=None): group = self.get_object() group.members.add(request.user) return Response(status=status.HTTP_200_OK) @detail_route(methods=['POST', 'GET'], permission_classes=(IsAuthenticated,)) def leave(self, request, pk=None): group = self.get_object() if not group.members.filter(id=request.user.id).exists(): return Response("User not member of group", status=status.HTTP_400_BAD_REQUEST) group.members.remove(request.user) return Response(status=status.HTTP_200_OK)
from rest_framework import filters from rest_framework import status, viewsets from rest_framework.decorators import detail_route from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly, BasePermission from rest_framework.response import Response from yunity.groups.serializers import GroupSerializer from yunity.groups.models import Group as GroupModel class IsMember(BasePermission): message = 'You are not a member.' def has_object_permission(self, request, view, obj): return request.user in obj.members.all() class GroupViewSet(viewsets.ModelViewSet): queryset = GroupModel.objects.all() serializer_class = GroupSerializer filter_fields = ('members',) filter_backends = (filters.SearchFilter,) search_fields = ('name', 'description') def get_permissions(self): if self.action in ('update', 'partial_update', 'destroy'): self.permission_classes = (IsMember,) else: self.permission_classes = (IsAuthenticatedOrReadOnly,) return super().get_permissions() @detail_route(methods=['POST', 'GET'], permission_classes=(IsAuthenticated,)) def join(self, request, pk=None): group = self.get_object() group.members.add(request.user) return Response(status=status.HTTP_200_OK) @detail_route(methods=['POST', 'GET'], permission_classes=(IsAuthenticated,)) def leave(self, request, pk=None): group = self.get_object() if not group.members.filter(id=request.user.id).exists(): return Response("User not member of group", status=status.HTTP_400_BAD_REQUEST) group.members.remove(request.user) return Response(status=status.HTTP_200_OK)
Fix permissions for groups endpoint
Fix permissions for groups endpoint
Python
agpl-3.0
yunity/yunity-core,yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend
0f7ebec0442da08b12cd88f2558146d5c5a551ad
K2fov/tests/test_plot.py
K2fov/tests/test_plot.py
"""Tests K2fov.plot""" from .. import plot def test_basics(): """Make sure this runs without exception.""" try: import matplotlib plot.create_context_plot(180, 0) plot.create_context_plot_zoomed(180, 0) except ImportError: pass
"""Tests K2fov.plot""" from .. import plot """ def test_basics(): # Make sure this runs without exception. try: import matplotlib plot.create_context_plot(180, 0) plot.create_context_plot_zoomed(180, 0) except ImportError: pass """
Simplify plot test for now
Simplify plot test for now
Python
mit
KeplerGO/K2fov,mrtommyb/K2fov
3427b2583c38ed7ec5239c36faa82536f3f95a3b
automata/pda/stack.py
automata/pda/stack.py
#!/usr/bin/env python3 """Classes and methods for working with PDA stacks.""" class PDAStack(object): """A PDA stack.""" def __init__(self, stack, **kwargs): """Initialize the new PDA stack.""" if isinstance(stack, PDAStack): self._init_from_stack_obj(stack) else: self.stack = list(stack) def _init_from_stack_obj(self, stack_obj): """Initialize this Stack as a deep copy of the given Stack.""" self.__init__(stack_obj.stack) def top(self): """Return the symbol at the top of the stack.""" if self.stack: return self.stack[-1] else: return '' def pop(self): """Pop the stack top from the stack.""" self.stack.pop() def replace(self, symbols): """ Replace the top of the stack with the given symbols. The first symbol in the given sequence becomes the new stack top. """ self.stack.pop() self.stack.extend(reversed(symbols)) def copy(self): """Return a deep copy of the stack.""" return self.__class__(self) def __len__(self): """Return the number of symbols on the stack.""" return len(self.stack) def __iter__(self): """Return an interator for the stack.""" return iter(self.stack) def __repr__(self): """Return a string representation of the stack.""" return '{}({})'.format(self.__class__.__name__, self.stack) def __eq__(self, other): """Check if two stacks are equal.""" return self.__dict__ == other.__dict__
#!/usr/bin/env python3 """Classes and methods for working with PDA stacks.""" class PDAStack(object): """A PDA stack.""" def __init__(self, stack): """Initialize the new PDA stack.""" self.stack = list(stack) def top(self): """Return the symbol at the top of the stack.""" if self.stack: return self.stack[-1] else: return '' def pop(self): """Pop the stack top from the stack.""" self.stack.pop() def replace(self, symbols): """ Replace the top of the stack with the given symbols. The first symbol in the given sequence becomes the new stack top. """ self.stack.pop() self.stack.extend(reversed(symbols)) def copy(self): """Return a deep copy of the stack.""" return self.__class__(**self.__dict__) def __len__(self): """Return the number of symbols on the stack.""" return len(self.stack) def __iter__(self): """Return an interator for the stack.""" return iter(self.stack) def __repr__(self): """Return a string representation of the stack.""" return '{}({})'.format(self.__class__.__name__, self.stack) def __eq__(self, other): """Check if two stacks are equal.""" return self.__dict__ == other.__dict__
Remove copy constructor for PDAStack
Remove copy constructor for PDAStack The copy() method is already sufficient.
Python
mit
caleb531/automata
3990e3aa64cff288def07ee36e24026cc15282c0
taiga/projects/issues/serializers.py
taiga/projects/issues/serializers.py
# -*- coding: utf-8 -*- from rest_framework import serializers from taiga.base.serializers import PickleField, NeighborsSerializerMixin from . import models class IssueSerializer(serializers.ModelSerializer): tags = PickleField(required=False) comment = serializers.SerializerMethodField("get_comment") is_closed = serializers.Field(source="is_closed") class Meta: model = models.Issue def get_comment(self, obj): return "" class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer): def serialize_neighbor(self, neighbor): return NeighborIssueSerializer(neighbor).data class NeighborIssueSerializer(serializers.ModelSerializer): class Meta: model = models.Issue fields = ("id", "ref", "subject") depth = 0
# -*- coding: utf-8 -*- from rest_framework import serializers from taiga.base.serializers import PickleField, NeighborsSerializerMixin from . import models class IssueSerializer(serializers.ModelSerializer): tags = PickleField(required=False) is_closed = serializers.Field(source="is_closed") class Meta: model = models.Issue class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer): def serialize_neighbor(self, neighbor): return NeighborIssueSerializer(neighbor).data class NeighborIssueSerializer(serializers.ModelSerializer): class Meta: model = models.Issue fields = ("id", "ref", "subject") depth = 0
Remove unnecessary field from IssueSerializer
Remove unnecessary field from IssueSerializer
Python
agpl-3.0
forging2012/taiga-back,EvgeneOskin/taiga-back,xdevelsistemas/taiga-back-community,seanchen/taiga-back,bdang2012/taiga-back-casting,Rademade/taiga-back,crr0004/taiga-back,dayatz/taiga-back,rajiteh/taiga-back,dycodedev/taiga-back,crr0004/taiga-back,obimod/taiga-back,Zaneh-/bearded-tribble-back,seanchen/taiga-back,gauravjns/taiga-back,joshisa/taiga-back,19kestier/taiga-back,jeffdwyatt/taiga-back,taigaio/taiga-back,WALR/taiga-back,joshisa/taiga-back,astronaut1712/taiga-back,taigaio/taiga-back,coopsource/taiga-back,gam-phon/taiga-back,Rademade/taiga-back,obimod/taiga-back,obimod/taiga-back,CMLL/taiga-back,frt-arch/taiga-back,dycodedev/taiga-back,bdang2012/taiga-back-casting,Tigerwhit4/taiga-back,19kestier/taiga-back,EvgeneOskin/taiga-back,EvgeneOskin/taiga-back,astagi/taiga-back,bdang2012/taiga-back-casting,Zaneh-/bearded-tribble-back,dayatz/taiga-back,CoolCloud/taiga-back,astronaut1712/taiga-back,jeffdwyatt/taiga-back,crr0004/taiga-back,WALR/taiga-back,gam-phon/taiga-back,CMLL/taiga-back,seanchen/taiga-back,astagi/taiga-back,gauravjns/taiga-back,gam-phon/taiga-back,WALR/taiga-back,jeffdwyatt/taiga-back,Tigerwhit4/taiga-back,Zaneh-/bearded-tribble-back,seanchen/taiga-back,xdevelsistemas/taiga-back-community,coopsource/taiga-back,astagi/taiga-back,EvgeneOskin/taiga-back,obimod/taiga-back,gam-phon/taiga-back,coopsource/taiga-back,CoolCloud/taiga-back,rajiteh/taiga-back,dycodedev/taiga-back,bdang2012/taiga-back-casting,19kestier/taiga-back,astronaut1712/taiga-back,forging2012/taiga-back,CMLL/taiga-back,frt-arch/taiga-back,astagi/taiga-back,WALR/taiga-back,forging2012/taiga-back,rajiteh/taiga-back,frt-arch/taiga-back,Rademade/taiga-back,xdevelsistemas/taiga-back-community,taigaio/taiga-back,joshisa/taiga-back,gauravjns/taiga-back,Rademade/taiga-back,crr0004/taiga-back,forging2012/taiga-back,joshisa/taiga-back,CMLL/taiga-back,dycodedev/taiga-back,coopsource/taiga-back,CoolCloud/taiga-back,Rademade/taiga-back,astronaut1712/taiga-back,jeffdwyatt/taiga-back,CoolCloud/taiga-back,gauravjns/taiga-back,rajiteh/taiga-back,dayatz/taiga-back,Tigerwhit4/taiga-back,Tigerwhit4/taiga-back
85e853a63d7fed79b931b337bb9e6678077cf8d5
tests/integration/ssh/test_grains.py
tests/integration/ssh/test_grains.py
# -*- coding: utf-8 -*- # Import Python libs from __future__ import absolute_import # Import Salt Testing Libs from tests.support.case import SSHCase from tests.support.unit import skipIf # Import Salt Libs import salt.utils @skipIf(salt.utils.is_windows(), 'salt-ssh not available on Windows') class SSHGrainsTest(SSHCase): ''' testing grains with salt-ssh ''' def test_grains_items(self): ''' test grains.items with salt-ssh ''' ret = self.run_function('grains.items') self.assertEqual(ret['kernel'], 'Linux') self.assertTrue(isinstance(ret, dict))
# -*- coding: utf-8 -*- # Import Python libs from __future__ import absolute_import # Import Salt Testing Libs from tests.support.case import SSHCase from tests.support.unit import skipIf # Import Salt Libs import salt.utils @skipIf(salt.utils.is_windows(), 'salt-ssh not available on Windows') class SSHGrainsTest(SSHCase): ''' testing grains with salt-ssh ''' def test_grains_items(self): ''' test grains.items with salt-ssh ''' ret = self.run_function('grains.items') grain = 'Linux' if salt.utils.platform.is_darwin(): grain = 'Darwin' self.assertEqual(ret['kernel'], grain) self.assertTrue(isinstance(ret, dict))
Add darwin value for ssh grain items tests on MacOSX
Add darwin value for ssh grain items tests on MacOSX
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
79bbc95abd2c1b41bcbd19d9ce1ffa330bd76b7a
source/views.py
source/views.py
from multiprocessing.pool import ThreadPool from django.shortcuts import render from .forms import SearchForm from source import view_models def index(request): if request.method == 'GET': form = SearchForm(request.GET) if form.is_valid(): title = request.GET.__getitem__('movie_title').__str__() pool = ThreadPool(processes=5) async_rt_rating = pool.apply_async(view_models.get_rt_rating, (title,)) async_bluray_rating = pool.apply_async(view_models.get_bluray_rating, (title,)) async_tech_specs = pool.apply_async(view_models.get_tech_spec, (title,)) async_price = pool.apply_async(view_models.get_price, (title,)) async_artwork = pool.apply_async(view_models.get_artwork, (title,)) rt_rating = async_rt_rating.get() bluray_rating = async_bluray_rating.get() tech_specs = async_tech_specs.get() price = async_price.get() artwork = async_artwork.get() return render(request, 'index.html', {'form': form, 'rt_rating': rt_rating, 'bluray_rating': bluray_rating, 'tech_specs': tech_specs, 'price': price, 'artwork': artwork}) else: form = SearchForm() return render(request, 'index.html', {'form': form})
from multiprocessing.pool import ThreadPool from django.shortcuts import render from .forms import SearchForm from source import view_models def index(request): if request.method == 'GET': form = SearchForm(request.GET) if form.is_valid(): title = request.GET.__getitem__('movie_title').__str__() pool = ThreadPool(processes=5) async_rt_rating = pool.apply_async(view_models.get_rt_rating, (title,)) async_bluray_rating = pool.apply_async(view_models.get_bluray_rating, (title,)) async_tech_specs = pool.apply_async(view_models.get_tech_spec, (title,)) async_price = pool.apply_async(view_models.get_price, (title,)) async_artwork = pool.apply_async(view_models.get_artwork, (title,)) pool.close() rt_rating = async_rt_rating.get() bluray_rating = async_bluray_rating.get() tech_specs = async_tech_specs.get() price = async_price.get() artwork = async_artwork.get() pool.join() return render(request, 'index.html', {'form': form, 'rt_rating': rt_rating, 'bluray_rating': bluray_rating, 'tech_specs': tech_specs, 'price': price, 'artwork': artwork}) else: form = SearchForm() return render(request, 'index.html', {'form': form})
Join threads or else the number of running threads increments by 5 at each request and will never stop until main process is killed
Join threads or else the number of running threads increments by 5 at each request and will never stop until main process is killed
Python
mit
jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu
26a6da62dc81720ea13645589719dcbae6dadacc
pynexus/api_client.py
pynexus/api_client.py
import requests class ApiClient: def __init__(self, host, username, password): self.host = host self.username = username self.password = password def get_all_repositories(self): r = requests.get(self.host+'/nexus/service/local/repositories', headers={'Accept': 'application/json'}) return r def get_status(self): r = requests.get(self.host+'/nexus/service/local/status', headers={'Accept': 'application/json'}) return r
import requests class ApiClient: def __init__(self, host, username, password): self.uri = host + '/nexus/service/local/' self.username = username self.password = password def get_all_repositories(self): r = requests.get(self.uri + 'all_repositories', headers={'Accept': 'application/json'}) return r def get_status(self): r = requests.get(self.uri + 'status', headers={'Accept': 'application/json'}) return r
Refactor url attribute to uri
Refactor url attribute to uri It's better to construct the uri in the class constructor, instead of constructing it in every single REST method
Python
apache-2.0
rcarrillocruz/pynexus
68f4d883eb9dd59b3a4560f53657d80cf572104e
pfasst/__init__.py
pfasst/__init__.py
from pfasst import PFASST __all__ = []
try: from pfasst import PFASST except: print 'WARNING: Unable to import PFASST.' __all__ = []
Add warning when unable to import PFASST.
PFASST: Add warning when unable to import PFASST.
Python
bsd-2-clause
memmett/PyPFASST,memmett/PyPFASST
2cb385ab85257562547759c1d192993c258ebdff
wger/utils/tests/test_capitalizer.py
wger/utils/tests/test_capitalizer.py
# This file is part of wger Workout Manager. # # wger Workout Manager is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # wger Workout Manager is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Affero General Public License from wger.core.tests.base_testcase import WorkoutManagerTestCase from wger.utils.helpers import smart_capitalize class CapitalizerTestCase(WorkoutManagerTestCase): ''' Tests the "intelligent" capitalizer ''' def test_capitalizer(self): ''' Tests different combinations of input strings ''' self.assertEqual(smart_capitalize("some long words"), "Some Long Words") self.assertEqual(smart_capitalize("Here a short one"), "Here a Short One") self.assertEqual(smart_capitalize("meine gym AG"), "Meine Gym AG") self.assertEqual(smart_capitalize("ßpecial case"), "ßpecial Case") self.assertEqual(smart_capitalize("fIRST lettER only"), "FIRST LettER Only")
# -*- coding: utf-8 *-* # This file is part of wger Workout Manager. # # wger Workout Manager is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # wger Workout Manager is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Affero General Public License from wger.core.tests.base_testcase import WorkoutManagerTestCase from wger.utils.helpers import smart_capitalize class CapitalizerTestCase(WorkoutManagerTestCase): ''' Tests the "intelligent" capitalizer ''' def test_capitalizer(self): ''' Tests different combinations of input strings ''' self.assertEqual(smart_capitalize("some long words"), "Some Long Words") self.assertEqual(smart_capitalize("Here a short one"), "Here a Short One") self.assertEqual(smart_capitalize("meine gym AG"), "Meine Gym AG") self.assertEqual(smart_capitalize("ßpecial case"), "ßpecial Case") self.assertEqual(smart_capitalize("fIRST lettER only"), "FIRST LettER Only")
Add coding for python 2.7 compatibility
Add coding for python 2.7 compatibility
Python
agpl-3.0
wger-project/wger,petervanderdoes/wger,wger-project/wger,kjagoo/wger_stark,rolandgeider/wger,petervanderdoes/wger,rolandgeider/wger,kjagoo/wger_stark,wger-project/wger,wger-project/wger,kjagoo/wger_stark,kjagoo/wger_stark,petervanderdoes/wger,petervanderdoes/wger,rolandgeider/wger,rolandgeider/wger
93926a9986ab4ba7704cd564d0052b6e60ff38cb
casepro/pods/base.py
casepro/pods/base.py
import json from confmodel import fields, Config as ConfmodelConfig from django.apps import AppConfig class PodConfig(ConfmodelConfig): ''' This is the config that all pods should use as the base for their own config. ''' index = fields.ConfigInt( "A unique identifier for the specific instance of this pod." "Automatically determined and set in the pod registry.", required=True) title = fields.ConfigText( "The title to show in the UI for this pod", default=None) class Pod(object): ''' The base class for all pod plugins. ''' def __init__(self, pod_type, config): self.pod_type = pod_type self.config = config @property def config_json(self): return json.dumps(self.config._config_data) def read_data(self, params): '''Should return the data that should be used to create the display for the pod.''' return {} def perform_action(self, params): '''Should perform the action specified by params.''' return {} class PodPlugin(AppConfig): name = 'casepro.pods' label = 'base_pod' pod_class = Pod config_class = PodConfig title = 'Pod' controller = None directive = None
import json from confmodel import fields, Config as ConfmodelConfig from django.apps import AppConfig class PodConfig(ConfmodelConfig): ''' This is the config that all pods should use as the base for their own config. ''' index = fields.ConfigInt( "A unique identifier for the specific instance of this pod." "Automatically determined and set in the pod registry.", required=True) title = fields.ConfigText( "The title to show in the UI for this pod", default=None) class Pod(object): ''' The base class for all pod plugins. ''' def __init__(self, pod_type, config): self.pod_type = pod_type self.config = config @property def config_json(self): return json.dumps(self.config._config_data) def read_data(self, params): '''Should return the data that should be used to create the display for the pod.''' return {} def perform_action(self, params): '''Should perform the action specified by params.''' return {} class PodPlugin(AppConfig): name = 'casepro.pods' pod_class = Pod config_class = PodConfig # django application label, used to determine which pod type to use when # loading pods configured in `settings.PODS` label = 'base_pod' # default title to use when configuring each pod title = 'Pod' # override to use a different angular controller controller = 'PodController' # override to use a different angular directive directive = 'pod' # override with paths to custom scripts that the pod needs scripts = () # override with paths to custom styles that the pod needs styles = ()
Add the class-level vars we need for pod angular components to PodPlugin
Add the class-level vars we need for pod angular components to PodPlugin
Python
bsd-3-clause
rapidpro/casepro,praekelt/casepro,xkmato/casepro,rapidpro/casepro,praekelt/casepro,xkmato/casepro,praekelt/casepro,rapidpro/casepro
aceeac7e9dd2735add937bc7141cfdb29b6201c7
pywatson/watson.py
pywatson/watson.py
from pywatson.answer.answer import Answer from pywatson.question.question import Question import requests class Watson: """The Watson API adapter class""" def __init__(self, url, username, password): self.url = url self.username = username self.password = password def ask_question(self, question_text, question=None): """Ask Watson a question via the Question and Answer API :param question_text: question to ask Watson :type question_text: str :param question: if question_text is not provided, a Question object representing the question to ask Watson :type question: Question :return: Answer """ if question is not None: q = question.to_dict() else: q = Question(question_text).to_dict() r = requests.post(self.url + '/question', json=q) return Answer(r.json())
from pywatson.answer.answer import Answer from pywatson.question.question import Question import requests class Watson(object): """The Watson API adapter class""" def __init__(self, url, username, password): self.url = url self.username = username self.password = password def ask_question(self, question_text, question=None): """Ask Watson a question via the Question and Answer API :param question_text: question to ask Watson :type question_text: str :param question: if question_text is not provided, a Question object representing the question to ask Watson :type question: Question :return: Answer """ if question is not None: q = question.__dict__ else: q = Question(question_text).__dict__ r = requests.post(self.url + '/question', json=q) return Answer(r.json())
Use __dict__ instead of to_dict()
Use __dict__ instead of to_dict()
Python
mit
sherlocke/pywatson
d7c9bcbf25a6b45a462216f426608474aa66ceb0
mysite/missions/models.py
mysite/missions/models.py
from django.db import models class MissionStep(models.Model): pass class MissionStepCompletion(models.Model): person = models.ForeignKey('profile.Person') step = models.ForeignKey('MissionStep') class Meta: unique_together = ('person', 'step')
from django.db import models class Step(models.Model): pass class StepCompletion(models.Model): person = models.ForeignKey('profile.Person') step = models.ForeignKey('Step') class Meta: unique_together = ('person', 'step')
Remove the redundant "Mission" prefix from the mission model names.
Remove the redundant "Mission" prefix from the mission model names.
Python
agpl-3.0
heeraj123/oh-mainline,vipul-sharma20/oh-mainline,sudheesh001/oh-mainline,willingc/oh-mainline,jledbetter/openhatch,jledbetter/openhatch,moijes12/oh-mainline,openhatch/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,jledbetter/openhatch,waseem18/oh-mainline,waseem18/oh-mainline,SnappleCap/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,Changaco/oh-mainline,Changaco/oh-mainline,SnappleCap/oh-mainline,jledbetter/openhatch,onceuponatimeforever/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,openhatch/oh-mainline,eeshangarg/oh-mainline,nirmeshk/oh-mainline,nirmeshk/oh-mainline,campbe13/openhatch,heeraj123/oh-mainline,waseem18/oh-mainline,sudheesh001/oh-mainline,waseem18/oh-mainline,heeraj123/oh-mainline,vipul-sharma20/oh-mainline,campbe13/openhatch,willingc/oh-mainline,SnappleCap/oh-mainline,campbe13/openhatch,vipul-sharma20/oh-mainline,campbe13/openhatch,moijes12/oh-mainline,eeshangarg/oh-mainline,onceuponatimeforever/oh-mainline,moijes12/oh-mainline,SnappleCap/oh-mainline,willingc/oh-mainline,heeraj123/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,moijes12/oh-mainline,ehashman/oh-mainline,moijes12/oh-mainline,Changaco/oh-mainline,vipul-sharma20/oh-mainline,Changaco/oh-mainline,onceuponatimeforever/oh-mainline,ojengwa/oh-mainline,onceuponatimeforever/oh-mainline,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,heeraj123/oh-mainline,willingc/oh-mainline,SnappleCap/oh-mainline,sudheesh001/oh-mainline,jledbetter/openhatch,ehashman/oh-mainline,eeshangarg/oh-mainline,openhatch/oh-mainline,sudheesh001/oh-mainline,waseem18/oh-mainline,mzdaniel/oh-mainline,eeshangarg/oh-mainline,willingc/oh-mainline,nirmeshk/oh-mainline,campbe13/openhatch,vipul-sharma20/oh-mainline,ojengwa/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,ojengwa/oh-mainline,nirmeshk/oh-mainline,openhatch/oh-mainline
a2e3f0590d5bd25993be5291c058c722896aa773
tests/test_utils.py
tests/test_utils.py
import sys import unittest import numpy as np import torch sys.path.append("../metal") from metal.utils import ( rargmax, hard_to_soft, recursive_merge_dicts ) class UtilsTest(unittest.TestCase): def test_rargmax(self): x = np.array([2, 1, 2]) self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2]) def test_hard_to_soft(self): x = torch.tensor([1,2,2,1]) target = torch.tensor([ [1, 0], [0, 1], [0, 1], [1, 0], ], dtype=torch.float) self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8)) def test_recursive_merge_dicts(self): x = { 'foo': {'Foo': {'FOO': 1}}, 'bar': 2, 'baz': 3, } y = { 'FOO': 4, 'bar': 5, } z = { 'foo': 6 } recursive_merge_dicts(x, y, verbose=False) self.assertEqual(x['bar'], 5) self.assertEqual(x['foo']['Foo']['FOO'], 4) with self.assertRaises(ValueError): recursive_merge_dicts(x, z, verbose=False) if __name__ == '__main__': unittest.main()
import sys import unittest import numpy as np import torch sys.path.append("../metal") from metal.utils import ( rargmax, hard_to_soft, recursive_merge_dicts ) class UtilsTest(unittest.TestCase): def test_rargmax(self): x = np.array([2, 1, 2]) np.random.seed(1) self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2]) def test_hard_to_soft(self): x = torch.tensor([1,2,2,1]) target = torch.tensor([ [1, 0], [0, 1], [0, 1], [1, 0], ], dtype=torch.float) self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8)) def test_recursive_merge_dicts(self): x = { 'foo': {'Foo': {'FOO': 1}}, 'bar': 2, 'baz': 3, } y = { 'FOO': 4, 'bar': 5, } z = { 'foo': 6 } recursive_merge_dicts(x, y, verbose=False) self.assertEqual(x['bar'], 5) self.assertEqual(x['foo']['Foo']['FOO'], 4) with self.assertRaises(ValueError): recursive_merge_dicts(x, z, verbose=False) if __name__ == '__main__': unittest.main()
Fix broken utils test with seed
Fix broken utils test with seed
Python
apache-2.0
HazyResearch/metal,HazyResearch/metal
df5e6bdd03ad666afdd9b61745eec95afc08e9cb
tests/test_views.py
tests/test_views.py
""" Tests for the main server file. """ from unittest import TestCase from unittest.mock import patch from app import views class ViewsTestCase(TestCase): """ Our main server testcase. """ def test_ping(self): self.assertEqual(views.ping(None, None), 'pong') @patch('app.views.notify_recipient') @patch('app.views.is_valid_pull_request') def test_valid_pull_request(self, validator, notifier): validator.return_value = True notifier.return_value = True result = views.pull_request({}, None) self.assertEqual(result, 'Recipient Notified') @patch('app.views.is_valid_pull_request') def test_invalid_pull_request(self, validator): validator.return_value = False result = views.pull_request({}, None) self.assertRegex(result, 'ignored')
""" Tests for the main server file. """ from unittest import TestCase from unittest.mock import patch from app import views class ViewsTestCase(TestCase): """ Our main server testcase. """ def test_ping(self): self.assertEqual(views.ping(None, None), 'pong') @patch('app.views.notify_recipient') @patch('app.views.is_valid_pull_request') def test_valid_pull_request(self, validator, notifier): """ Should notify upon a valid pull request. """ validator.return_value = True notifier.return_value = True result = views.pull_request({}, None) self.assertEqual(result, 'Recipient Notified') @patch('app.views.is_valid_pull_request') def test_invalid_pull_request(self, validator): """ Should ignore an invalid pull request. """ validator.return_value = False result = views.pull_request({}, None) self.assertRegex(result, 'ignored')
Fix last code quality issues
Fix last code quality issues
Python
mit
DobaTech/github-review-slack-notifier
23d50e82212eb02a3ba467ae323736e4f03f7293
tof_server/views.py
tof_server/views.py
"""This module provides views for application.""" from tof_server import app, versioning, mysql from flask import jsonify, make_response import string, random @app.route('/') def index(): """Server information""" return jsonify({ 'server-version' : versioning.SERVER_VERSION, 'client-versions' : versioning.CLIENT_VERSIONS }) @app.route('/players', methods=['POST']) def generate_new_id(): """Method for generating new unique player ids""" try: cursor = mysql.connection.cursor() new_pin = '' characters_pool = string.ascii_uppercase + string.digits for _ in range(8): new_pin = new_pin + random.SystemRandom().choice(characters_pool) return jsonify({ 'id' : 'somestubid', 'pin' : new_pin }) except Exception as er_msg: return make_response(jsonify({ 'error' : str(er_msg) }), 500) finally: cursor.close()
"""This module provides views for application.""" from tof_server import app, versioning, mysql from flask import jsonify, make_response import string, random @app.route('/') def index(): """Server information""" return jsonify({ 'server-version' : versioning.SERVER_VERSION, 'client-versions' : versioning.CLIENT_VERSIONS }) @app.route('/players', methods=['POST']) def generate_new_id(): """Method for generating new unique player ids""" try: cursor = mysql.connection.cursor() new_pin = '' characters_pool = string.ascii_uppercase + string.digits for _ in range(8): new_pin = new_pin + random.SystemRandom().choice(characters_pool) insert_sql = "INSERT INTO players (auto_pin) VALUES ('%s')" id_sql = "SELECT LAST_INSERT_ID()" cursor.execute(insert_sql, (new_pin)) cursor.execute(id_sql) insert_data = cursor.fetchone() return jsonify({ 'id' : insert_data[0], 'pin' : new_pin }) except Exception as er_msg: return make_response(jsonify({ 'error' : str(er_msg) }), 500) finally: cursor.close()
Insert new player data into db
Insert new player data into db
Python
mit
P1X-in/Tanks-of-Freedom-Server
39091c3390d121d48097d64526f40d0a09702673
src/zeit/today/tests.py
src/zeit/today/tests.py
import pkg_resources import zeit.cms.testing product_config = """\ <product-config zeit.today> today-xml-url file://{base}/today.xml </product-config> """.format(base=pkg_resources.resource_filename(__name__, '.')) TodayLayer = zeit.cms.testing.ZCMLLayer('ftesting.zcml', product_config=( product_config + zeit.cms.testing.cms_product_config)) def test_suite(): return zeit.cms.testing.FunctionalDocFileSuite( 'README.txt', 'yesterday.txt', layer=TodayLayer )
import pkg_resources import zeit.cms.testing product_config = """\ <product-config zeit.today> today-xml-url file://{base}/today.xml </product-config> """.format(base=pkg_resources.resource_filename(__name__, '.')) CONFIG_LAYER = zeit.cms.testing.ProductConfigLayer(product_config, bases=( zeit.cms.testing.CONFIG_LAYER,)) ZCML_LAYER = zeit.cms.testing.ZCMLLayer(bases=(CONFIG_LAYER,)) ZOPE_LAYER = zeit.cms.testing.ZopeLayer(bases=(ZCML_LAYER,)) def test_suite(): return zeit.cms.testing.FunctionalDocFileSuite( 'README.txt', 'yesterday.txt', layer=ZOPE_LAYER)
Update to new testlayer API
ZON-5241: Update to new testlayer API
Python
bsd-3-clause
ZeitOnline/zeit.today
81f7b2bdd0e916a001b954ce9bac24ebe4600150
roboime/options.py
roboime/options.py
# -*- coding: utf-8 -*- """ General options during execution """ #Position Log filename. Use None to disable. position_log_filename = "math/pos_log.txt" #position_log_filename = None #Position Log with Noise filename. Use None to disable. position_log_noise_filename = "math/pos_log_noise.txt" #position_log_filename = None #Command and Update Log filename. Use None to disable. cmdupd_filename = "math/commands.txt" #cmdupd_filename = None #Gaussian noise addition variances noise_var_x = 3. noise_var_y = 3. noise_var_angle = 0.05
# -*- coding: utf-8 -*- """ General options during execution """ #Position Log filename. Use None to disable. position_log_filename = "math/pos_log.txt" #position_log_filename = None #Command and Update Log filename. Use None to disable. cmdupd_filename = "math/commands.txt" #cmdupd_filename = None #Gaussian noise addition variances noise_var_x = 3.E-5 noise_var_y = 3.E-5 noise_var_angle = 1. # Process error estimate. The lower (higher negative exponent), more the filter # becomes like a Low-Pass Filter (higher confidence in the model prediction). Q = 1e-5 # Measurement error variances (for the R matrix). # The higher (lower negative exponent), more the filter becomes like a # Low-Pass Filter (higher possible measurement error). R_var_x = 3.E-5 R_var_y = 3.E-5 R_var_angle = 3
Add Q (generic) and R (3 values) to get more precise Kalman results
Add Q (generic) and R (3 values) to get more precise Kalman results
Python
agpl-3.0
roboime/pyroboime
d6ce218b0da869f6b4319751c1fe59ef02fba6b6
kremlin/imgutils.py
kremlin/imgutils.py
""" # # #### ##### # # ##### # # # # # # # # # ## ## # # # ## # # ### #### #### # # # # # # # # ##### # # # # # # # # ## # # # # # # ##### # # # # # # # # Kremlin Magical Everything System Glasnost Image Board and Boredom Inhibitor """ import os from PIL import Image def mkthumb(fp, h=128, w=128): """docstring for mkthumb""" size = (h, w) f, ext = os.path.splitext(fp) im = Image.open(fp) im.thumbnail(size, Image.ANTIALIAS) im.save(f + ".thumbnail" + ext)
""" # # #### ##### # # ##### # # # # # # # # # ## ## # # # ## # # ### #### #### # # # # # # # # ##### # # # # # # # # ## # # # # # # ##### # # # # # # # # Kremlin Magical Everything System Glasnost Image Board and Boredom Inhibitor """ import os from PIL import Image def mkthumb(fp, h=128, w=128): """docstring for mkthumb""" size = (h, w) f, ext = os.path.splitext(fp) im = Image.open(fp) im.thumbnail(size, Image.ANTIALIAS) im.save('.thumbnail'.join([f, ext]))
Use better string concatenation in mkthumb()
Use better string concatenation in mkthumb()
Python
bsd-2-clause
glasnost/kremlin,glasnost/kremlin,glasnost/kremlin
aa196b79102959a9fc5e8837c068307791b76d32
lib/matrix_parser.py
lib/matrix_parser.py
#!/usr/bin/python # Import code for parsing a matrix into a sympy object from quantum_simulation import parse_matrix from sympy import latex import json, sys, pipes, urllib # If the file's being run, rather than loaded as a library if __name__ == "__main__": # Load the matrix from json passed as cli argument matrix = parse_matrix(json.loads(sys.argv[1])['matrix']) # Generate latex for the matix, using the pmatrix matrix env. tex = latex(matrix).replace("smallmatrix", "pmatrix").rpartition("\\right]")[0].partition("\\left[")[2] # Print out a JSONified version of the latex for the matrix # in a URL encoded version print pipes.quote(json.dumps({ 'matrix': urllib.quote(tex) }))
#!/usr/bin/python # Import code for parsing a matrix into a sympy object from quantum_simulation import parse_matrix from sympy import latex import json, sys, pipes, urllib, re # If the file's being run, rather than loaded as a library if __name__ == "__main__": # Load the matrix from json passed as cli argument matrix = parse_matrix(json.loads(sys.argv[1])['matrix']) # Generate latex for the matix, using the pmatrix matrix env. tex = latex(matrix).replace("smallmatrix", "pmatrix") tex = re.sub(r'\\right[\]\)]$', '', tex) tex = re.sub(r'^\\left[\[\(]', '', tex) # Print out a JSONified version of the latex for the matrix # in a URL encoded version print pipes.quote(json.dumps({ 'matrix': urllib.quote(tex) }))
Fix with latexising the matrix of an operator
Fix with latexising the matrix of an operator
Python
mit
hrickards/shors_circuits,hrickards/shors_circuits,hrickards/shors_circuits
09f65ff2a21cd00355193bcdee22a2289ead2d24
tests/test_arguments.py
tests/test_arguments.py
from __future__ import print_function import unittest import wrapt class TestArguments(unittest.TestCase): def test_getcallargs(self): def function(a, b=2, c=3, d=4, e=5, *args, **kwargs): pass expected = {'a': 10, 'c': 3, 'b': 20, 'e': 5, 'd': 40, 'args': (), 'kwargs': {'f': 50}} calculated = wrapt.getcallargs(function, 10, 20, d=40, f=50) self.assertEqual(expected, calculated) expected = {'a': 10, 'c': 30, 'b': 20, 'e': 50, 'd': 40, 'args': (60,), 'kwargs': {}} calculated = wrapt.getcallargs(function, 10, 20, 30, 40, 50, 60) self.assertEqual(expected, calculated)
from __future__ import print_function import unittest import wrapt class TestArguments(unittest.TestCase): def test_getcallargs(self): def function(a, b=2, c=3, d=4, e=5, *args, **kwargs): pass expected = {'a': 10, 'c': 3, 'b': 20, 'e': 5, 'd': 40, 'args': (), 'kwargs': {'f': 50}} calculated = wrapt.getcallargs(function, 10, 20, d=40, f=50) self.assertEqual(expected, calculated) expected = {'a': 10, 'c': 30, 'b': 20, 'e': 50, 'd': 40, 'args': (60,), 'kwargs': {}} calculated = wrapt.getcallargs(function, 10, 20, 30, 40, 50, 60) self.assertEqual(expected, calculated) def test_unexpected_unicode_keyword(self): def function(a=2): pass kwargs = { u'b': 40 } self.assertRaises(TypeError, wrapt.getcallargs, function, **kwargs)
Add test for unexpected unicode kwargs.
Add test for unexpected unicode kwargs.
Python
bsd-2-clause
GrahamDumpleton/wrapt,GrahamDumpleton/wrapt
397eb3ee376acec005a8d7b5a4c2b2e0193a938d
tests/test_bookmarks.py
tests/test_bookmarks.py
import bookmarks import unittest class FlaskrTestCase(unittest.TestCase): def setUp(self): self.app = bookmarks.app.test_client() # with bookmarks.app.app_context(): bookmarks.database.init_db() def tearDown(self): # with bookmarks.app.app_context(): bookmarks.database.db_session.remove() bookmarks.database.Base.metadata.drop_all( bind=bookmarks.database.engine) def test_empty_db(self): rv = self.app.get('/') assert b'There aren\'t any bookmarks yet.' in rv.data def register(self, username, name, email, password): return self.app.post('/register_user/', data=dict( username=username, name=name, email=email, password=password, confirm=password ), follow_redirects=True) def login(self, username, password): return self.app.post('/login', data=dict( username=username, password=password, confirm=password ), follow_redirects=True) def logout(self): return self.app.get('/logout', follow_redirects=True) def test_register(self): username = 'byanofsky' name = 'Brandon Yanofsky' email = 'byanofsky@me.com' password = 'Brandon123' rv = self.register(username, name, email, password) # print(rv.data) assert (b'Successfully registered ' in rv.data) if __name__ == '__main__': unittest.main()
import bookmarks import unittest class FlaskrTestCase(unittest.TestCase): def setUp(self): self.app = bookmarks.app.test_client() # with bookmarks.app.app_context(): bookmarks.database.init_db() def tearDown(self): # with bookmarks.app.app_context(): bookmarks.database.db_session.remove() bookmarks.database.Base.metadata.drop_all( bind=bookmarks.database.engine) def test_empty_db(self): rv = self.app.get('/') assert b'There aren\'t any bookmarks yet.' in rv.data def register(self, username, name, email, password, confirm=None): return self.app.post('/register_user/', data=dict( username=username, name=name, email=email, password=password, confirm=confirm ), follow_redirects=True) def login(self, username, password): return self.app.post('/login', data=dict( username=username, password=password, confirm=password ), follow_redirects=True) def logout(self): return self.app.get('/logout', follow_redirects=True) def test_register(self): username = 'byanofsky' name = 'Brandon Yanofsky' email = 'byanofsky@me.com' password = 'Brandon123' rv = self.register(username, name, email, password) # print(rv.data) assert (b'Successfully registered ' in rv.data) if __name__ == '__main__': unittest.main()
Add param for confirm field on register test func
Add param for confirm field on register test func
Python
apache-2.0
byanofsky/bookmarks,byanofsky/bookmarks,byanofsky/bookmarks
95fbbe9bac94e171424cb8ee23a675a70607fb62
tests/test_constants.py
tests/test_constants.py
from __future__ import absolute_import, unicode_literals import unittest from draftjs_exporter.constants import Enum, BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES class EnumConstants(unittest.TestCase): def test_enum_returns_the_key_if_valid(self): foo_value = 'foo' e = Enum(foo_value) self.assertEqual(e.foo, foo_value) def test_enum_raises_an_error_for_invalid_keys(self): e = Enum('foo', 'bar') with self.assertRaises(AttributeError): e.invalid_key class TestConstants(unittest.TestCase): def test_block_types(self): self.assertIsInstance(BLOCK_TYPES, object) self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled') def test_entity_types(self): self.assertIsInstance(ENTITY_TYPES, object) self.assertEqual(ENTITY_TYPES.LINK, 'LINK') def test_inline_styles(self): self.assertIsInstance(INLINE_STYLES, object) self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
from __future__ import absolute_import, unicode_literals import unittest from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES, Enum class EnumConstants(unittest.TestCase): def test_enum_returns_the_key_if_valid(self): foo_value = 'foo' e = Enum(foo_value) self.assertEqual(e.foo, foo_value) def test_enum_raises_an_error_for_invalid_keys(self): e = Enum('foo', 'bar') with self.assertRaises(AttributeError): e.invalid_key class TestConstants(unittest.TestCase): def test_block_types(self): self.assertIsInstance(BLOCK_TYPES, object) self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled') def test_entity_types(self): self.assertIsInstance(ENTITY_TYPES, object) self.assertEqual(ENTITY_TYPES.LINK, 'LINK') def test_inline_styles(self): self.assertIsInstance(INLINE_STYLES, object) self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
Fix import order picked up by isort
Fix import order picked up by isort
Python
mit
springload/draftjs_exporter,springload/draftjs_exporter,springload/draftjs_exporter
9519b619c9a2c30ea2a5bf5559675c1d926ec5a4
clouder_template_bind/__openerp__.py
clouder_template_bind/__openerp__.py
# -*- coding: utf-8 -*- ############################################################################## # # Author: Yannick Buron # Copyright 2013 Yannick Buron # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Clouder Template Bind', 'version': '1.0', 'category': 'Community', 'depends': ['clouder'], 'author': 'Yannick Buron', 'license': 'AGPL-3', 'website': 'https://github.com/YannickB', 'description': """ Clouder Template Bind """, 'demo': [], 'data': ['clouder_template_bind_data.xml'], 'installable': True, 'application': True, }
# -*- coding: utf-8 -*- ############################################################################## # # Author: Yannick Buron # Copyright 2013 Yannick Buron # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Clouder Template Bind', 'version': '1.0', 'category': 'Community', 'depends': ['clouder','clouder_template_shinken'], 'author': 'Yannick Buron', 'license': 'AGPL-3', 'website': 'https://github.com/YannickB', 'description': """ Clouder Template Bind """, 'demo': [], 'data': ['clouder_template_bind_data.xml'], 'installable': True, 'application': True, }
Add shinken in bind dependancy
Add shinken in bind dependancy
Python
agpl-3.0
YannickB/odoo-hosting
aa6a74abc382bb6be86fa4a91132a9be51f365a5
tests/test_data_checksums.py
tests/test_data_checksums.py
""" test data_checksums""" from nose.tools import assert_equal def test_data_checksums(): from pyne.data import data_checksums assert_equal(len(data_checksums), 6) assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
""" test data_checksums and hashing functions""" import os from nose.tools import assert_equal, assert_true import pyne # These tests require nuc_data if not os.path.isfile(pyne.nuc_data): raise RuntimeError("Tests require nuc_data.h5. Please run nuc_data_make.") def test_data_checksums(): from pyne.data import data_checksums assert_equal(len(data_checksums), 6) assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be') def test_internal_hashes(): from pyne.dbgen import hashtools hashtools.set_internal_hashes(pyne.nuc_data) for item, val in hashtools.check_internal_hashes(pyne.nuc_data): assert_true(val)
Add test of internal hashes and guarded pyne.nuc_data use
Add test of internal hashes and guarded pyne.nuc_data use
Python
bsd-3-clause
pyne/simplesim
698732f1276f92a94143b0531906caf37e885c28
trello_notifications.py
trello_notifications.py
try: from trello import TrelloCommand from output import Output except ImportError: from .trello import TrelloCommand from .output import Output class TrelloNotificationsCommand(TrelloCommand): def work(self, connection): self.options = [ { 'name': "Unread", 'action': self.show_unread }, { 'name': "Read all", 'action': self.read_all }, { 'name': "Exit", 'action': self.noop } ] self.show_quick_panel(self.items(), self.callback) def items(self): return [option['name'] for option in self.options] def callback(self, index): option = self.options[index] if not option is None: option['action']() def show_unread(self): self.view.run_command("trello_unread_notifications") def read_all(): pass def noop(): pass class TrelloUnreadNotificationsCommand(TrelloCommand): def work(self, connection): member = connection.me output = Output.notifications(member.unread_notifications()) self.show_output_panel(output)
try: from trello import TrelloCommand from output import Output except ImportError: from .trello import TrelloCommand from .output import Output class TrelloNotificationsCommand(TrelloCommand): def work(self, connection): self.options = [ { 'name': "Unread", 'action': self.show_unread }, { 'name': "Read all", 'action': self.read_all }, { 'name': "Exit", 'action': self.noop } ] self.show_quick_panel(self.items(), self.callback) self.connection = connection def items(self): return [option['name'] for option in self.options] def callback(self, index): option = self.options[index] if not option is None: option['action']() def show_unread(self): self.view.run_command("trello_unread_notifications") def read_all(self): pass def noop(self): pass class TrelloUnreadNotificationsCommand(TrelloCommand): def work(self, connection): member = connection.me output = Output.notifications(member.unread_notifications()) self.show_output_panel(output)
Store connection and missing self
Store connection and missing self
Python
mit
NicoSantangelo/sublime-text-trello
66c1b353a7fce078fc9c4209e453906b098a22e8
tests/common.py
tests/common.py
from pprint import pprint, pformat import datetime import os import itertools from sgmock import Fixture from sgmock import TestCase if 'USE_SHOTGUN' in os.environ: from shotgun_api3 import ShotgunError, Fault import shotgun_api3_registry def Shotgun(): return shotgun_api3_registry.connect('sgsession.tests', server='testing') else: from sgmock import Shotgun, ShotgunError, Fault from sgsession import Session, Entity from sgfs import SGFS def mini_uuid(): return os.urandom(4).encode('hex') def timestamp(): return datetime.datetime.now().strftime('%Y%m%d%H%M%S') def minimal(entity): return dict(type=entity['type'], id=entity['id'])
from pprint import pprint, pformat import datetime import itertools import os from sgmock import Fixture from sgmock import TestCase _shotgun_server = os.environ.get('SHOTGUN', 'mock') if _shotgun_server == 'mock': from sgmock import Shotgun, ShotgunError, Fault else: from shotgun_api3 import ShotgunError, Fault import shotgun_api3_registry def Shotgun(): return shotgun_api3_registry.connect('sgsession.tests', server=_shotgun_server) from sgsession import Session, Entity from sgfs import SGFS def mini_uuid(): return os.urandom(4).encode('hex') def timestamp(): return datetime.datetime.now().strftime('%Y%m%d%H%M%S') def minimal(entity): return dict(type=entity['type'], id=entity['id'])
Change the way we test the real Shotgun server
Change the way we test the real Shotgun server
Python
bsd-3-clause
westernx/sgfs,westernx/sgfs
9796e60975474006940af723a6cb8b16bc632ae0
tz_app/context_processors.py
tz_app/context_processors.py
from django.conf import settings from django.utils import timezone try: import pytz except ImportError: pytz = None def timezones(request): alt_timezone = request.session.get('alt_timezone', pytz.utc) return { 'pytz': pytz, 'default_timezone_name': settings.TIME_ZONE, 'timezones': pytz.common_timezones if pytz else [], 'alt_timezone': alt_timezone if pytz else timezone.utc, 'alt_timezone_name': alt_timezone.zone if pytz else 'UTC', }
from django.conf import settings from django.utils import timezone try: import pytz except ImportError: pytz = None def timezones(request): alt_timezone = request.session.get('alt_timezone', (pytz or timezone).utc) return { 'pytz': pytz, 'default_timezone_name': settings.TIME_ZONE, 'timezones': pytz.common_timezones if pytz else [], 'alt_timezone': alt_timezone if pytz else timezone.utc, 'alt_timezone_name': alt_timezone.zone if pytz else 'UTC', }
Fix a bug when pytz isn't installed.
Fix a bug when pytz isn't installed.
Python
bsd-3-clause
aaugustin/django-tz-demo
1ce7f82fd76bca735c3e164cb6a67c9a8656af3b
trade_client.py
trade_client.py
import json import socket from orderbook import create_confirm def send_msg(ip, port, message): '''Sends a raw string to the given ip and port. Closes the socket and returns the response.''' sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect((ip, port)) try: sock.sendall(message) response = sock.recv(1024) finally: sock.close() return response def send_offer(ip, port, offer): '''Sends an offer in JSON form to the given ip and port. offer parameter should be a dictionary.''' message = json.dumps(offer) return send_msg(ip, port, message) def handle_response(response): try: response = json.loads(response) if response and isinstance(response, basestring): return None if response and response['type'] == 'trade': return handle_trade(response) except ValueError: return None def handle_trade(trade): # id is not yet properly implemented so we use this ugly hack for now id = trade['trade-id'].split(';')[0] # Cancel messages are not yet implemented. See issue #7. return create_confirm( id=id, trade_id=trade['trade-id'] )
import json import socket from crypto import retrieve_key from orderbook import create_confirm def send_msg(ip, port, message): '''Sends a raw string to the given ip and port. Closes the socket and returns the response.''' sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect((ip, port)) try: sock.sendall(message) response = sock.recv(1024) finally: sock.close() return response def send_offer(ip, port, offer): '''Sends an offer in JSON form to the given ip and port. offer parameter should be a dictionary.''' message = json.dumps(offer) return send_msg(ip, port, message) def handle_response(response): try: response = json.loads(response) if response and isinstance(response, basestring): return None if response and response['type'] == 'trade': return handle_trade(response) except ValueError: return None def handle_trade(trade): # id is not yet properly implemented so we use this ugly hack for now # Cancel messages are not yet implemented. See issue #7. return create_confirm( trade_id=trade['trade-id'] )
Use public key as id.
Use public key as id.
Python
mit
Tribler/decentral-market
c8b86afc53af25c845c8303111a6e7b17d8c26b4
ciscripts/check/psqcppconan/check.py
ciscripts/check/psqcppconan/check.py
# /ciscripts/check/psqcppconan/check.py # # Run tests and static analysis checks on a polysquare conan c++ project. # # See /LICENCE.md for Copyright information """Run tests and static analysis checks on a polysquare conan c++ project.""" import argparse import os def run(cont, util, shell, argv=None): """Run checks on this conan project.""" parser = argparse.ArgumentParser(description="""Run conan checks""") parser.add_argument("--run-test-binaries", nargs="*", type=str, help="""Files relative to the build dir to run""") result, remainder = parser.parse_known_args(argv or list()) conan_check_script = "check/conan/check.py" conan_check = cont.fetch_and_import(conan_check_script) def _during_test(cont, executor, util, build): """Run the specified test binaries with the --tap switch. We then pipe the output into tap-mocha-reporter. """ del build for binary in result.run_test_binaries or list(): executor(cont, util.running_output, os.path.join(os.getcwd(), binary)) util.print_message(binary) kwargs = { "kind": "polysquare conan c++", "during_test": _during_test } return conan_check.run(cont, util, shell, argv=remainder, override_kwargs=kwargs)
# /ciscripts/check/psqcppconan/check.py # # Run tests and static analysis checks on a polysquare conan c++ project. # # See /LICENCE.md for Copyright information """Run tests and static analysis checks on a polysquare conan c++ project.""" import argparse import os def run(cont, util, shell, argv=None): """Run checks on this conan project.""" parser = argparse.ArgumentParser(description="""Run conan checks""") parser.add_argument("--run-test-binaries", nargs="*", type=str, help="""Files relative to the build dir to run""") result, remainder = parser.parse_known_args(argv or list()) conan_check_script = "check/conan/check.py" conan_check = cont.fetch_and_import(conan_check_script) def _during_test(cont, executor, util, build): """Run the specified test binaries with the --tap switch. We then pipe the output into tap-mocha-reporter. """ del build for binary in result.run_test_binaries or list(): if not os.path.exists(binary) and os.path.exists(binary + ".exe"): binary = binary + ".exe" executor(cont, util.running_output, os.path.join(os.getcwd(), binary)) util.print_message(binary) kwargs = { "kind": "polysquare conan c++", "during_test": _during_test } return conan_check.run(cont, util, shell, argv=remainder, override_kwargs=kwargs)
Allow the use of .exe
psqcppconan: Allow the use of .exe
Python
mit
polysquare/polysquare-ci-scripts,polysquare/polysquare-ci-scripts
e3cb7ad226e3c26cbfa6f9f322ebdb4fde7e7d60
coop_cms/apps/coop_bootstrap/templatetags/coop_bs.py
coop_cms/apps/coop_bootstrap/templatetags/coop_bs.py
# -*- coding: utf-8 -*- """ Some tools for templates """ from __future__ import unicode_literals from django import template from coop_cms.templatetags.coop_utils import is_checkbox as _is_checkbox from coop_cms.templatetags.coop_navigation import NavigationAsNestedUlNode register = template.Library() # Just for compatibility @register.filter(name='is_checkbox') def is_checkbox(field): """returns true if field is a checkbox""" return _is_checkbox(field) @register.tag def navigation_bootstrap(parser, token): """returns the bootstrap-friendly navigation""" return NavigationAsNestedUlNode(li_node="coop_bootstrap/li_node.html")
# -*- coding: utf-8 -*- """ Some tools for templates """ from __future__ import unicode_literals from django import template from coop_cms.templatetags.coop_utils import is_checkbox as _is_checkbox from coop_cms.templatetags.coop_navigation import NavigationAsNestedUlNode, extract_kwargs register = template.Library() # Just for compatibility @register.filter(name='is_checkbox') def is_checkbox(field): """returns true if field is a checkbox""" return _is_checkbox(field) @register.tag def navigation_bootstrap(parser, token): """returns the bootstrap-friendly navigation""" kwargs = dict(li_node="coop_bootstrap/li_node.html") args = token.contents.split() kwargs.update(extract_kwargs(args)) return NavigationAsNestedUlNode(**kwargs)
Fix "navigation_bootstrap" templatetag : arguments were ignored
Fix "navigation_bootstrap" templatetag : arguments were ignored
Python
bsd-3-clause
ljean/coop_cms,ljean/coop_cms,ljean/coop_cms
8a4b576d6df4ef1f174c8698ff9a86dbf2f5bd4a
workshops/models.py
workshops/models.py
from django.db import models from django.db.models.deletion import PROTECT from django_extensions.db.fields import AutoSlugField class Workshop(models.Model): event = models.ForeignKey('events.Event', PROTECT, related_name='workshops') applicant = models.ForeignKey('cfp.Applicant', related_name='workshops') title = models.CharField(max_length=80) slug = AutoSlugField(populate_from="title", unique=True) about = models.TextField() abstract = models.TextField() extra_info = models.TextField(blank=True) skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT) starts_at = models.DateTimeField() duration_hours = models.DecimalField(max_digits=3, decimal_places=1) tickets_link = models.URLField(blank=True) price = models.PositiveIntegerField(blank=True, null=True) @property def approximate_euro_price(self): return int(self.price / 7.5)
from django.db import models from django.db.models.deletion import PROTECT from django_extensions.db.fields import AutoSlugField class Workshop(models.Model): event = models.ForeignKey('events.Event', PROTECT, related_name='workshops') applicant = models.ForeignKey('cfp.Applicant', related_name='workshops') title = models.CharField(max_length=80) slug = AutoSlugField(populate_from="title", unique=True) about = models.TextField() abstract = models.TextField() extra_info = models.TextField(blank=True) skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT) starts_at = models.DateTimeField() duration_hours = models.DecimalField(max_digits=3, decimal_places=1) tickets_link = models.URLField(blank=True) price = models.PositiveIntegerField(blank=True, null=True) @property def approximate_euro_price(self): return int(self.price / 7.5) if self.price else None
Check price exists before using it
Check price exists before using it
Python
bsd-3-clause
WebCampZg/conference-web,WebCampZg/conference-web,WebCampZg/conference-web
ea3660bcc1a9f7be619def8e26dd7b0ab4a873cf
estmator_project/est_client/forms.py
estmator_project/est_client/forms.py
from django.forms import ModelForm, Select, TextInput from .models import Client, Company class ClientCreateForm(ModelForm): class Meta: model = Client fields = [ 'company', 'first_name', 'last_name', 'title', 'cell', 'desk', 'email' ] widgets = { 'company': Select(attrs={'required': True}), } class CompanyCreateForm(ModelForm): class Meta: model = Company fields = [ 'company_name', 'phone', 'address', 'address2', 'city', 'state', 'postal', 'st_rate', 'ot_rate' ] widgets = { 'company_name': TextInput(attrs={'required': True}), } class CompanyListForm(ModelForm): class Meta: model = Client fields = ['company']
from django.forms import ModelForm, Select, TextInput from .models import Client, Company class ClientCreateForm(ModelForm): class Meta: model = Client fields = [ 'company', 'first_name', 'last_name', 'title', 'cell', 'desk', 'email' ] widgets = { 'company': Select(attrs={'required': True}), 'first_name': TextInput(attrs={'required': True}), 'last_name': TextInput(attrs={'required': True}), 'title': TextInput(attrs={'required': True}), 'cell': TextInput(attrs={'required': True}), 'email': TextInput(attrs={'required': True}), } class CompanyCreateForm(ModelForm): class Meta: model = Company fields = [ 'company_name', 'phone', 'address', 'address2', 'city', 'state', 'postal', 'st_rate', 'ot_rate' ] widgets = { 'company_name': TextInput(attrs={'required': True}), 'phone': TextInput(attrs={'required': True}), 'address': TextInput(attrs={'required': True}), 'city': TextInput(attrs={'required': True}), 'postal': TextInput(attrs={'required': True}), } class CompanyListForm(ModelForm): class Meta: model = Client fields = ['company']
Make fields required on new client and company
Make fields required on new client and company
Python
mit
Estmator/EstmatorApp,Estmator/EstmatorApp,Estmator/EstmatorApp
b7c52258d39e5c0ee8fba2be87e8e671e0c583c3
xclib/postfix_io.py
xclib/postfix_io.py
# Only supports isuser request for Postfix virtual mailbox maps import sys import re import logging # Message formats described in `../doc/Protocol.md` class postfix_io: @classmethod def read_request(cls, infd, outfd): # "for line in sys.stdin:" would be more concise but adds unwanted buffering while True: line = infd.readline() if not line: break match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line) if match: yield ('isuser',) + match.group(1,2) else: logging.error('Illegal request format: ' + line) outfd.write('500 Illegal request format\n') outfd.flush() @classmethod def write_response(cls, flag, outfd): if flag == None: outfd.write('400 Trouble connecting to backend\n') elif flag: outfd.write('200 OK\n') else: outfd.write('500 No such user\n') outfd.flush()
# Only supports isuser request for Postfix virtual mailbox maps import sys import re import logging # Message formats described in `../doc/Protocol.md` class postfix_io: @classmethod def read_request(cls, infd, outfd): # "for line in sys.stdin:" would be more concise but adds unwanted buffering while True: line = infd.readline() if not line: break match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line) if match: yield ('isuser',) + match.group(1,2) elif line == 'quit': yield ('quit',) else: logging.error('Illegal request format: ' + line) outfd.write('500 Illegal request format\n') outfd.flush() @classmethod def write_response(cls, flag, outfd): if flag == None: outfd.write('400 Trouble connecting to backend\n') elif flag: outfd.write('200 OK\n') else: outfd.write('500 No such user\n') outfd.flush()
Add quit command to postfix
Add quit command to postfix
Python
mit
jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth
cc5e75078c707ee2b5622700a0ad2890969193c1
opencademy/model/openacademy_course.py
opencademy/model/openacademy_course.py
from openerp import fields, models ''' This module create model of Course ''' class Course(models.Model): ''' This class create model of Course ''' _name = 'openacademy.course' name = fields.Char(string='Title', required=True) # field reserved to identified name rec description = fields.Text(string='Description', required=False) responsible_id = fields.Many2one('res.users', ondelete='set null', string="Responsible", index=True) session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions") _sql_constraints = [ ('name_description_check', 'CHECK(name != description)', "The title of the course should not be the description"), ('name_unique', 'UNIQUE(name)', "The course title must be unique"), ]
from openerp import api, fields, models ''' This module create model of Course ''' class Course(models.Model): ''' This class create model of Course ''' _name = 'openacademy.course' name = fields.Char(string='Title', required=True) # field reserved to identified name rec description = fields.Text(string='Description', required=False) responsible_id = fields.Many2one('res.users', ondelete='set null', string="Responsible", index=True) session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions") _sql_constraints = [ ('name_description_check', 'CHECK(name != description)', "The title of the course should not be the description"), ('name_unique', 'UNIQUE(name)', "The course title must be unique"), ] @api.one # api.one default params: cr, uid, id, context def copy(self, default=None): print "estoy pasando por la funcion heredar de copy en cursos" #default['name'] = self.name + ' (copy) ' copied_count = self.search_count( [('name', '=like', u"Copy of {}%".format(self.name))]) if not copied_count: new_name = u"Copy of {}".format(self.name) else: new_name = u"Copy of {} ({})".format(self.name, copied_count) default['name'] = new_name return super(Course, self).copy(default)
Modify copy method into inherit
[REF] openacademy: Modify copy method into inherit
Python
apache-2.0
LihanHA/opencademy-project
3be9ef4c2ec4c2b10503633c55fd1634f4d5debb
comics/search/indexes.py
comics/search/indexes.py
from django.template.loader import get_template from django.template import Context from haystack import indexes from haystack import site from comics.core.models import Image class ImageIndex(indexes.SearchIndex): document = indexes.CharField(document=True, use_template=True) rendered = indexes.CharField(indexed=False) def prepare_rendered(self, obj): template = get_template('search/results.html') context = Context({'release': obj.get_first_release()}) return template.render(context) site.register(Image, ImageIndex)
from django.template.loader import get_template from django.template import Context from haystack import indexes from haystack import site from comics.core.models import Image class ImageIndex(indexes.SearchIndex): document = indexes.CharField(document=True, use_template=True) rendered = indexes.CharField(indexed=False) def get_updated_field(self): return 'fetched' def prepare_rendered(self, obj): template = get_template('search/results.html') context = Context({'release': obj.get_first_release()}) return template.render(context) site.register(Image, ImageIndex)
Add get_updated_field to search index
Add get_updated_field to search index
Python
agpl-3.0
jodal/comics,jodal/comics,klette/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,klette/comics,klette/comics,datagutten/comics
2b7de99f1de941c66dd282efbdf423e95c104cc9
mysite/missions/management/commands/svn_precommit.py
mysite/missions/management/commands/svn_precommit.py
from django.core.management import BaseCommand, CommandError from mysite.missions import controllers import sys class Command(BaseCommand): args = '<repo_path> <txn_id>' help = 'SVN pre-commit hook for mission repositories' def handle(self, *args, **options): # This management command is called from the mission svn repositories # as the pre-commit hook. It receives the repository path and transaction # ID as arguments, and it receives a description of applicable lock # tokens on stdin. Its environment and current directory are undefined. if len(args) != 2: raise CommandError, 'Exactly two arguments are expected.' repo_path, txn_id = args try: controllers.SvnCommitMission.pre_commit_hook(repo_path, txn_id) except controllers.IncorrectPatch, e: sys.stderr.write(str(e) + '\n\n') raise CommandError, 'The commit failed to validate.'
from django.core.management import BaseCommand, CommandError from mysite.missions import controllers import sys class Command(BaseCommand): args = '<repo_path> <txn_id>' help = 'SVN pre-commit hook for mission repositories' def handle(self, *args, **options): # This management command is called from the mission svn repositories # as the pre-commit hook. It receives the repository path and transaction # ID as arguments, and it receives a description of applicable lock # tokens on stdin. Its environment and current directory are undefined. if len(args) != 2: raise CommandError, 'Exactly two arguments are expected.' repo_path, txn_id = args try: controllers.SvnCommitMission.pre_commit_hook(repo_path, txn_id) except controllers.IncorrectPatch, e: sys.stderr.write('\n ' + str(e) + '\n\n') raise CommandError, 'The commit failed to validate.'
Make the error message stand out more for the user when we reject an svn commit.
Make the error message stand out more for the user when we reject an svn commit.
Python
agpl-3.0
SnappleCap/oh-mainline,heeraj123/oh-mainline,Changaco/oh-mainline,mzdaniel/oh-mainline,sudheesh001/oh-mainline,nirmeshk/oh-mainline,vipul-sharma20/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,Changaco/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,eeshangarg/oh-mainline,SnappleCap/oh-mainline,moijes12/oh-mainline,waseem18/oh-mainline,moijes12/oh-mainline,ojengwa/oh-mainline,nirmeshk/oh-mainline,vipul-sharma20/oh-mainline,heeraj123/oh-mainline,onceuponatimeforever/oh-mainline,ojengwa/oh-mainline,heeraj123/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,willingc/oh-mainline,ehashman/oh-mainline,jledbetter/openhatch,heeraj123/oh-mainline,nirmeshk/oh-mainline,waseem18/oh-mainline,waseem18/oh-mainline,Changaco/oh-mainline,ehashman/oh-mainline,willingc/oh-mainline,onceuponatimeforever/oh-mainline,SnappleCap/oh-mainline,moijes12/oh-mainline,campbe13/openhatch,moijes12/oh-mainline,ehashman/oh-mainline,campbe13/openhatch,onceuponatimeforever/oh-mainline,openhatch/oh-mainline,ehashman/oh-mainline,SnappleCap/oh-mainline,vipul-sharma20/oh-mainline,heeraj123/oh-mainline,waseem18/oh-mainline,willingc/oh-mainline,nirmeshk/oh-mainline,ojengwa/oh-mainline,Changaco/oh-mainline,willingc/oh-mainline,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,waseem18/oh-mainline,campbe13/openhatch,sudheesh001/oh-mainline,sudheesh001/oh-mainline,eeshangarg/oh-mainline,willingc/oh-mainline,jledbetter/openhatch,moijes12/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,campbe13/openhatch,openhatch/oh-mainline,ehashman/oh-mainline,jledbetter/openhatch,jledbetter/openhatch,SnappleCap/oh-mainline,sudheesh001/oh-mainline,onceuponatimeforever/oh-mainline,openhatch/oh-mainline,eeshangarg/oh-mainline,sudheesh001/oh-mainline,onceuponatimeforever/oh-mainline,vipul-sharma20/oh-mainline,eeshangarg/oh-mainline,campbe13/openhatch,nirmeshk/oh-mainline,openhatch/oh-mainline,ojengwa/oh-mainline,ojengwa/oh-mainline,jledbetter/openhatch
b0701205f0b96645d3643bab5188f349cd604603
binaries/streamer_binaries/__init__.py
binaries/streamer_binaries/__init__.py
import os __version__ = '0.5.0' # Module level variables. ffmpeg = '' """The path to the installed FFmpeg binary.""" ffprobe = '' """The path to the installed FFprobe binary.""" packager = '' """The path to the installed Shaka Packager binary.""" # Get the directory path where this __init__.py file resides. _dir_path = os.path.abspath(os.path.dirname(__file__)) # This will be executed at import time. for _file in os.listdir(_dir_path): if _file.startswith('ffmpeg'): ffmpeg = os.path.join(_dir_path, _file) elif _file.startswith('ffprobe'): ffprobe = os.path.join(_dir_path, _file) elif _file.startswith('packager'): packager = os.path.join(_dir_path, _file)
import os import platform __version__ = '0.5.0' # Get the directory path where this __init__.py file resides. _dir_path = os.path.abspath(os.path.dirname(__file__)) # Compute the part of the file name that indicates the OS. _os = { 'Linux': 'linux', 'Windows': 'win', 'Darwin': 'osx', }[platform.system()] # Compute the part of the file name that indicates the CPU architecture. _cpu = { 'x86_64': 'x64', # Linux/Mac report this key 'AMD64': 'x64', # Windows reports this key 'aarch64': 'arm64', }[platform.machine()] # Module level variables. ffmpeg = os.path.join(_dir_path, 'ffmpeg-{}-{}'.format(_os, _cpu)) """The path to the installed FFmpeg binary.""" ffprobe = os.path.join(_dir_path, 'ffprobe-{}-{}'.format(_os, _cpu)) """The path to the installed FFprobe binary.""" packager = os.path.join(_dir_path, 'packager-{}-{}'.format(_os, _cpu)) """The path to the installed Shaka Packager binary."""
Fix usage of local streamer_binaries module
build: Fix usage of local streamer_binaries module The old code would search the directory for the binary to use. This worked fine if the package were installed, but when adding the module path to PYTHONPATH, this technique would fail because the folder would have executables for all architetures. Now we will compute the exact filename we expect for each exectuable, allowing the module to be used locally without installation. This is useful for testing pre-release versions of the module. Change-Id: I35d3a1009b677ef9d29379147312abe3d0a7f8b2
Python
apache-2.0
shaka-project/shaka-streamer,shaka-project/shaka-streamer
d57670995709ae60e9cbed575b1ac9e63cba113a
src/env.py
src/env.py
class Environment: def __init__(self, par=None, bnd=None): if bnd: self.binds = bnd else: self.binds = {} self.parent = par if par: self.level = self.parent.level + 1 else: self.level = 0 def get(self, key): if key in self.binds: return self.binds[key] elif self.parent: return self.parent.get(key) else: return None def set(self, key, value): if key in self.binds: self.binds[key] = value elif self.parent: self.parent.set(key,value) else: self.binds[key] = value def __repr__( self): ret = "\n%s:\n" % self.level keys = self.binds.keys() for key in keys: ret = ret + " %5s: %s\n" % (key, self.binds[key]) return ret
class Environment: def __init__(self, par=None, bnd=None): if bnd: self.binds = bnd else: self.binds = {} self.parent = par if par: self.level = self.parent.level + 1 else: self.level = 0 def get(self, key): if key in self.binds: return self.binds[key] elif self.parent: return self.parent.get(key) else: raise ValueError("Invalid symbol " + key) def set(self, key, value): if key in self.binds: self.binds[key] = value elif self.parent: self.parent.set(key,value) else: self.binds[key] = value def __repr__( self): ret = "\n%s:\n" % self.level keys = self.binds.keys() for key in keys: ret = ret + " %5s: %s\n" % (key, self.binds[key]) return ret
Raise an error when a symbol cannot be found
Raise an error when a symbol cannot be found
Python
mit
readevalprintlove/lithp,fogus/lithp,fogus/lithp,readevalprintlove/lithp,magomsk/lithp,readevalprintlove/lithp,fogus/lithp,magomsk/lithp,magomsk/lithp
d54544ecf6469eedce80d6d3180aa826c1fcc19a
cpgintegrate/__init__.py
cpgintegrate/__init__.py
import pandas import traceback import typing def process_files(file_iterator: typing.Iterator[typing.IO], processor: typing.Callable) -> pandas.DataFrame: def get_frames(): for file in file_iterator: df = processor(file) yield (df .assign(Source=getattr(file, 'name', None), SubjectID=getattr(file, 'cpgintegrate_subject_id', None), FileSubjectID=df.index if df.index.name else None)) return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")
import pandas import typing def process_files(file_iterator: typing.Iterator[typing.IO], processor: typing.Callable) -> pandas.DataFrame: def get_frames(): for file in file_iterator: source = getattr(file, 'name', None) subject_id = getattr(file, 'cpgintegrate_subject_id', None) try: df = processor(file) except Exception as e: raise ProcessingException({"Source": source, 'SubjectID': subject_id}) from e yield (df .assign(Source=getattr(file, 'name', None), SubjectID=getattr(file, 'cpgintegrate_subject_id', None), FileSubjectID=df.index if df.index.name else None)) return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID") class ProcessingException(Exception): """cpgintegrate processing error"""
Add file source and subjectID to processing exceptions
Add file source and subjectID to processing exceptions
Python
agpl-3.0
PointyShinyBurning/cpgintegrate
68ca61ec2206b83cca34a319a472961793771407
setup.py
setup.py
import os from distutils.core import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() def package_data(package): package_data = [] for dirpath, dirnames, filenames in os.walk( os.path.join(os.path.dirname(__file__), package)): for i, dirname in enumerate(dirnames): if dirname.startswith('.'): del dirnames[i] if '__init__.py' in filenames: continue elif filenames: for f in filenames: package_data.append( os.path.join(dirpath[len(package)+len(os.sep):], f)) return {package: package_data} setup( name='django-linkcheck', version='0.1.0', description="A Django app that will analyze and report on links in any " "model that you register with it.", long_description=read('README.rst'), author='Andy Baker', author_email='andy@andybak.net', license='BSD', url='http://github.com/andybak/django-linkcheck/', packages=[ 'linkcheck', 'linkcheck.management', 'linkcheck.management.commands', ], package_data=package_data('linkcheck'), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], )
import os from distutils.core import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='django-linkcheck', version='0.1.0', description="A Django app that will analyze and report on links in any " "model that you register with it.", long_description=read('README.rst'), author='Andy Baker', author_email='andy@andybak.net', license='BSD', url='http://github.com/andybak/django-linkcheck/', packages=[ 'linkcheck', 'linkcheck.management', 'linkcheck.management.commands', ], package_data={ 'linkcheck': [ 'templates/linkcheck/*.html', 'templates/linkcheck/*.xhtml', ] }, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], )
Use static definition for package data.
Use static definition for package data.
Python
bsd-3-clause
Ixxy-Open-Source/django-linkcheck-old,claudep/django-linkcheck,Ixxy-Open-Source/django-linkcheck-old,claudep/django-linkcheck,AASHE/django-linkcheck,yvess/django-linkcheck,DjangoAdminHackers/django-linkcheck,DjangoAdminHackers/django-linkcheck
d1d0576b94ce000a77e08bd8353f5c1c10b0839f
setup.py
setup.py
#!/usr/bin/env python from distutils.core import setup setup( name = 'AudioTranscode', version = '1.0', packages = ['audioTranscode'], scripts = ['transcode'], author = 'Jeffrey Aylesworth', author_email = 'jeffrey@aylesworth.ca', license = 'MIT', url = 'http://github.com/jeffayle/Transcode' )
#!/usr/bin/env python from distutils.core import setup setup( name = 'AudioTranscode', version = '1.0', packages = ['audioTranscode','audioTranscode.encoders','audioTranscode.decoders'], scripts = ['transcode'], author = 'Jeffrey Aylesworth', author_email = 'jeffrey@aylesworth.ca', license = 'MIT', url = 'http://github.com/jeffayle/Transcode' )
Include .encoders and .decoders packages with the distribution
Include .encoders and .decoders packages with the distribution
Python
isc
jeffayle/Transcode
ef7f0090bfb7f37fa584123520b02f69a3a392a0
setup.py
setup.py
# # Copyright 2013 by Arnold Krille <arnold@arnoldarts.de> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from distutils.core import setup setup( name="workout", version="0.2.0", description="Store and display workout-data from FIT-files in mezzanine.", author="Arnold Krille", author_email="arnold@arnoldarts.de", url="http://github.com/kampfschlaefer/mezzanine-workout", license=open('LICENSE', 'r').read(), packages=['workout'], package_data={'workout': ['templates/workout/*']}, install_requires=['fitparse==0.0.1-dev'], dependency_links=['git+https://github.com/kampfschlaefer/python-fitparse.git@ng#egg=fitparse-0.0.1-dev'], )
# # Copyright 2013 by Arnold Krille <arnold@arnoldarts.de> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from distutils.core import setup setup( name="workout", version="0.2.1", description="Store and display workout-data from FIT-files in mezzanine.", author="Arnold Krille", author_email="arnold@arnoldarts.de", url="http://github.com/kampfschlaefer/mezzanine-workout", license=open('LICENSE', 'r').read(), packages=['workout'], package_data={'workout': ['templates/workout/*', 'static/*']}, install_requires=['fitparse==0.0.1-dev'], dependency_links=['git+https://github.com/kampfschlaefer/python-fitparse.git@ng#egg=fitparse-0.0.1-dev'], )
Fix inclusion of static files into the package
Fix inclusion of static files into the package and increase the version-number a bit.
Python
apache-2.0
kampfschlaefer/mezzanine-workout,kampfschlaefer/mezzanine-workout,kampfschlaefer/mezzanine-workout
5c8754aefa0a0b2f9e49d95970475a66a6de9510
start.py
start.py
from core.computer import Computer from time import sleep from console import start as start_console # Initialize computer instance computer = Computer() computer.start_monitoring() computer.processor.start_monitoring() for mem in computer.nonvolatile_memory: mem.start_monitoring() computer.virtual_memory.start_monitoring() # Start console interface start_console(computer) # Shutdown computer.processor.stop_monitoring() for mem in computer.nonvolatile_memory: mem.stop_monitoring() computer.virtual_memory.stop_monitoring() sleep(1)
from core.computer import Computer from time import sleep from console import start as start_console # Initialize computer instance computer = Computer() computer.start_monitoring() computer.processor.start_monitoring() for mem in computer.nonvolatile_memory: mem.start_monitoring() computer.virtual_memory.start_monitoring() # Start console interface start_console(computer) # Shutdown computer.processor.stop_monitoring() for mem in computer.nonvolatile_memory: mem.stop_monitoring() computer.virtual_memory.stop_monitoring() computer.stop_monitoring() sleep(1)
Stop monitoring computer on shutdown.
Stop monitoring computer on shutdown.
Python
bsd-3-clause
uzumaxy/pyspectator
746420daec76bf605f0da57902bb60b2cb17c87d
bcbio/bed/__init__.py
bcbio/bed/__init__.py
import pybedtools as bt import six def concat(bed_files, catted=None): """ recursively concat a set of BED files, returning a sorted bedtools object of the result """ bed_files = [x for x in bed_files if x] if len(bed_files) == 0: if catted: # move to a .bed extension for downstream tools if not already sorted_bed = catted.sort() if not sorted_bed.fn.endswith(".bed"): return sorted_bed.moveto(sorted_bed.fn + ".bed") else: return sorted_bed else: return catted if not catted: bed_files = list(bed_files) catted = bt.BedTool(bed_files.pop()) else: catted = catted.cat(bed_files.pop(), postmerge=False, force_truncate=False) return concat(bed_files, catted) def merge(bedfiles): """ given a BED file or list of BED files merge them an return a bedtools object """ if isinstance(bedfiles, list): catted = concat(bedfiles) else: catted = concat([bedfiles]) if catted: return concat(bedfiles).sort().merge() else: return catted def minimize(bed_file): """ strip a BED file down to its three necessary columns: chrom start end """ return bt.BedTool(bed_file).cut(range(3))
import pybedtools as bt import six def concat(bed_files, catted=None): """ recursively concat a set of BED files, returning a sorted bedtools object of the result """ bed_files = [x for x in bed_files if x] if len(bed_files) == 0: if catted: # move to a .bed extension for downstream tools if not already sorted_bed = catted.sort() if not sorted_bed.fn.endswith(".bed"): return sorted_bed.moveto(sorted_bed.fn + ".bed") else: return sorted_bed else: return catted if not catted: bed_files = list(bed_files) catted = bt.BedTool(bed_files.pop()) else: catted = catted.cat(bed_files.pop(), postmerge=False, force_truncate=False) return concat(bed_files, catted) def merge(bedfiles): """ given a BED file or list of BED files merge them an return a bedtools object """ if isinstance(bedfiles, list): catted = concat(bedfiles) else: catted = concat([bedfiles]) if catted: return concat(bedfiles).sort().merge() else: return catted def minimize(bed_file): """ strip a BED file down to its three necessary columns: chrom start end """ if not bed_file: return bed_file else: return bt.BedTool(bed_file).cut(range(3))
Return None if no bed file exists to be opened.
Return None if no bed file exists to be opened.
Python
mit
guillermo-carrasco/bcbio-nextgen,biocyberman/bcbio-nextgen,lbeltrame/bcbio-nextgen,chapmanb/bcbio-nextgen,mjafin/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,a113n/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,vladsaveliev/bcbio-nextgen,brainstorm/bcbio-nextgen,chapmanb/bcbio-nextgen,lbeltrame/bcbio-nextgen,vladsaveliev/bcbio-nextgen,lpantano/bcbio-nextgen,mjafin/bcbio-nextgen,gifford-lab/bcbio-nextgen,vladsaveliev/bcbio-nextgen,gifford-lab/bcbio-nextgen,lbeltrame/bcbio-nextgen,a113n/bcbio-nextgen,biocyberman/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,biocyberman/bcbio-nextgen,mjafin/bcbio-nextgen,a113n/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,gifford-lab/bcbio-nextgen,brainstorm/bcbio-nextgen,lpantano/bcbio-nextgen,chapmanb/bcbio-nextgen,brainstorm/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,lpantano/bcbio-nextgen
1d448b65840509c5f21abb7f5ad65a6ce20b139c
packs/travisci/actions/lib/action.py
packs/travisci/actions/lib/action.py
from st2actions.runners.pythonrunner import Action import requests class TravisCI(Action): def __init__(self, config): super(TravisCI, self).__init__(config) def _init_header(self): travis_header = { 'User_Agent': self.config['User-Agent'], 'Accept': self.config['Accept'], 'Host': self.config['Host'], } return travis_header def _auth_header(self): _HEADERS = self._init_header() _HEADERS['Authorization'] = self.config["Authorization"] _HEADERS['Content-Type'] = self.config["Content-Type"] return _HEADERS def _perform_request(self, uri, method, data=None, requires_auth=False): if method == "GET": if requires_auth: _HEADERS = self._auth_header() else: _HEADERS = self._init_header() response = requests.get(uri, headers=_HEADERS) elif method == "POST": _HEADERS = self._auth_header response = requests.post(uri, headers=_HEADERS) elif method == "PUT": _HEADERS = self._auth_header() _HEADERS['Authorization'] = self.config["Authorization"] _HEADERS['Content-Type'] = self.config["Content-Type"] response = requests.put(uri, data=data, headers=_HEADERS) return response
import requests from st2actions.runners.pythonrunner import Action API_URL = 'https://api.travis-ci.org' HEADERS_ACCEPT = 'application/vnd.travis-ci.2+json' HEADERS_HOST = '' class TravisCI(Action): def __init__(self, config): super(TravisCI, self).__init__(config) def _get_auth_headers(self): headers = {} headers['Authorization'] = self.config["Authorization"] headers['Content-Type'] = self.config["Content-Type"] return headers def _perform_request(self, uri, method, data=None, requires_auth=False): if method == "GET": if requires_auth: headers = self._get_auth_headers() else: headers = {} response = requests.get(uri, headers=headers) elif method == 'POST': headers = self._get_auth_headers() response = requests.post(uri, headers=headers) elif method == 'PUT': headers = self._get_auth_headers() response = requests.put(uri, data=data, headers=headers) return response
Remove unnecessary values from the config - those should just be constants.
Remove unnecessary values from the config - those should just be constants.
Python
apache-2.0
StackStorm/st2contrib,StackStorm/st2contrib,pidah/st2contrib,pidah/st2contrib,pearsontechnology/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib,tonybaloney/st2contrib,psychopenguin/st2contrib,digideskio/st2contrib,pearsontechnology/st2contrib,lmEshoo/st2contrib,tonybaloney/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,lmEshoo/st2contrib,armab/st2contrib,armab/st2contrib,digideskio/st2contrib,pidah/st2contrib,armab/st2contrib,psychopenguin/st2contrib
ed8a5b8f34614997a13cdcda03dc4988c1cf4090
urls.py
urls.py
from django.conf.urls.defaults import patterns, include, url from django.contrib import admin from okupy.login.views import * admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'okupy.views.home', name='home'), # url(r'^okupy/', include('okupy.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^$', mylogin) )
from django.conf.urls.defaults import patterns, include, url from django.contrib import admin from okupy.login.views import * admin.autodiscover() urlpatterns = patterns('', url(r'^$', mylogin) url(r'^admin/', include(admin.site.urls)), )
Remove comments, properly enable admin panel
Remove comments, properly enable admin panel
Python
agpl-3.0
gentoo/identity.gentoo.org,dastergon/identity.gentoo.org,gentoo/identity.gentoo.org,dastergon/identity.gentoo.org
68eb1bd58b84c1937f6f8d15bb9ea9f02a402e22
tests/cdscommon.py
tests/cdscommon.py
import hashlib import os import shutil import cdsapi SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data') EXTENSIONS = {'grib': '.grib', 'netcdf': '.nc'} def ensure_data(dataset, request, folder=SAMPLE_DATA_FOLDER, name='{uuid}.grib'): request_text = str(sorted(request.items())).encode('utf-8') uuid = hashlib.sha3_224(request_text).hexdigest()[:10] format = request.get('format', 'grib') ext = EXTENSIONS.get(format, '.bin') name = name.format(**locals()) path = os.path.join(SAMPLE_DATA_FOLDER, name) if not os.path.exists(path): c = cdsapi.Client() try: c.retrieve(dataset, request, target=path + '.tmp') shutil.move(path + '.tmp', path) except: os.unlink(path + '.tmp') raise return path def message_count(dataset, request, count=1): if dataset == 'reanalysis-era5-single-levels' \ and request.get('product_type') == 'ensemble_members': count = 20 elif dataset == 'reanalysis-era5-pressure-levels' \ and request.get('product_type') == 'ensemble_members': count = 20 elif dataset == 'seasonal-original-single-levels': count = 51 elif dataset.startswith('seasonal-') and request.get('product_type') == 'monthly_mean': count = 51 for key in ['year', 'month', 'day', 'time', 'leadtime_hour', 'leadtime_month']: value = request.get(key) if isinstance(value, list): count *= len(value) return count
import hashlib import os import shutil import cdsapi SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data') EXTENSIONS = {'grib': '.grib', 'netcdf': '.nc'} def ensure_data(dataset, request, folder=SAMPLE_DATA_FOLDER, name='{uuid}.grib'): request_text = str(sorted(request.items())).encode('utf-8') uuid = hashlib.sha3_224(request_text).hexdigest()[:10] format = request.get('format', 'grib') ext = EXTENSIONS.get(format, '.bin') name = name.format(**locals()) path = os.path.join(SAMPLE_DATA_FOLDER, name) if not os.path.exists(path): c = cdsapi.Client() try: c.retrieve(dataset, request, target=path + '.tmp') shutil.move(path + '.tmp', path) except: os.unlink(path + '.tmp') raise return path
Drop impossible to get right code.
Drop impossible to get right code.
Python
apache-2.0
ecmwf/cfgrib
db6b869eae416e72fa30b1d7271b0ed1d7dc1a55
sqlalchemy_json/__init__.py
sqlalchemy_json/__init__.py
from sqlalchemy.ext.mutable import ( Mutable, MutableDict) from sqlalchemy_utils.types.json import JSONType from . track import ( TrackedDict, TrackedList) __all__ = 'MutableJson', 'NestedMutableJson' class NestedMutableDict(TrackedDict, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, dict): return cls(value) return super(cls).coerce(key, value) class NestedMutableList(TrackedList, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, list): return cls(value) return super(cls).coerce(key, value) class NestedMutable(Mutable): """SQLAlchemy `mutable` extension with nested change tracking.""" @classmethod def coerce(cls, key, value): """Convert plain dictionary to NestedMutable.""" if isinstance(value, cls): return value if isinstance(value, dict): return NestedMutableDict.coerce(key, value) if isinstance(value, list): return NestedMutableList.coerce(key, value) return super(cls).coerce(key, value) class MutableJson(JSONType): """JSON type for SQLAlchemy with change tracking at top level.""" class NestedMutableJson(JSONType): """JSON type for SQLAlchemy with nested change tracking.""" MutableDict.associate_with(MutableJson) NestedMutable.associate_with(NestedMutableJson)
from sqlalchemy.ext.mutable import ( Mutable, MutableDict) from sqlalchemy_utils.types.json import JSONType from . track import ( TrackedDict, TrackedList) __all__ = 'MutableJson', 'NestedMutableJson' class NestedMutableDict(TrackedDict, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, dict): return cls(value) return super(cls).coerce(key, value) class NestedMutableList(TrackedList, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, list): return cls(value) return super(cls).coerce(key, value) class NestedMutable(Mutable): """SQLAlchemy `mutable` extension with nested change tracking.""" @classmethod def coerce(cls, key, value): """Convert plain dictionary to NestedMutable.""" if value is None: return value if isinstance(value, cls): return value if isinstance(value, dict): return NestedMutableDict.coerce(key, value) if isinstance(value, list): return NestedMutableList.coerce(key, value) return super(cls).coerce(key, value) class MutableJson(JSONType): """JSON type for SQLAlchemy with change tracking at top level.""" class NestedMutableJson(JSONType): """JSON type for SQLAlchemy with nested change tracking.""" MutableDict.associate_with(MutableJson) NestedMutable.associate_with(NestedMutableJson)
Fix error when setting JSON value to be `None`
Fix error when setting JSON value to be `None` Previously this would raise an attribute error as `None` does not have the `coerce` attribute.
Python
bsd-2-clause
edelooff/sqlalchemy-json
edf95105b7522b115dd4d3882ed57e707126c6af
timepiece/admin.py
timepiece/admin.py
from django.contrib import admin from django.contrib.auth.models import Permission from django.contrib.contenttypes.models import ContentType class PermissionAdmin(admin.ModelAdmin): list_display = ['__unicode__', 'codename'] list_filter = ['content_type__app_label'] class ContentTypeAdmin(admin.ModelAdmin): list_display = ['id', 'app_label', 'model'] list_filter = ['app_label'] admin.site.register(Permission, PermissionAdmin) admin.site.register(ContentType, ContentTypeAdmin)
from django.contrib import admin from django.contrib.auth.models import Permission from django.contrib.contenttypes.models import ContentType class PermissionAdmin(admin.ModelAdmin): list_display = ['content_type', 'codename', 'name'] list_filter = ['content_type__app_label'] class ContentTypeAdmin(admin.ModelAdmin): list_display = ['id', 'app_label', 'model'] list_filter = ['app_label'] admin.site.register(Permission, PermissionAdmin) admin.site.register(ContentType, ContentTypeAdmin)
Update Python/Django: Remove unnecessary reference to __unicode__
Update Python/Django: Remove unnecessary reference to __unicode__
Python
mit
BocuStudio/django-timepiece,caktus/django-timepiece,arbitrahj/django-timepiece,caktus/django-timepiece,arbitrahj/django-timepiece,BocuStudio/django-timepiece,caktus/django-timepiece,BocuStudio/django-timepiece,arbitrahj/django-timepiece
20017da43fe1bf5287b33d9d2fc7f597850bb5b5
readthedocs/settings/proxito/base.py
readthedocs/settings/proxito/base.py
""" Base settings for Proxito Some of these settings will eventually be backported into the main settings file, but currently we have them to be able to run the site with the old middleware for a staged rollout of the proxito code. """ class CommunityProxitoSettingsMixin: ROOT_URLCONF = 'readthedocs.proxito.urls' USE_SUBDOMAIN = True @property def MIDDLEWARE(self): # noqa # Use our new middleware instead of the old one classes = super().MIDDLEWARE classes = list(classes) index = classes.index( 'readthedocs.core.middleware.SubdomainMiddleware' ) classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware' middleware_to_remove = ( 'readthedocs.core.middleware.SingleVersionMiddleware', 'csp.middleware.CSPMiddleware', ) for mw in middleware_to_remove: if mw in classes: classes.remove(mw) return classes
""" Base settings for Proxito Some of these settings will eventually be backported into the main settings file, but currently we have them to be able to run the site with the old middleware for a staged rollout of the proxito code. """ class CommunityProxitoSettingsMixin: ROOT_URLCONF = 'readthedocs.proxito.urls' USE_SUBDOMAIN = True @property def DATABASES(self): # This keeps connections to the DB alive, # which reduces latency with connecting to postgres dbs = getattr(super(), 'DATABASES', {}) for db in dbs.keys(): dbs[db]['CONN_MAX_AGE'] = 86400 return dbs @property def MIDDLEWARE(self): # noqa # Use our new middleware instead of the old one classes = super().MIDDLEWARE classes = list(classes) index = classes.index( 'readthedocs.core.middleware.SubdomainMiddleware' ) classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware' middleware_to_remove = ( 'readthedocs.core.middleware.SingleVersionMiddleware', 'csp.middleware.CSPMiddleware', ) for mw in middleware_to_remove: if mw in classes: classes.remove(mw) return classes
Expand the logic in our proxito mixin.
Expand the logic in our proxito mixin. This makes proxito mixin match production for .com/.org in the areas where we are overriding the same things.
Python
mit
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
b021fa0335414d3693aabf4c32b7219f0ba33369
holviapi/tests/test_api_idempotent.py
holviapi/tests/test_api_idempotent.py
# -*- coding: utf-8 -*- import os import pytest import holviapi @pytest.fixture def connection(): pool = os.environ.get('HOLVI_POOL', None) key = os.environ.get('HOLVI_KEY', None) if not pool or not key: raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests") cnc = holviapi.Connection(pool,key) return cnc @pytest.fixture def invoiceapi(): cnc = connection() ia = holviapi.InvoiceAPI(cnc) return ia def test_list_invoices(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice def test_get_invoice(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice i2 = invoiceapi.get_invoice(i.code) assert i.code == i2.code
# -*- coding: utf-8 -*- import os import pytest import holviapi @pytest.fixture def connection(): pool = os.environ.get('HOLVI_POOL', None) key = os.environ.get('HOLVI_KEY', None) if not pool or not key: raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests") cnc = holviapi.Connection(pool,key) return cnc @pytest.fixture def invoiceapi(): cnc = connection() ia = holviapi.InvoiceAPI(cnc) return ia @pytest.fixture def categoriesapi(): cnc = connection() ca = holviapi.CategoriesAPI(cnc) return ca def test_list_invoices(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice def test_get_invoice(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice i2 = invoiceapi.get_invoice(i.code) assert i.code == i2.code def test_list_income_categories(categoriesapi): l = categoriesapi.list_income_categories() c = next(l) assert type(c) == holviapi.IncomeCategory def test_list_expense_categories(categoriesapi): l = categoriesapi.list_expense_categories() c = next(l) assert type(c) == holviapi.ExpenseCategory def test_get_category(categoriesapi): l = categoriesapi.list_income_categories() c = next(l) assert type(c) == holviapi.IncomeCategory c2 = categoriesapi.get_category(c.code) assert c.code == c2.code
Add more tests against live API
Add more tests against live API
Python
mit
rambo/python-holviapi,rambo/python-holviapi
ce639400d48462bdc593e20d13979c33ed4c7fe9
commands/globaladd.py
commands/globaladd.py
from devbot import chat def call(message: str, name, protocol, cfg, commands): if ' ' in message: chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name)) return chat.say('/msg {} Invited {} to GlobalChat'.format(name, message)) chat.say_wrap('/msg {}'.format(message), 'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.') chat.say('/nlip GlobalChat {}'.format(message))
from devbot import chat def call(message: str, name, protocol, cfg, commands): if message is '': chat.say('/msg {} {}'.format(name, commands['help']['globaladd'].format('globaladd'))) return if ' ' in message: chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name)) return chat.say('/msg {} Invited {} to GlobalChat'.format(name, message)) chat.say_wrap('/msg {}'.format(message), 'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.') chat.say('/nlip GlobalChat {}'.format(message))
Fix missing command crash with gadd
Fix missing command crash with gadd
Python
mit
Ameliorate/DevotedBot,Ameliorate/DevotedBot
300e946cd72561c69141f65768debed9d0682abb
utils/run_tests.py
utils/run_tests.py
#!/usr/bin/env python """ Run Arista Transcode Tests ========================== Generate test files in various formats and transcode them to all available output devices and qualities. """ import os import subprocess import sys sys.path.append(os.path.dirname(os.path.dirname(__file__))) import arista; arista.init() if not os.path.exists("tests"): os.system("./utils/generate_tests.py") files = os.listdir("tests") status = [] try: for id, device in arista.presets.get().items(): for file in files: print device.make + " " + device.model + ": " + file cmd = "./arista-transcode -q -d %s tests/%s test_output" % (id, file) print cmd ret = subprocess.call(cmd, shell=True) if ret: status.append([file, device, True]) else: status.append([file, device, False]) print "Report" print "======" for file, device, failed in status: if failed: print device.make + " " + device.model + " (" + \ file + "): Failed" else: print device.make + " " + device.model + " (" + \ file + "): Succeeded" print "Tests completed." except KeyboardInterrupt: pass
#!/usr/bin/env python """ Run Arista Transcode Tests ========================== Generate test files in various formats and transcode them to all available output devices and qualities. """ import os import subprocess import sys sys.path.append(os.path.dirname(os.path.dirname(__file__))) import arista; arista.init() if not os.path.exists("tests"): os.system("./utils/generate_tests.py") files = os.listdir("tests") status = [] try: for id, device in arista.presets.get().items(): for file in files: print device.make + " " + device.model + ": " + file cmd = "./arista-transcode -q -d %s -o test_output tests/%s" % (id, file) print cmd ret = subprocess.call(cmd, shell=True) if ret: status.append([file, device, True]) else: status.append([file, device, False]) except KeyboardInterrupt: pass print "Report" print "======" for file, device, failed in status: if failed: print device.make + " " + device.model + " (" + \ file + "): Failed" else: print device.make + " " + device.model + " (" + \ file + "): Succeeded" print "Tests completed."
Update test runner syntax to the new arista-transcode syntax and always output a status report even if the user stops the tests early.
Update test runner syntax to the new arista-transcode syntax and always output a status report even if the user stops the tests early.
Python
lgpl-2.1
danielgtaylor/arista,danielgtaylor/arista