commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
1d269506eab765a70bd7a59cd1806d86146ebabc | Fix outdated comment. | RKrahl/photo-tools | photo/idxitem.py | photo/idxitem.py | """Provide the class IdxItem which represents an item in the index.
"""
import os.path
import hashlib
from photo.exif import Exif
from photo.geo import GeoPosition
def _checksum(fname, hashalg):
"""Calculate hashes for a file.
"""
if not hashalg:
return {}
m = { h:hashlib.new(h) for h in hashalg }
chunksize = 8192
with open(fname, 'rb') as f:
while True:
chunk = f.read(chunksize)
if not chunk:
break
for h in hashalg:
m[h].update(chunk)
return { h: m[h].hexdigest() for h in hashalg }
class IdxItem(object):
def __init__(self, data=None, filename=None, basedir=None, hashalg=['md5']):
self.filename = None
self.tags = []
if data is not None:
# Compatibility with legacy index file formats.
if 'md5' in data:
data['checksum'] = {'md5': data['md5']}
del data['md5']
if 'createdate' in data:
data['createDate'] = data['createdate']
del data['createdate']
self.__dict__.update(data)
elif filename is not None:
self.filename = filename
if basedir is not None:
filename = os.path.join(basedir, filename)
self.checksum = _checksum(filename, hashalg)
exifdata = Exif(filename)
self.createDate = exifdata.createDate
self.orientation = exifdata.orientation
self.gpsPosition = exifdata.gpsPosition
if self.gpsPosition:
self.gpsPosition = GeoPosition(self.gpsPosition)
self.tags = set(self.tags)
def as_dict(self):
d = self.__dict__.copy()
d['tags'] = list(d['tags'])
d['tags'].sort()
if d['gpsPosition']:
d['gpsPosition'] = d['gpsPosition'].as_dict()
return d
| """Provide the class IdxItem which represents an item in the index.
"""
import os.path
import hashlib
from photo.exif import Exif
from photo.geo import GeoPosition
def _checksum(fname, hashalg):
"""Calculate the md5 hash for a file.
"""
if not hashalg:
return {}
m = { h:hashlib.new(h) for h in hashalg }
chunksize = 8192
with open(fname, 'rb') as f:
while True:
chunk = f.read(chunksize)
if not chunk:
break
for h in hashalg:
m[h].update(chunk)
return { h: m[h].hexdigest() for h in hashalg }
class IdxItem(object):
def __init__(self, data=None, filename=None, basedir=None, hashalg=['md5']):
self.filename = None
self.tags = []
if data is not None:
# Compatibility with legacy index file formats.
if 'md5' in data:
data['checksum'] = {'md5': data['md5']}
del data['md5']
if 'createdate' in data:
data['createDate'] = data['createdate']
del data['createdate']
self.__dict__.update(data)
elif filename is not None:
self.filename = filename
if basedir is not None:
filename = os.path.join(basedir, filename)
self.checksum = _checksum(filename, hashalg)
exifdata = Exif(filename)
self.createDate = exifdata.createDate
self.orientation = exifdata.orientation
self.gpsPosition = exifdata.gpsPosition
if self.gpsPosition:
self.gpsPosition = GeoPosition(self.gpsPosition)
self.tags = set(self.tags)
def as_dict(self):
d = self.__dict__.copy()
d['tags'] = list(d['tags'])
d['tags'].sort()
if d['gpsPosition']:
d['gpsPosition'] = d['gpsPosition'].as_dict()
return d
| apache-2.0 | Python |
4a526c061dd3e171c587890bf945f9b235eaa638 | bump version | pulilab/django-medialibrary | medialibrary/__init__.py | medialibrary/__init__.py | "A pluggable django app for media management."
__version__ = '1.4.8'
| "A pluggable django app for media management."
__version__ = '1.4.7'
| bsd-3-clause | Python |
d21ff98f92180497d1175b72bbf4343823c16732 | Update Bob.py | eshook/Forest,eshook/Forest | forest/bobs/Bob.py | forest/bobs/Bob.py | """
Copyright (c) 2017 Eric Shook. All rights reserved.
Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
@author: eshook (Eric Shook, eshook@gmail.edu)
@contributors: <Contribute and add your name here!>
"""
# TODO: Replace individual variables with numpy arrays
# use accessors to get access to individual values
# Need to profile to see if this improves overall speedup.
# This would align with other projects that use arrays to process x,y,z using vectorization
# In particular, I think this could be helpful for using Bobs for indexing (e.g., r-tree, etc.)
# Just Bob
# (Or for those less cool folks a Spatial-Temporal Bounding OBject :-)
class Bob(object):
def __init__(self, y = 0, x = 0, h = 0, w = 0, t = 0, d = 0):
self.y = y # y-axis (origin)
self.x = x # x-axis (origin) ___
self.h = h # height (y-axis) Y |\__\
self.w = w # width (x-axis) X \|__|
self.t = t # t-axis (origin) T
self.d = d # duration (t-axis)
self.createdby = "" # Who or what created these data
self.data = None # By default Bobs don't have any data
def __repr__(self):
return "Bob (%f,%f) [%f,%f]" % (self.y,self.x,self.h,self.w)
def __call__(self):
return self.data
| """
Copyright (c) 2017 Eric Shook. All rights reserved.
Use of this source code is governed by a BSD-style license that can be found in the LICENSE file.
@author: eshook (Eric Shook, eshook@gmail.edu)
@contributors: <Contribute and add your name here!>
"""
# TODO: Replace individual variables with numpy arrays
# use accessors to get access to individual values
# Need to profile to see if this improves overall speedup.
# Just Bob
# (Or for those less cool folks a Bounding OBject :-)
class Bob(object):
def __init__(self, y = 0, x = 0, h = 0, w = 0, t = 0, d = 0):
self.y = y # y-axis (origin)
self.x = x # x-axis (origin) ___
self.h = h # height (y-axis) Y |\__\
self.w = w # width (x-axis) X \|__|
self.t = t # t-axis (origin) T
self.d = d # duration (t-axis)
self.createdby = "" # Who or what created these data
self.data = None # By default Bobs don't have any data
def __repr__(self):
return "Bob (%f,%f) [%f,%f]" % (self.y,self.x,self.h,self.w)
def __call__(self):
return self.data
| bsd-3-clause | Python |
9f7ecf5b575625cf5d18687d12b79b2e21d59fb0 | Increase coverage | geotagx/pybossa,proyectos-analizo-info/pybossa-analizo-info,geotagx/pybossa,harihpr/tweetclickers,OpenNewsLabs/pybossa,jean/pybossa,proyectos-analizo-info/pybossa-analizo-info,jean/pybossa,Scifabric/pybossa,inteligencia-coletiva-lsd/pybossa,inteligencia-coletiva-lsd/pybossa,PyBossa/pybossa,PyBossa/pybossa,OpenNewsLabs/pybossa,stefanhahmann/pybossa,Scifabric/pybossa,proyectos-analizo-info/pybossa-analizo-info,harihpr/tweetclickers,stefanhahmann/pybossa | test/test_uploader/test_generic_uploader.py | test/test_uploader/test_generic_uploader.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""This module tests the Uploader class."""
from default import Test, with_context
from pybossa.uploader import Uploader
from mock import patch
class TestUploader(Test):
"""Test PyBossa Uploader module."""
def setUp(self):
"""SetUp method."""
super(TestUploader, self).setUp()
with self.flask_app.app_context():
self.create()
@with_context
def test_uploader_init(self):
"""Test UPLOADER init method works."""
u = Uploader()
new_extensions = ['pdf', 'doe']
new_uploader = Uploader()
with patch.dict(self.flask_app.config,
{'ALLOWED_EXTENSIONS': new_extensions}):
new_uploader.init_app(self.flask_app)
expected_extensions = set.union(u.allowed_extensions, new_extensions)
err_msg = "The new uploader should support two extra extensions"
assert expected_extensions == new_uploader.allowed_extensions, err_msg
@with_context
def test_allowed_file(self):
"""Test UPLOADER allowed_file method works."""
u = Uploader()
for ext in u.allowed_extensions:
# Change extension to uppercase to check that it works too
filename = 'test.%s' % ext.upper()
err_msg = ("This file: %s should be allowed, but it failed"
% filename)
assert u.allowed_file(filename) is True, err_msg
err_msg = "Non allowed extensions should return false"
assert u.allowed_file('wrong.pdf') is False, err_msg
@with_context
def test_get_filename_extension(self):
u = Uploader()
filename = "image.png"
err_msg = "The extension should be PNG"
assert u.get_filename_extension(filename) == 'png', err_msg
filename = "image.jpg"
err_msg = "The extension should be JPEG"
assert u.get_filename_extension(filename) == 'jpeg', err_msg
| # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""This module tests the Uploader class."""
from default import Test, with_context
from pybossa.uploader import Uploader
from mock import patch
class TestUploader(Test):
"""Test PyBossa Uploader module."""
def setUp(self):
"""SetUp method."""
super(TestUploader, self).setUp()
with self.flask_app.app_context():
self.create()
@with_context
def test_uploader_init(self):
"""Test UPLOADER init method works."""
u = Uploader()
new_extensions = ['pdf', 'doe']
new_uploader = Uploader()
with patch.dict(self.flask_app.config,
{'ALLOWED_EXTENSIONS': new_extensions}):
new_uploader.init_app(self.flask_app)
expected_extensions = set.union(u.allowed_extensions, new_extensions)
err_msg = "The new uploader should support two extra extensions"
assert expected_extensions == new_uploader.allowed_extensions, err_msg
@with_context
def test_allowed_file(self):
"""Test UPLOADER allowed_file method works."""
u = Uploader()
for ext in u.allowed_extensions:
# Change extension to uppercase to check that it works too
filename = 'test.%s' % ext.upper()
err_msg = ("This file: %s should be allowed, but it failed"
% filename)
assert u.allowed_file(filename) is True, err_msg
err_msg = "Non allowed extensions should return false"
assert u.allowed_file('wrong.pdf') is False, err_msg
| agpl-3.0 | Python |
718b7cdad91c22a5359c35a53a41a04caedb0017 | create schedule add data to soar script | elixirhub/events-portal-scraping-scripts | ScheduleAddData.py | ScheduleAddData.py | __author__ = 'chuqiao'
from apscheduler.schedulers.blocking import BlockingScheduler
import EventsPortal
import sys
def scheduleUpdateSolr(sourceUrl,patternUrl,solrUrl):
"""
"""
# logger.info('***Starting update every hour***')
sched = BlockingScheduler()
sched.add_job(EventsPortal.addDataToSolrFromUrl, 'interval', minutes= 1, args=[sourceUrl,patternUrl,solrUrl])
sched.start()
try:
# Keeps the main thread alive.
while True:
time.sleep(20)
except (KeyboardInterrupt, SystemExit):
pass
if __name__ == '__main__':
scheduleUpdateSolr("http://bioevents-portal.org/eventsfull/test?state=published&field_type_tid=All",
"http://bioevents-portal.org/events",
"139.162.217.53:8983/solr/eventsportal/"
)
# scheduleUpdateSolr(sys.argv[1],sys.argv[2])
| __author__ = 'chuqiao'
from apscheduler.schedulers.blocking import BlockingScheduler
import logging
logging.basicConfig()
import EventsPortal
import sys
def scheduleUpdateSolr(sourceUrl,patternUrl,solrUrl):
"""
"""
# logger.info('***Starting update every hour***')
sched = BlockingScheduler()
sched.add_job(EventsPortal.addDataToSolrFromUrl, 'interval', minutes= 60, args=[sourceUrl,patternUrl,solrUrl])
sched.start()
try:
# Keeps the main thread alive.
while True:
time.sleep(20)
except (KeyboardInterrupt, SystemExit):
pass
if __name__ == '__main__':
scheduleUpdateSolr("http://bioevents-portal.org/eventsfull/test?state=published&field_type_tid=All",
"http://bioevents-portal.org/events",
"139.162.217.53:8983/solr/eventsportal/"
)
# scheduleUpdateSolr(sys.argv[1],sys.argv[2])
| mit | Python |
43582e2ce55e374622d68d3a9ef8b0ab2e66a5fb | Update prod config | voer-platform/vp.web,voer-platform/vp.web,voer-platform/vp.web,voer-platform/vp.web | voer/settings/prod.py | voer/settings/prod.py | '''
Created on 16 Dec 2013
@author: huyvq
'''
from base import *
# FOR DEBUG
DEBUG = True
DEVELOPMENT = True
TEMPLATE_DEBUG = DEBUG
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'voer_django',
'USER': 'root',
'PASSWORD': 'root',
'HOST': '127.0.0.1',
'PORT': 3306,
}
}
#VPR Address
VPR_URL = 'http://dev.voer.vn:2013/1.0/'
#VPT Address
VPT_URL = 'http://voer.edu.vn:6543/'
SITE_URL = 'voer.edu.vn'
| '''
Created on 16 Dec 2013
@author: huyvq
'''
from base import *
# FOR DEBUG
DEBUG = True
DEVELOPMENT = True
TEMPLATE_DEBUG = DEBUG
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'voer_django',
'USER': 'root',
'PASSWORD': 'root',
'HOST': '127.0.0.1',
'PORT': 3306,
}
}
VPR_URL = 'dev.voer.vn'
VPR_PORT = '2013'
VPR_VERSION = '1.0'
VPR_URL_FULL = os.path.join(VPR_URL, VPR_PORT, VPR_VERSION)
SITE_URL = 'dev.voer.vn'
| agpl-3.0 | Python |
90284acf9490d239bf5ca9e82337b191022c48a0 | build bump | tb0hdan/voiceplay,tb0hdan/voiceplay,tb0hdan/voiceplay,tb0hdan/voiceplay | voiceplay/__init__.py | voiceplay/__init__.py | #-*- coding: utf-8 -*-
""" get version, etc """
__title__ = 'VoicePlay'
__description__ = 'Client-side first music centered voice controlled player'
__version__ = '0.2.3.1'
__author__ = 'Bohdan Turkynewych'
__author_email__ = 'tb0hdan@gmail.com'
__license__ = 'UNLICENSE'
__copyright__ = 'public domain'
__website__ = 'https://github.com/tb0hdan/voiceplay'
__issuesurl__ = 'https://github.com/tb0hdan/voiceplay/issues/new'
| #-*- coding: utf-8 -*-
""" get version, etc """
__title__ = 'VoicePlay'
__description__ = 'Client-side first music centered voice controlled player'
__version__ = '0.2.3.0'
__author__ = 'Bohdan Turkynewych'
__author_email__ = 'tb0hdan@gmail.com'
__license__ = 'UNLICENSE'
__copyright__ = 'public domain'
__website__ = 'https://github.com/tb0hdan/voiceplay'
__issuesurl__ = 'https://github.com/tb0hdan/voiceplay/issues/new'
| unlicense | Python |
615643365eac1592867d03834e094247d33467dc | Update common utilities. | MKLab-ITI/reveal-user-annotation | reveal_user_annotation/common/config_package.py | reveal_user_annotation/common/config_package.py | __author__ = 'Georgios Rizos (georgerizos@iti.gr)'
import os
import inspect
import multiprocessing
import reveal_user_annotation
########################################################################################################################
# Configure path related functions.
########################################################################################################################
def get_package_path():
return os.path.dirname(inspect.getfile(reveal_user_annotation))
########################################################################################################################
# Configure optimization related functions.
########################################################################################################################
def get_threads_number():
"""
Automatically determine the number of cores. If that fails, the number defaults to a manual setting.
"""
try:
cores_number = multiprocessing.cpu_count()
return cores_number
except NotImplementedError:
cores_number = 8
return cores_number
| __author__ = 'Georgios Rizos (georgerizos@iti.gr)'
import os
import inspect
import multiprocessing
import reveal_user_annotation
from reveal_user_annotation.common.datarw import get_file_row_generator
########################################################################################################################
# Configure path related functions.
########################################################################################################################
def get_package_path():
return os.path.dirname(inspect.getfile(reveal_user_annotation))
def get_data_path():
data_file_path = get_package_path() + "/common/res/config_data_path.txt"
file_row_gen = get_file_row_generator(data_file_path, "=")
file_row = next(file_row_gen)
data_path = file_row[1]
return data_path
def get_raw_datasets_path():
return get_data_path() + "/raw_data"
def get_memory_path():
return get_data_path() + "/memory"
########################################################################################################################
# Configure optimization related functions.
########################################################################################################################
def get_threads_number():
"""
Automatically determine the number of cores. If that fails, the number defaults to a manual setting.
"""
try:
cores_number = multiprocessing.cpu_count()
return cores_number
except NotImplementedError:
cores_number = 8
return cores_number
| apache-2.0 | Python |
1b4ba2b5dbfa6889d063d73c48d325f670623846 | Set broker URL | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | ukrdc_importer/radar_ukrdc_importer/app.py | ukrdc_importer/radar_ukrdc_importer/app.py | from flask import Flask
from sqlalchemy import event
from radar.database import db
from radar_ukrdc_importer.utils import get_import_user
def create_app():
app = Flask(__name__)
app.config.from_envvar('RADAR_SETTINGS')
# noinspection PyUnresolvedReferences
from radar import models # noqa
db.init_app(app)
@event.listens_for(db.session, 'before_flush')
def before_flush(session, flush_context, instances):
user = get_import_user()
# SET LOCAL lasts until the end of the current transaction
# http://www.postgresql.org/docs/9.4/static/sql-set.html
session.execute('SET LOCAL radar.user_id = :user_id', dict(user_id=user.id))
return app
def setup_celery(celery, app=None):
if app is None:
app = create_app()
broker_url = app.config.get('CELERY_BROKER_URL')
if broker_url is not None:
celery.conf['BROKER_URL'] = broker_url
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
return TaskBase.__call__(self, *args, **kwargs)
celery.Task = ContextTask
| from flask import Flask
from sqlalchemy import event
from radar.database import db
from radar_ukrdc_importer.utils import get_import_user
def create_app():
app = Flask(__name__)
app.config.from_envvar('RADAR_SETTINGS')
# noinspection PyUnresolvedReferences
from radar import models # noqa
db.init_app(app)
@event.listens_for(db.session, 'before_flush')
def before_flush(session, flush_context, instances):
user = get_import_user()
# SET LOCAL lasts until the end of the current transaction
# http://www.postgresql.org/docs/9.4/static/sql-set.html
session.execute('SET LOCAL radar.user_id = :user_id', dict(user_id=user.id))
return app
def setup_celery(celery, app=None):
if app is None:
app = create_app()
celery.conf.update(app.config)
TaskBase = celery.Task
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
return TaskBase.__call__(self, *args, **kwargs)
celery.Task = ContextTask
| agpl-3.0 | Python |
221d0d39edfbf693a509d942e77143520afd314a | Add permissions to groups | MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api | data/management/commands/load_groups.py | data/management/commands/load_groups.py | from django.core.management import BaseCommand
from django.contrib.auth.models import Group, Permission
from users.models import MflUser
class Command(BaseCommand):
def handle(self, *args, **options):
chrio, created = Group.objects.get_or_create(
name="County Health Records Information Officer")
schrio, created = Group.objects.get_or_create(
name="Sub County Health Records Information Officer")
national, created = Group.objects.get_or_create(
name="National Users")
for perm in Permission.objects.all():
national.permissions.add(perm.id)
national_user = MflUser.objects.get(
email='national@mfltest.slade360.co.ke')
national_user.groups.add(national)
chrio_user = MflUser.objects.get(
email='chrio@mfltest.slade360.co.ke')
chrio_user.groups.add(chrio)
schrio_user = MflUser.objects.get(
email='schrio@mfltest.slade360.co.ke')
schrio_user.groups.add(schrio)
mombasa_user = MflUser.objects.get(
email='mombasa@mfltest.slade360.co.ke')
mombasa_user.groups.add(chrio)
kilifi_user = MflUser.objects.get(
email='kilifi@mfltest.slade360.co.ke')
kilifi_user.groups.add(chrio)
tanriver_user = MflUser.objects.get(
email='tanariver@mfltest.slade360.co.ke')
tanriver_user.groups.add(chrio)
narok_user = MflUser.objects.get(
email='narok@mfltest.slade360.co.ke')
narok_user.groups.add(chrio)
nairobi_user = MflUser.objects.get(
email='nairobi@mfltest.slade360.co.ke')
nairobi_user.groups.add(chrio)
| from django.core.management import BaseCommand
from django.contrib.auth.models import Group
from users.models import MflUser
class Command(BaseCommand):
def handle(self, *args, **options):
chrio, created = Group.objects.get_or_create(
name="County Health Records Information Officer")
schrio, created = Group.objects.get_or_create(
name="Sub County Health Records Information Officer")
national, created = Group.objects.get_or_create(
name="National Users")
national_user = MflUser.objects.get(
email='national@mfltest.slade360.co.ke')
national_user.groups.add(national)
chrio_user = MflUser.objects.get(
email='chrio@mfltest.slade360.co.ke')
chrio_user.groups.add(chrio)
schrio_user = MflUser.objects.get(
email='schrio@mfltest.slade360.co.ke')
schrio_user.groups.add(schrio)
mombasa_user = MflUser.objects.get(
email='mombasa@mfltest.slade360.co.ke')
mombasa_user.groups.add(chrio)
kilifi_user = MflUser.objects.get(
email='kilifi@mfltest.slade360.co.ke')
kilifi_user.groups.add(chrio)
tanriver_user = MflUser.objects.get(
email='tanariver@mfltest.slade360.co.ke')
tanriver_user.groups.add(chrio)
narok_user = MflUser.objects.get(
email='narok@mfltest.slade360.co.ke')
narok_user.groups.add(chrio)
nairobi_user = MflUser.objects.get(
email='nairobi@mfltest.slade360.co.ke')
nairobi_user.groups.add(chrio)
| mit | Python |
2a742af0fd1b9f33b2b78901a1a4b9a5db4df4c8 | change TableTop.board to be a flat list | IanDCarroll/xox | source/game_table.py | source/game_table.py | class TableTop(object):
def __init__(self):
self.board = [0,0,0,0,0,0,0,0,0]
| class TableTop(object):
def __init__(self):
self.board = [[0,0,0],[0,0,0],[0,0,0]]
| mit | Python |
129e7f300a9356be59860ea93534a57220563a4b | test relies on syminfo.py, which needs CLIBD | xia2/xia2,xia2/xia2 | Handlers/test_CommandLine.py | Handlers/test_CommandLine.py | from __future__ import absolute_import, division, print_function
import pytest
from xia2.Handlers.CommandLine import validate_project_crystal_name
from dials.util import Sorry
def test_validate_project_crystal_name(ccp4):
for value in ("foo_001", "_foo_001", "foo", "_foo_", "_1foo"):
assert validate_project_crystal_name("crystal", value)
for value in ("foo.001", "1foo", "foo&", "*foo"):
with pytest.raises(Sorry):
validate_project_crystal_name("crystal", value)
| from __future__ import absolute_import, division, print_function
import pytest
from xia2.Handlers.CommandLine import validate_project_crystal_name
from dials.util import Sorry
def test_validate_project_crystal_name():
for value in ("foo_001", "_foo_001", "foo", "_foo_", "_1foo"):
assert validate_project_crystal_name("crystal", value)
for value in ("foo.001", "1foo", "foo&", "*foo"):
with pytest.raises(Sorry):
validate_project_crystal_name("crystal", value)
| bsd-3-clause | Python |
6b7260e717129c08a1e44a42a9468d2003a1b7e3 | Structure update | DarkmatterVale/Blockly-Frame-Generator,DarkmatterVale/Blockly-Frame-Generator | frame_generator.py | frame_generator.py | __author__ = 'Vale Tolpegin'
# Importing relevant classes
import sys, os, re
class frame_generator:
# Initialization method
def init( self, *args, **kwargs ):
pass
# Function that delegates frame file generation for every language that was requested
def generate_frame_files( self, base_directory, languages ):
for language in languages:
if language == "propc":
return self.generate_c_frame( base_directory )
elif language == "spin":
return self.generate_spin_frame( self, base_directory )
# Function that actually generates the frame file for the C language
def generate_c_frame( self, base_directory ):
return False
# Function that actually generates the frame file for the Spin language
def generate_spin_frame( self, base_directory ):
return False
# Opens a directory chooser dialog window and returns the path of the directory the user chose
def askdirectory(self, **options):
return apply(self.Chooser, (), options).show()
class Chooser(Dialog):
command = "tk_chooseDirectory"
def _fixresult(self, widget, result):
if result:
# keep directory until next time
self.options["initialdir"] = result
self.directory = result # compatibility
return result
class arg_parser:
# Initialization method for the acommand line arguement parser
def init( self, *args, **kwargs ):
pass
if __name__ == '__main__':
# Get language & other command line arguements
command_line_arg_parser = arg_parser()
# TO DO: add way to get arguements
# Instantiate a frame_generator object
frame_creator = frame_generator()
# Get base directory
base_directory = frame_creator.askdirectory()
# Call frame_generator object to parse and generate frame file(s)
generated_truefalse = frame_creator.generate_frame_files( base_directory, languages )
# If the files were successfully generated
if generated_truefalse:
# Let the user know the files were generated successfully
print ""
print "[ Info ] Frame files generated"
print ""
# Otherwise if the files were not successfully generated
else:
# Let the user know the files were not successfully generated
print ""
print "[ Error ] Frame files could not be generated due to some unknown error. Please try again"
print "" | __author__ = 'Vale Tolpegin'
import sys, os, re
class frame_generator:
def init( self, *args, **kwargs ):
pass
if __name__ == '__main__':
pass
| apache-2.0 | Python |
ab952b47561edc901efeafd63838c589ee2da2b8 | Fix #578 -- Sputnik had been purging all files on --force, not just the relevant one. | explosion/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,recognai/spaCy,recognai/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,Gregory-Howard/spaCy,explosion/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,spacy-io/spaCy,aikramer2/spaCy,honnibal/spaCy,banglakit/spaCy,recognai/spaCy,raphael0202/spaCy,explosion/spaCy,banglakit/spaCy,explosion/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,spacy-io/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,recognai/spaCy,raphael0202/spaCy,explosion/spaCy,honnibal/spaCy,aikramer2/spaCy,raphael0202/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,honnibal/spaCy,explosion/spaCy,recognai/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,honnibal/spaCy,recognai/spaCy,Gregory-Howard/spaCy,banglakit/spaCy | spacy/download.py | spacy/download.py | from __future__ import print_function
import sys
import sputnik
from sputnik.package_list import (PackageNotFoundException,
CompatiblePackageNotFoundException)
from . import about
from . import util
def download(lang, force=False, fail_on_exist=True):
try:
pkg = sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
if force:
shutil.rmtree(pkg.path)
elif fail_on_exist:
print("Model already installed. Please run 'python -m "
"spacy.%s.download --force' to reinstall." % lang, file=sys.stderr)
sys.exit(0)
except (PackageNotFoundException, CompatiblePackageNotFoundException):
pass
package = sputnik.install(about.__title__, about.__version__,
about.__models__.get(lang, lang))
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
except (PackageNotFoundException, CompatiblePackageNotFoundException):
print("Model failed to install. Please run 'python -m "
"spacy.%s.download --force'." % lang, file=sys.stderr)
sys.exit(1)
data_path = util.get_data_path()
print("Model successfully installed to %s" % data_path, file=sys.stderr)
| from __future__ import print_function
import sys
import sputnik
from sputnik.package_list import (PackageNotFoundException,
CompatiblePackageNotFoundException)
from . import about
from . import util
def download(lang, force=False, fail_on_exist=True):
if force:
sputnik.purge(about.__title__, about.__version__)
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
if fail_on_exist:
print("Model already installed. Please run 'python -m "
"spacy.%s.download --force' to reinstall." % lang, file=sys.stderr)
sys.exit(0)
except (PackageNotFoundException, CompatiblePackageNotFoundException):
pass
package = sputnik.install(about.__title__, about.__version__,
about.__models__.get(lang, lang))
try:
sputnik.package(about.__title__, about.__version__,
about.__models__.get(lang, lang))
except (PackageNotFoundException, CompatiblePackageNotFoundException):
print("Model failed to install. Please run 'python -m "
"spacy.%s.download --force'." % lang, file=sys.stderr)
sys.exit(1)
data_path = util.get_data_path()
print("Model successfully installed to %s" % data_path, file=sys.stderr)
| mit | Python |
9cae7682a8a74f52de1ab44d30f604334416ddb8 | implement eightball (#122) | GLolol/PyLink | plugins/games.py | plugins/games.py | """
games.py: Create a bot that provides game functionality (dice, 8ball, etc).
"""
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import random
import utils
from log import log
import world
gameclient = utils.registerService("Games", manipulatable=True)
reply = gameclient.reply # TODO find a better syntax for ServiceBot.reply()
# commands
def dice(irc, source, args):
"""<num>d<sides>
Rolls a die with <sides> sides <num> times.
"""
if not args:
reply(irc, "No string given.")
return
try:
# Split num and sides and convert them to int.
num, sides = map(int, args[0].split('d', 1))
except ValueError:
# Invalid syntax. Show the command help.
gameclient.help(irc, source, ['dice'])
return
assert 1 < sides <= 100, "Invalid side count (must be 2-100)."
assert 1 <= num <= 100, "Cannot roll more than 100 dice at once."
results = []
for _ in range(num):
results.append(random.randint(1, sides))
# Convert results to strings, join them, format, and reply.
s = 'You rolled %s: %s (total: %s)' % (args[0], ' '.join([str(x) for x in results]), sum(results))
reply(irc, s)
gameclient.add_cmd(dice, 'd')
gameclient.add_cmd(dice)
eightball_responses = ["It is certain.",
"It is decidedly so.",
"Without a doubt.",
"Yes, definitely.",
"You may rely on it.",
"As I see it, yes.",
"Most likely.",
"Outlook good.",
"Yes.",
"Signs point to yes.",
"Reply hazy, try again.",
"Ask again later.",
"Better not tell you now.",
"Cannot predict now.",
"Concentrate and ask again.",
"Don't count on it.",
"My reply is no.",
"My sources say no.",
"Outlook not so good.",
"Very doubtful."]
def eightball(irc, source, args):
"""[<question>]
Asks the Magic 8-ball a question.
"""
reply(irc, random.choice(eightball_responses))
gameclient.add_cmd(eightball)
gameclient.add_cmd(eightball, '8ball')
gameclient.add_cmd(eightball, '8b')
# loading
def main(irc=None):
"""Main function, called during plugin loading at start."""
# seed the random
random.seed()
def die(irc):
utils.unregisterService('games')
| """
games.py: Create a bot that provides game functionality (dice, 8ball, etc).
"""
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import random
import utils
from log import log
import world
gameclient = utils.registerService("Games", manipulatable=True)
reply = gameclient.reply # TODO find a better syntax for ServiceBot.reply()
# commands
def dice(irc, source, args):
"""<num>d<sides>
Rolls a die with <sides> sides <num> times.
"""
if not args:
reply(irc, "No string given.")
return
try:
# Split num and sides and convert them to int.
num, sides = map(int, args[0].split('d', 1))
except ValueError:
# Invalid syntax. Show the command help.
gameclient.help(irc, source, ['dice'])
return
assert 1 < sides <= 100, "Invalid side count (must be 2-100)."
assert 1 <= num <= 100, "Cannot roll more than 100 dice at once."
results = []
for _ in range(num):
results.append(random.randint(1, sides))
# Convert results to strings, join them, format, and reply.
s = 'You rolled %s: %s (total: %s)' % (args[0], ' '.join([str(x) for x in results]), sum(results))
reply(irc, s)
gameclient.add_cmd(dice, 'd')
gameclient.add_cmd(dice)
# loading
def main(irc=None):
"""Main function, called during plugin loading at start."""
# seed the random
random.seed()
def die(irc):
utils.unregisterService('games')
| mpl-2.0 | Python |
1f8014a68ac8d0b10c990405370edeaf369cb674 | Update the description | rahulbohra/Python-Basic | 32_filtering_the_number.py | 32_filtering_the_number.py | # Looking for a needle in hackstack, in a loop
data = [12,4,21,56,01,42,76,93]
print data
print '\nScript starts\n'
for number in data:
if number > 50:
print 'Number greater then 50 is ', number
print '\nScript Ends'
| data = [12,4,21,56,01,42,76,93]
print data
print '\nScript starts\n'
for number in data:
if number > 50:
print 'Number greater then 50 is ', number
print '\nScript Ends'
| mit | Python |
73553a2c6817addccff5a0e90cd32993ee1ea69b | Update ngrokwebhook.py | jbogarin/ciscosparkapi | examples/ngrokwebhook.py | examples/ngrokwebhook.py | #sample script that reads ngrok info from localhost:4040 and create Cisco Spark Webhook
#typicall ngrok is called "ngrok http 8080" to redirect localhost:8080 to Internet
#accesible ngrok url
#
#To use script simply launch ngrok, then launch this script. After ngrok is killed, run this
#script a second time to remove webhook from Cisco Spark
import requests
import json
import re
import sys
import requests.packages.urllib3
requests.packages.urllib3.disable_warnings()
from ciscosparkapi import CiscoSparkAPI, Webhook
def findwebhookidbyname(api, webhookname):
webhooks = api.webhooks.list()
for wh in webhooks:
if wh.name == webhookname:
return wh.id
else:
return "not found"
#Webhook attributes
webhookname="testwebhook"
resource="messages"
event="created"
url_suffix="/sparkwebhook"
#grab the at from a local at.txt file instead of global variable
fat=open ("at.txt","r+")
at=fat.readline().rstrip()
fat.close
api = CiscoSparkAPI(at)
#go to the localhost page for nogrok and grab the public url for http
try:
ngrokpage = requests.get("http://127.0.0.1:4040").text
except:
print ("no ngrok running - deleting webhook if it exists")
whid=findwebhookidbyname(api, webhookname)
if "not found" in whid:
print ("no webhook found")
sys.exit()
else:
print (whid)
dict=api.webhooks.delete(whid)
print (dict)
print ("Webhook deleted")
sys.exit()
for line in ngrokpage.split("\n"):
if "window.common = " in line:
ngrokjson = re.search('JSON.parse\(\"(.+)\"\)\;',line).group(1)
ngrokjson = (ngrokjson.replace('\\',''))
print (ngrokjson)
Url = (json.loads(ngrokjson)["Session"]["Tunnels"]["command_line (http)"]["URL"])+url_suffix
print (Url)
#check if the webhook exists by name and then create it if not
whid=findwebhookidbyname(api, webhookname)
if "not found" in whid:
#create
print ("not found")
dict=api.webhooks.create(webhookname, targetUrl, resource, event)
print (dict)
else:
#update
print (whid)
dict=api.webhooks.update(whid, name=webhookname, targetUrl=Url)
print (dict)
| #sample script that reads ngrok info from localhost:4040 and create Cisco Spark Webhook
#typicall ngrok is called "ngrok http 8080" to redirect localhost:8080 to Internet
#accesible ngrok url
#
#To use script simply launch ngrok, then launch this script. After ngrok is killed, run this
#script a second time to remove webhook from Cisco Spark
import requests
import json
import re
import sys
import requests.packages.urllib3
requests.packages.urllib3.disable_warnings()
from ciscosparkapi import CiscoSparkAPI, Webhook
def findwebhookidbyname(api, webhookname):
webhooks = api.webhooks.list()
for wh in webhooks:
if wh.name == webhookname:
return wh.id
else:
return "not found"
#Webhook attributes
webhookname="testwebhook"
resource="messages"
event="created"
url_suffix="/sparkwebhook"
#grab the at from a local at.txt file instead of global variable
fat=open ("at.txt","r+")
at=fat.readline().rstrip()
fat.close
api = CiscoSparkAPI(at)
#go to the localhost page for nogrok and grab the public url for http
try:
ngrokpage = requests.get("http://127.0.0.1:4040").text
except:
print ("no ngrok running - deleting webhook if it exists")
whid=findwebhookidbyname(api, webhookname)
if "not found" in whid:
print ("no webhook found")
sys.exit()
else:
print (whid)
dict=api.webhooks.delete(whid)
print (dict)
print ("Webhook deleted")
sys.exit()
for line in ngrokpage.split("\n"):
if "window.common = " in line:
ngrokjson = re.search('JSON.parse\(\"(.+)\"\)\;',line).group(1)
ngrokjson = (ngrokjson.replace('\\',''))
print (ngrokjson)
Url = (json.loads(ngrokjson)["Session"]["Tunnels"]["command_line (http)"]["URL"])+url_suffix
print (targetUrl)
#check if the webhook exists by name and then create it if not
whid=findwebhookidbyname(api, webhookname)
if "not found" in whid:
#create
print ("not found")
dict=api.webhooks.create(webhookname, targetUrl, resource, event)
print (dict)
else:
#update
print (whid)
dict=api.webhooks.update(whid, name=webhookname, targetUrl=Url)
print (dict)
| mit | Python |
1cbbd3fe890afdeb3f0df7104ba151d7c5f33ef9 | bump version number | laowantong/mocodo,laowantong/mocodo,laowantong/mocodo,laowantong/mocodo,laowantong/mocodo | mocodo/version_number.py | mocodo/version_number.py | version = u"2.0.19" | version = u"2.0.18" | mit | Python |
eccf8d2d9784554b85b5d446fe8737eb29600efb | Switch test_example_hdf_io to pytest | ericdill/databroker,ericdill/databroker | databroker/tests/test_example_hdf_io.py | databroker/tests/test_example_hdf_io.py | from ..examples.hdf_io import hdf_data_io
from metadatastore.test.utils import mds_setup, mds_teardown
from filestore.test.utils import fs_setup, fs_teardown
from databroker import DataBroker as db, get_events
from numpy.testing.utils import assert_array_equal
def setup_module(module):
fs_setup()
mds_setup()
def teardown_module(module):
fs_teardown()
mds_teardown()
def _retrieve_data_helper(event, cols):
spec = event['data']['xrf_spectrum']
x = event['data']['h_pos']
y = event['data']['v_pos']
assert spec.size == 20
assert_array_equal(spec, y * cols + x)
def test_hdf_io():
rows, cols = 1, 5
rs_uid, ev_uids = hdf_data_io(rows, cols)
h = db[rs_uid]
for e in get_events(h, fill=True):
_retrieve_data_helper(e, cols)
assert e['uid'] in ev_uids
| from ..examples.hdf_io import hdf_data_io
from metadatastore.test.utils import mds_setup, mds_teardown
from filestore.test.utils import fs_setup, fs_teardown
from databroker import DataBroker as db, get_events
from numpy.testing.utils import assert_array_equal
def setup_module():
fs_setup()
mds_setup()
def teardown_module():
fs_teardown()
mds_teardown()
def _test_retrieve_data(event, rows, cols):
spec = event['data']['xrf_spectrum']
x = event['data']['h_pos']
y = event['data']['v_pos']
assert spec.size == 20
assert_array_equal(spec, y * cols + x)
def test_hdf_io():
rows, cols = 1, 5
rs_uid, ev_uids = hdf_data_io(rows, cols)
h = db[rs_uid]
for e in get_events(h, fill=True):
_test_retrieve_data(e, rows, cols)
assert e['uid'] in ev_uids
| bsd-3-clause | Python |
d297cb7435f94ab6e086f83c09faeafb6f8cf86e | allow hyphens and underscores in form urls | dimagi/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq | couchforms/urls.py | couchforms/urls.py | from django.conf.urls.defaults import *
urlpatterns = patterns('',
url(r'^post/?$', 'couchforms.views.post', name='xform_post'),
url(r'^download/(?P<instance_id>[\w_-]+)/(?P<attachment>[\w.-_]+)?$',
'couchforms.views.download_attachment', name='xform_attachment'),
)
| from django.conf.urls.defaults import *
urlpatterns = patterns('',
url(r'^post/?$', 'couchforms.views.post', name='xform_post'),
url(r'^download/(?P<instance_id>\w+)/(?P<attachment>[\w.-_]+)?$',
'couchforms.views.download_attachment', name='xform_attachment'),
)
| bsd-3-clause | Python |
27e6b067aea3cc969911d5c0071e4accfd77b7a1 | Update main.py | Python-IoT/Smart-IoT-Planting-System,Python-IoT/Smart-IoT-Planting-System | device/src/main.py | device/src/main.py | #This is the file executing while STM32 MCU bootup, and in this file,
#it will call other functions to fullfill the project.
#Communication module: LoRa.
#Communication method with gateway via LoRa.
#Uart port drive LoRa module.
#Parse JSON between device and gateway via LoRa channel.
#LoRa module: E32-TTL-100
#Pin specification:
#Module MCU
#M0(IN) <--> GPIO(X3)(OUT) #mode setting, can not hang
#M1(IN) <--> GPIO(X4)(OUT) #mode setting, can not hang
#RXD(IN) <--> X1(TX)(OUT) #UART4
#TXD(OUT) <--> X2(RX)(IN) #UART4
#AUX(OUT) <--> GPIO/INT(IN) #module status detecting
#VCC
#GND
#Communication mode is 0, need to set M0 and M1 to 0.
import pyb
from pyb import Pin
from pyb import Timer
from pyb import UART
import micropython
#Import light intensity needed module
import LightIntensity
import time
import json
micropython.alloc_emergency_exception_buf(100)
print('pin init')
Pin('Y11',Pin.OUT_PP).low() #GND
Pin('Y9',Pin.OUT_PP).high() #VCC
#Set LoRa module with mode-0.
M0 = Pin('X3', Pin.OUT_PP)
M1 = Pin('X4', Pin.OUT_PP)
M0.low()
M1.low()
#Init uart4 for LoRa module.
u4 = UART(4,9600)
u4.init(9600, bits=8, parity=None, stop=1)
cmd_online = '{"ID":"1", "CMD":"Online", "TYPE":"N", "VALUE":"N"}\n'
u4.write(cmd_online)
#LED shining regularly(using timer) to indicate the program is running correctly
tim1 = Timer(1, freq=1)
tim1.callback(lambda t: pyb.LED(1).toggle())
if __name__=='__main__':
while True:
#Waiting for the message from UART4 to obtain LoRa data.
len = u4.any()
if(len > 0):
print(u4.read())
| #This is the file executing while STM32 MCU bootup, and in this file,
#it will call other functions to fullfill the project.
#Communication module: LoRa.
#Communication method with gateway via LoRa.
#Uart port drive LoRa module.
#Parse JSON between device and gateway via LoRa channel.
#LoRa module: E32-TTL-100
#Pin specification:
#Module MCU
#M0(IN) <--> GPIO(X3)(OUT) #mode setting, can not hang
#M1(IN) <--> GPIO(X4)(OUT) #mode setting, can not hang
#RXD(IN) <--> X1(TX)(OUT) #UART4
#TXD(OUT) <--> X2(RX)(IN) #UART4
#AUX(OUT) <--> GPIO/INT(IN) #module status detecting
#VCC
#GND
#Communication mode is 0, need to set M0 and M1 to 0.
import pyb
from pyb import Pin
from pyb import Timer
from pyb import UART
import micropython
#Import light intensity needed module
import LightIntensity
import time
micropython.alloc_emergency_exception_buf(100)
print('pin init')
Pin('Y11',Pin.OUT_PP).low() #GND
Pin('Y9',Pin.OUT_PP).high() #VCC
#Set LoRa module with mode-0.
M0 = Pin('X3', Pin.OUT_PP)
M1 = Pin('X4', Pin.OUT_PP)
M0.low()
M1.low()
#Init uart4 for LoRa module.
u4 = UART(4,9600)
u4.init(9600, bits=8, parity=None, stop=1)
cmd_online = '{"ID":"1", "CMD":"Online", "TYPE":"N", "VALUE":"N"}\n'
u4.write(cmd_online)
#LED shining regularly(using timer) to indicate the program is running correctly
tim1 = Timer(1, freq=1)
tim1.callback(lambda t: pyb.LED(1).toggle())
if __name__=='__main__':
while True:
#Waiting for the message from UART4 to obtain LoRa data.
len = u4.any()
if(len > 0):
print(u4.read())
| mit | Python |
be5263d7aef7651bfe0b0992998bdb67655b051f | Remove an unused helper | Meerkov/fireplace,smallnamespace/fireplace,butozerca/fireplace,beheh/fireplace,smallnamespace/fireplace,Ragowit/fireplace,Meerkov/fireplace,butozerca/fireplace,jleclanche/fireplace,oftc-ftw/fireplace,liujimj/fireplace,Ragowit/fireplace,amw2104/fireplace,liujimj/fireplace,amw2104/fireplace,oftc-ftw/fireplace,NightKev/fireplace | fireplace/cards/utils.py | fireplace/cards/utils.py | import random
import fireplace.cards
from ..actions import *
from ..enums import CardClass, CardType, GameTag, Race, Rarity, Zone
from ..events import *
from ..targeting import *
def hand(func):
"""
@hand helper decorator
The decorated event listener will only listen while in the HAND Zone
"""
func.zone = Zone.HAND
return func
RandomCard = lambda **kw: RandomCardGenerator(**kw)
RandomCollectible = lambda **kw: RandomCardGenerator(collectible=True, **kw)
RandomMinion = lambda **kw: RandomCollectible(type=CardType.MINION, **kw)
RandomWeapon = lambda **kw: RandomCollectible(type=CardType.WEAPON, **kw)
RandomSparePart = lambda **kw: RandomCardGenerator(spare_part=True, **kw)
| import random
import fireplace.cards
from ..actions import *
from ..enums import CardClass, CardType, GameTag, Race, Rarity, Zone
from ..events import *
from ..targeting import *
def hand(func):
"""
@hand helper decorator
The decorated event listener will only listen while in the HAND Zone
"""
func.zone = Zone.HAND
return func
drawCard = lambda self, *args: self.controller.draw()
RandomCard = lambda **kw: RandomCardGenerator(**kw)
RandomCollectible = lambda **kw: RandomCardGenerator(collectible=True, **kw)
RandomMinion = lambda **kw: RandomCollectible(type=CardType.MINION, **kw)
RandomWeapon = lambda **kw: RandomCollectible(type=CardType.WEAPON, **kw)
RandomSparePart = lambda **kw: RandomCardGenerator(spare_part=True, **kw)
| agpl-3.0 | Python |
f080b9b05dc36fd1ef30c32f870f6233db728d84 | Add assets file. | jledbetter/openhatch,openhatch/oh-mainline,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,sudheesh001/oh-mainline,nirmeshk/oh-mainline,heeraj123/oh-mainline,vipul-sharma20/oh-mainline,ojengwa/oh-mainline,moijes12/oh-mainline,willingc/oh-mainline,willingc/oh-mainline,heeraj123/oh-mainline,Changaco/oh-mainline,waseem18/oh-mainline,moijes12/oh-mainline,ehashman/oh-mainline,campbe13/openhatch,mzdaniel/oh-mainline,waseem18/oh-mainline,mzdaniel/oh-mainline,moijes12/oh-mainline,sudheesh001/oh-mainline,moijes12/oh-mainline,vipul-sharma20/oh-mainline,heeraj123/oh-mainline,ehashman/oh-mainline,ehashman/oh-mainline,eeshangarg/oh-mainline,Changaco/oh-mainline,mzdaniel/oh-mainline,eeshangarg/oh-mainline,eeshangarg/oh-mainline,mzdaniel/oh-mainline,eeshangarg/oh-mainline,moijes12/oh-mainline,campbe13/openhatch,SnappleCap/oh-mainline,waseem18/oh-mainline,openhatch/oh-mainline,Changaco/oh-mainline,SnappleCap/oh-mainline,onceuponatimeforever/oh-mainline,ojengwa/oh-mainline,heeraj123/oh-mainline,SnappleCap/oh-mainline,mzdaniel/oh-mainline,jledbetter/openhatch,ojengwa/oh-mainline,sudheesh001/oh-mainline,jledbetter/openhatch,vipul-sharma20/oh-mainline,ehashman/oh-mainline,SnappleCap/oh-mainline,nirmeshk/oh-mainline,waseem18/oh-mainline,openhatch/oh-mainline,campbe13/openhatch,sudheesh001/oh-mainline,heeraj123/oh-mainline,nirmeshk/oh-mainline,vipul-sharma20/oh-mainline,nirmeshk/oh-mainline,willingc/oh-mainline,openhatch/oh-mainline,jledbetter/openhatch,openhatch/oh-mainline,sudheesh001/oh-mainline,willingc/oh-mainline,Changaco/oh-mainline,onceuponatimeforever/oh-mainline,Changaco/oh-mainline,waseem18/oh-mainline,campbe13/openhatch,willingc/oh-mainline,eeshangarg/oh-mainline,mzdaniel/oh-mainline,campbe13/openhatch,onceuponatimeforever/oh-mainline,onceuponatimeforever/oh-mainline,nirmeshk/oh-mainline,onceuponatimeforever/oh-mainline,jledbetter/openhatch,ojengwa/oh-mainline,SnappleCap/oh-mainline | mysite/assets.py | mysite/assets.py | import django_assets
# Django Assets <http://github.com/miracle2k/django-assets>
# helps us bundle our assets, i.e., JavaScript and CSS,
# so they load faster.
# So django_assets doesn't feel bad creating files
ASSETS_AUTO_CREATE=True
# See documentation at <http://elsdoerfer.name/docs/django-assets/settings.html#assets-expire>
ASSETS_EXPIRE='filename'
# Below, we bundle all of our JavaScripts into what Cal Henderson calls a "monolith".
# (See <http://carsonified.com/blog/dev/serving-javascript-fast/>.)
# NB: "Note that...all filenames and paths are considered to be
# relative to Django’s MEDIA_ROOT settings, and generated urls will be based on MEDIA_URL."
# <http://elsdoerfer.name/docs/django-assets/bundles.html>
js_monolith = django_assets.Bundle(
# JavaScripts kindly provided by other folks
# 'js/jquery.js',
# 'js/jquery.query.js',
# 'js/jquery.json.js',
# 'js/jquery.hint.js',
# 'js/jquery.form.js',
# 'js/jquery.jgrowl.js',
# 'js/jquery-ui-1.7.2.custom.min.js',
# 'js/jquery.cookie.js',
# 'js/piwik.js',
#
# # Stuff we wrote
# 'js/base/locationDialog.js',
# 'js/base/base.js',
# 'js/search/defaultText.js',
# 'js/account/set_location.js',
# 'js/profile/portfolio.js',
# 'js/importer.js', # ~32KB at time of writing (commit 391939b7)
#'js/css_if_js.js', # I don't think we need this
#filters='jsmin',
output='bundle.js')
django_assets.register('big_bundle_of_javascripts', js_monolith)
| import django_assets
# Django Assets <http://github.com/miracle2k/django-assets>
# helps us bundle our assets, i.e., JavaScript and CSS,
# so they load faster.
# So django_assets doesn't feel bad creating files
ASSETS_AUTO_CREATE=True
# See documentation at <http://elsdoerfer.name/docs/django-assets/settings.html#assets-expire>
ASSETS_EXPIRE='filename'
# Bundle all of our JavaScripts into what Cal Henderson calls a "monolith".
# (See <http://carsonified.com/blog/dev/serving-javascript-fast/>.)
js_monolith = django_assets.Bundle(
# JavaScripts kindly provided by other folks
'static/js/jquery.js',
'static/js/jquery.query.js',
'static/js/jquery.json.js',
'static/js/jquery.hint.js',
'static/js/jquery.form.js',
'static/js/jquery.jgrowl.js',
'static/js/jquery-ui-1.7.2.custom.min.js',
'static/js/jquery.cookie.js',
'static/js/piwik.js',
# Stuff we wrote
'static/js/base/locationDialog.js',
'static/js/base/base.js',
'static/js/search/defaultText.js',
'static/js/account/set_location.js',
'static/js/profile/portfolio.js',
'static/js/importer.js', # ~32KB at time of writing (commit 391939b7)
#'static/js/css_if_js.js', # I don't think we need this
filters='jsmin',
output='static/js/bundle.js')
django_assets.register('big_bundle_of_javascripts', js_monolith)
| agpl-3.0 | Python |
a8795d77d3f1a3586de83b92541848d2f12e161f | Clean example script. | alexis-roche/niseg,arokem/nipy,alexis-roche/register,arokem/nipy,alexis-roche/register,alexis-roche/nipy,alexis-roche/nipy,bthirion/nipy,bthirion/nipy,nipy/nipy-labs,nipy/nireg,bthirion/nipy,arokem/nipy,alexis-roche/nireg,nipy/nipy-labs,alexis-roche/register,alexis-roche/nireg,nipy/nireg,arokem/nipy,bthirion/nipy,alexis-roche/nipy,alexis-roche/nipy,alexis-roche/niseg | examples/neurospin/onesample_group.py | examples/neurospin/onesample_group.py | import numpy as np
import nipy.neurospin.statistical_mapping as sm
from nipy.io.imageformats import Nifti1Image as Image
def remake_images():
# Get group data
f = np.load('data/offset_002.npz')
data, vardata, xyz = f['mat'], f['var'], f['xyz']
dX = xyz[0,:].max() + 1
dY = xyz[1,:].max() + 1
dZ = xyz[2,:].max() + 1
aux = np.zeros([dX,dY,dZ])
data_images = []
vardata_images = []
mask_images = []
for i in range(data.shape[0]):
aux[list(xyz)] = data[i,:]
data_images.append(Image(aux.copy(), np.eye(4)))
aux[list(xyz)] = vardata[i,:]
vardata_images.append(Image(aux.copy(), np.eye(4)))
aux[list(xyz)] = 1
mask_images.append(Image(aux.copy(), np.eye(4)))
return data_images, vardata_images, mask_images
data_images, vardata_images, mask_images = remake_images()
##zimg, mask = sm.onesample_test(data_images, None, mask_images, 'student')
zimg, mask, nulls = sm.onesample_test(data_images, None, mask_images, 'wilcoxon',
permutations=1024, cluster_forming_th=0.01)
clusters, info = sm.cluster_stats(zimg, mask, 0.01, nulls=nulls)
| import numpy as np
import nipy.neurospin.statistical_mapping as sm
from nipy.io.imageformats import Nifti1Image as Image
def remake_images():
# Get group data
f = np.load('data/offset_002.npz')
data, vardata, xyz = f['mat'], f['var'], f['xyz']
dX = xyz[0,:].max() + 1
dY = xyz[1,:].max() + 1
dZ = xyz[2,:].max() + 1
aux = np.zeros([dX,dY,dZ])
data_images = []
vardata_images = []
mask_images = []
for i in range(data.shape[0]):
aux[list(xyz)] = data[i,:]
data_images.append(brifti.nifti1.Nifti1Image(aux.copy(), np.eye(4)))
aux[list(xyz)] = vardata[i,:]
vardata_images.append(brifti.nifti1.Nifti1Image(aux.copy(), np.eye(4)))
aux[list(xyz)] = 1
mask_images.append(brifti.nifti1.Nifti1Image(aux.copy(), np.eye(4)))
return data_images, vardata_images, mask_images
data_images, vardata_images, mask_images = remake_images()
##zimg, mask = sm.onesample_test(data_images, None, mask_images, 'student')
zimg, mask, nulls = sm.onesample_test(data_images, None, mask_images, 'wilcoxon',
permutations=1024, cluster_forming_th=0.01)
clusters, info = sm.cluster_stats(zimg, mask, 0.01, nulls=nulls)
| bsd-3-clause | Python |
9da42e47effc1d61a36c7936ea9fe31adb1c1033 | raise version to dev | trichter/qopen | qopen/__init__.py | qopen/__init__.py | from qopen.core import run, __doc__
__version__ = '1.4-dev'
| from qopen.core import run, __doc__
__version__ = '1.3'
| mit | Python |
0b534c54e4fa5b7d0d558159b63a7cc0bf7f5393 | Bump @graknlabs | lolski/grakn,graknlabs/grakn,graknlabs/grakn,graknlabs/grakn,graknlabs/grakn,lolski/grakn,lolski/grakn,lolski/grakn | dependencies/graknlabs/dependencies.bzl | dependencies/graknlabs/dependencies.bzl | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def graknlabs_common():
git_repository(
name = "graknlabs_common",
remote = "https://github.com/graknlabs/common",
tag = "0.2.2" # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_common
)
def graknlabs_build_tools():
git_repository(
name = "graknlabs_build_tools",
remote = "https://github.com/graknlabs/build-tools",
commit = "04f9678403cdbde889b8e25cc74d16bf1751fd81", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_build_tools
)
def graknlabs_verification():
git_repository(
name = "graknlabs_verification",
remote = "https://github.com/graknlabs/verification",
commit = "1e81b2b397cd0ac020f1d69708eef1d4cc2d736b"
)
| #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def graknlabs_common():
git_repository(
name = "graknlabs_common",
remote = "https://github.com/graknlabs/common",
tag = "0.2.2" # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_common
)
def graknlabs_build_tools():
git_repository(
name = "graknlabs_build_tools",
remote = "https://github.com/graknlabs/build-tools",
commit = "04f9678403cdbde889b8e25cc74d16bf1751fd81", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_build_tools
)
def graknlabs_verification():
git_repository(
name = "graknlabs_verification",
remote = "https://github.com/graknlabs/verification",
commit = "82e14c0b439006c7c62b5b2a440396c10a82d44f"
)
| agpl-3.0 | Python |
8deccaa046832afbe8c43ea38d179dd8706754ef | use 'b' prefix, six.b not required | simudream/spec,frol/spec,bitprophet/spec | spec/trap.py | spec/trap.py | """
Test decorator for capturing stdout/stderr/both.
Based on original code from Fabric 1.x, specifically:
* fabric/tests/utils.py
* as of Git SHA 62abc4e17aab0124bf41f9c5f9c4bc86cc7d9412
Though modifications have been made since.
"""
import sys
from functools import wraps
import six
from six import BytesIO as IO
class CarbonCopy(IO):
"""
An IO wrapper capable of multiplexing its writes to other buffer objects.
"""
# NOTE: because StringIO.StringIO on Python 2 is an old-style class we
# cannot use super() :(
def __init__(self, buffer=b'', cc=None):
#def __init__(self, buffer='', cc=None):
"""
If ``cc`` is given and is a file-like object or an iterable of same,
it/they will be written to whenever this instance is written to.
"""
IO.__init__(self, buffer)
if cc is None:
cc = []
elif hasattr(cc, 'write'):
cc = [cc]
self.cc = cc
def write(self, s):
IO.write(self, s)
for writer in self.cc:
writer.write(s)
# Dumb hack to deal with py3 expectations; real sys.std(out|err) in Py3
# requires writing to a buffer attribute obj in some situations.
#@property
#def buffer(self):
# return self
def trap(func):
"""
Replace sys.std(out|err) with a wrapper during execution, restored after.
In addition, a new combined-streams output (another wrapper) will appear at
``sys.stdall``. This stream will resemble what a user sees at a terminal,
i.e. both out/err streams intermingled.
"""
@wraps(func)
def wrapper(*args, **kwargs):
sys.stdall = IO()
my_stdout, sys.stdout = sys.stdout, CarbonCopy(cc=sys.stdall)
my_stderr, sys.stderr = sys.stderr, CarbonCopy(cc=sys.stdall)
try:
ret = func(*args, **kwargs)
finally:
sys.stdout = my_stdout
sys.stderr = my_stderr
del sys.stdall
return wrapper
| """
Test decorator for capturing stdout/stderr/both.
Based on original code from Fabric 1.x, specifically:
* fabric/tests/utils.py
* as of Git SHA 62abc4e17aab0124bf41f9c5f9c4bc86cc7d9412
Though modifications have been made since.
"""
import sys
from functools import wraps
import six
from six import BytesIO as IO
class CarbonCopy(IO):
"""
An IO wrapper capable of multiplexing its writes to other buffer objects.
"""
# NOTE: because StringIO.StringIO on Python 2 is an old-style class we
# cannot use super() :(
def __init__(self, buffer=six.b(''), cc=None):
#def __init__(self, buffer='', cc=None):
"""
If ``cc`` is given and is a file-like object or an iterable of same,
it/they will be written to whenever this instance is written to.
"""
IO.__init__(self, buffer)
if cc is None:
cc = []
elif hasattr(cc, 'write'):
cc = [cc]
self.cc = cc
def write(self, s):
IO.write(self, s)
for writer in self.cc:
writer.write(s)
# Dumb hack to deal with py3 expectations; real sys.std(out|err) in Py3
# requires writing to a buffer attribute obj in some situations.
#@property
#def buffer(self):
# return self
def trap(func):
"""
Replace sys.std(out|err) with a wrapper during execution, restored after.
In addition, a new combined-streams output (another wrapper) will appear at
``sys.stdall``. This stream will resemble what a user sees at a terminal,
i.e. both out/err streams intermingled.
"""
@wraps(func)
def wrapper(*args, **kwargs):
sys.stdall = IO()
my_stdout, sys.stdout = sys.stdout, CarbonCopy(cc=sys.stdall)
my_stderr, sys.stderr = sys.stderr, CarbonCopy(cc=sys.stdall)
try:
ret = func(*args, **kwargs)
finally:
sys.stdout = my_stdout
sys.stderr = my_stderr
del sys.stdall
return wrapper
| mit | Python |
d0fc327eb7b3ed798b02a654bb527d299b0245c9 | add unicode type | jlutz777/FreeStore,jlutz777/FreeStore,jlutz777/FreeStore | run_freestore.py | run_freestore.py | import bottle
from bottle import HTTPError
from bottle.ext import sqlalchemy
from sqlalchemy import create_engine, Column, Integer, Sequence, String, Unicode
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
#engine = create_engine('postgresql://postgres:passw0rd@localhost:5432/freestore', echo=True)
engine = create_engine('postgres://root:j9t95X5qpwsbaaQm@172.17.42.1:49160/db', echo=True)
app = bottle.Bottle()
plugin = sqlalchemy.Plugin(
engine,
keyword='db', # Keyword used to inject session database in a route (default 'db').
)
app.install(plugin)
class CustomerFamily(Base):
"""Sqlalchemy deals model"""
__tablename__ = "customerfamily"
id = Column(Integer, primary_key=True)
email = Column('email', Unicode)
phone = Column('phone', Unicode, default='')
address = Column('address', Unicode)
city = Column('city', Unicode)
state = Column('state', Unicode)
zip = Column('zip', Unicode)
datecreated = Column('datecreated', DateTime)
@app.get('/')
def show(db):
entity = db.query(CustomerFamily).first()
if entity:
return {'id': entity.id, 'name': entity.email}
return HTTPError(404, 'Entity not found.')
if __name__ == "main":
bottle.run(server='gunicorn')
app = bottle.default_app() | import bottle
from bottle import HTTPError
from bottle.ext import sqlalchemy
from sqlalchemy import create_engine, Column, Integer, Sequence, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
#engine = create_engine('postgresql://postgres:passw0rd@localhost:5432/freestore', echo=True)
engine = create_engine('postgres://root:j9t95X5qpwsbaaQm@172.17.42.1:49160/db', echo=True)
app = bottle.Bottle()
plugin = sqlalchemy.Plugin(
engine,
keyword='db', # Keyword used to inject session database in a route (default 'db').
)
app.install(plugin)
class CustomerFamily(Base):
"""Sqlalchemy deals model"""
__tablename__ = "customerfamily"
id = Column(Integer, primary_key=True)
email = Column('email', Unicode)
phone = Column('phone', Unicode, default='')
address = Column('address', Unicode)
city = Column('city', Unicode)
state = Column('state', Unicode)
zip = Column('zip', Unicode)
datecreated = Column('datecreated', DateTime)
@app.get('/')
def show(db):
entity = db.query(CustomerFamily).first()
if entity:
return {'id': entity.id, 'name': entity.email}
return HTTPError(404, 'Entity not found.')
if __name__ == "main":
bottle.run(server='gunicorn')
app = bottle.default_app() | mit | Python |
1595b20d09576e2b686b376884e45b82c2018006 | Update test cases (#463) | exercism/xpython,exercism/xpython,N-Parsons/exercism-python,N-Parsons/exercism-python,behrtam/xpython,jmluy/xpython,jmluy/xpython,mweb/python,mweb/python,behrtam/xpython,pheanex/xpython,exercism/python,smalley/python,smalley/python,pheanex/xpython,exercism/python | exercises/allergies/allergies_test.py | exercises/allergies/allergies_test.py | import unittest
from allergies import Allergies
# Python 2/3 compatibility
if not hasattr(unittest.TestCase, 'assertCountEqual'):
unittest.TestCase.assertCountEqual = unittest.TestCase.assertItemsEqual
# test cases adapted from `x-common//canonical-data.json` @ version: 1.0.0
class AllergiesTests():
def test_no_allergies_means_not_allergic(self):
allergies = Allergies(0)
self.assertFalse(allergies.is_allergic_to('peanuts'))
self.assertFalse(allergies.is_allergic_to('cats'))
self.assertFalse(allergies.is_allergic_to('strawberries'))
def test_is_allergic_to_eggs(self):
self.assertTrue(Allergies(1).is_allergic_to('eggs'))
def test_allergic_to_eggs_in_addition_to_other_stuff(self):
allergies = Allergies(5)
self.assertTrue(allergies.is_allergic_to('eggs'))
self.assertTrue(allergies.is_allergic_to('shellfish'))
self.assertFalse(allergies.is_allergic_to('strawberries'))
def test_no_allergies_at_all(self):
self.assertEqual(Allergies(0).lst, [])
def test_allergic_to_just_eggs(self):
self.assertEqual(Allergies(1).lst, ['eggs'])
def test_allergic_to_just_peanuts(self):
self.assertEqual(Allergies(2).lst, ['peanuts'])
def test_allergic_to_just_strawberries(self):
self.assertEqual(Allergies(8).lst, ['strawberries'])
def test_allergic_to_eggs_and_peanuts(self):
self.assertCountEqual(Allergies(3).lst, ['eggs', 'peanuts'])
def test_allergic_to_more_than_eggs_but_not_peanuts(self):
self.assertCountEqual(Allergies(5).lst, ['eggs', 'shellfish'])
def test_allergic_to_lots_of_stuff(self):
self.assertCountEqual(
Allergies(248).lst,
['strawberries', 'tomatoes', 'chocolate', 'pollen', 'cats'])
def test_allergic_to_everything(self):
self.assertCountEqual(
Allergies(255).lst, [
'eggs', 'peanuts', 'shellfish', 'strawberries', 'tomatoes',
'chocolate', 'pollen', 'cats'
])
def test_ignore_non_allergen_score_parts_only_eggs(self):
self.assertEqual(Allergies(257).lst, ['eggs'])
def test_ignore_non_allergen_score_parts(self):
self.assertCountEqual(
Allergies(509).lst, [
'eggs', 'shellfish', 'strawberries', 'tomatoes', 'chocolate',
'pollen', 'cats'
])
if __name__ == '__main__':
unittest.main()
| import unittest
from allergies import Allergies
class AllergiesTests(unittest.TestCase):
def test_no_allergies_means_not_allergic(self):
allergies = Allergies(0)
self.assertFalse(allergies.is_allergic_to('peanuts'))
self.assertFalse(allergies.is_allergic_to('cats'))
self.assertFalse(allergies.is_allergic_to('strawberries'))
def test_is_allergic_to_eggs(self):
self.assertTrue(Allergies(1).is_allergic_to('eggs'))
def test_has_the_right_allergies(self):
allergies = Allergies(5)
self.assertTrue(allergies.is_allergic_to('eggs'))
self.assertTrue(allergies.is_allergic_to('shellfish'))
self.assertFalse(allergies.is_allergic_to('strawberries'))
def test_no_allergies_at_all(self):
self.assertEqual(Allergies(0).lst, [])
def test_allergic_to_just_peanuts(self):
self.assertEqual(Allergies(2).lst, ['peanuts'])
def test_allergic_to_everything(self):
self.assertEqual(
sorted(Allergies(255).lst),
sorted(('eggs peanuts shellfish strawberries tomatoes '
'chocolate pollen cats').split()))
@unittest.skip('Extra Credit: Passes with a specific type of solution')
def test_ignore_non_allergen_score_parts(self):
self.assertEqual(Allergies(257).lst, ['eggs'])
if __name__ == '__main__':
unittest.main()
| mit | Python |
c0d71164e1a659008426d53dae384e83bbcdabb6 | Use instance.summary in description | whiskyechobravo/pelican-open_graph | open_graph.py | open_graph.py | # -*- coding: utf-8 -*- #
"""
Open Graph
==========
This plugin adds Open Graph Protocol tags to articles.
Use like this in your template:
.. code-block:: jinja2
{% for tag in article.ogtags %}
<meta property="{{tag[0]}}" content="{{tag[1]|striptags|e}}" />
{% endfor %}
"""
from __future__ import unicode_literals
import os.path
from pelican import contents
from pelican import signals
from pelican.utils import strftime, path_to_url
def tag_article(instance):
if not isinstance(instance, contents.Article):
return
ogtags = [('og:title', instance.title),
('og:type', 'article')]
image = instance.metadata.get('og_image', '')
if image:
ogtags.append(('og:image', image))
url = os.path.join(instance.settings.get('SITEURL', ''), instance.url)
ogtags.append(('og:url', url))
ogtags.append(('og:description', instance.metadata.get('og_description',
instance.metadata.get('summary',
instance.summary))))
default_locale = instance.settings.get('LOCALE', [])
if default_locale:
default_locale = default_locale[0]
else:
default_locale = ''
ogtags.append(('og:locale', instance.metadata.get('og_locale', default_locale)))
ogtags.append(('og:site_name', instance.settings.get('SITENAME', '')))
ogtags.append(('article:published_time', strftime(instance.date, "%Y-%m-%d")))
ogtags.append(('article:modified_time', strftime(instance.modified, "%Y-%m-%d")))
author_fb_profiles = instance.settings.get('AUTHOR_FB_ID', {})
if len(author_fb_profiles) > 0:
for author in instance.authors:
if author.name in author_fb_profiles:
ogtags.append(('article:author', author_fb_profiles[author.name]))
ogtags.append(('article:section', instance.category.name))
for tag in instance.tags:
ogtags.append(('article:tag', tag.name))
instance.ogtags = ogtags
def register():
signals.content_object_init.connect(tag_article)
| # -*- coding: utf-8 -*- #
"""
Open Graph
==========
This plugin adds Open Graph Protocol tags to articles.
Use like this in your template:
.. code-block:: jinja2
{% for tag in article.ogtags %}
<meta property="{{tag[0]}}" content="{{tag[1]|striptags|e}}" />
{% endfor %}
"""
from __future__ import unicode_literals
import os.path
from pelican import contents
from pelican import signals
from pelican.utils import strftime, path_to_url
def tag_article(instance):
if not isinstance(instance, contents.Article):
return
ogtags = [('og:title', instance.title),
('og:type', 'article')]
image = instance.metadata.get('og_image', '')
if image:
ogtags.append(('og:image', image))
url = os.path.join(instance.settings.get('SITEURL', ''), instance.url)
ogtags.append(('og:url', url))
ogtags.append(('og:description', instance.metadata.get('og_description',
instance.metadata.get('summary',
''))))
default_locale = instance.settings.get('LOCALE', [])
if default_locale:
default_locale = default_locale[0]
else:
default_locale = ''
ogtags.append(('og:locale', instance.metadata.get('og_locale', default_locale)))
ogtags.append(('og:site_name', instance.settings.get('SITENAME', '')))
ogtags.append(('article:published_time', strftime(instance.date, "%Y-%m-%d")))
ogtags.append(('article:modified_time', strftime(instance.modified, "%Y-%m-%d")))
author_fb_profiles = instance.settings.get('AUTHOR_FB_ID', {})
if len(author_fb_profiles) > 0:
for author in instance.authors:
if author.name in author_fb_profiles:
ogtags.append(('article:author', author_fb_profiles[author.name]))
ogtags.append(('article:section', instance.category.name))
for tag in instance.tags:
ogtags.append(('article:tag', tag.name))
instance.ogtags = ogtags
def register():
signals.content_object_init.connect(tag_article)
| agpl-3.0 | Python |
79500a7cefbbb25454284586a20ab0e7bb61d195 | Update sphinx Directive reference | dials/dials,dials/dials,dials/dials,dials/dials,dials/dials | doc/sphinx/button.py | doc/sphinx/button.py | from __future__ import absolute_import, division
from docutils import nodes
import jinja2
from docutils.parsers.rst import Directive
from docutils.parsers.rst.directives import unchanged
BUTTON_TEMPLATE = jinja2.Template(u"""
<a href="{{ link }}">
<span class="button">{{ text }}</span>
</a>
""")
# placeholder node for document graph
class button_node(nodes.General, nodes.Element):
pass
class ButtonDirective(Directive):
required_arguments = 0
option_spec = {
'text': unchanged,
'link': unchanged,
}
# this will execute when your directive is encountered
# it will insert a button_node into the document that will
# get visisted during the build phase
def run(self):
env = self.state.document.settings.env
app = env.app
app.add_stylesheet('button.css')
node = button_node()
node['text'] = self.options['text']
node['link'] = self.options['link']
return [node]
# build phase visitor emits HTML to append to output
def html_visit_button_node(self, node):
html = BUTTON_TEMPLATE.render(text=node['text'], link=node['link'])
self.body.append(html)
raise nodes.SkipNode
# if you want to be pedantic, define text, latex, manpage visitors too..
def setup(app):
app.add_node(button_node,
html=(html_visit_button_node, None))
app.add_directive('button', ButtonDirective)
| from __future__ import absolute_import, division
from docutils import nodes
import jinja2
from sphinx.util.compat import Directive
from docutils.parsers.rst.directives import unchanged
BUTTON_TEMPLATE = jinja2.Template(u"""
<a href="{{ link }}">
<span class="button">{{ text }}</span>
</a>
""")
# placeholder node for document graph
class button_node(nodes.General, nodes.Element):
pass
class ButtonDirective(Directive):
required_arguments = 0
option_spec = {
'text': unchanged,
'link': unchanged,
}
# this will execute when your directive is encountered
# it will insert a button_node into the document that will
# get visisted during the build phase
def run(self):
env = self.state.document.settings.env
app = env.app
app.add_stylesheet('button.css')
node = button_node()
node['text'] = self.options['text']
node['link'] = self.options['link']
return [node]
# build phase visitor emits HTML to append to output
def html_visit_button_node(self, node):
html = BUTTON_TEMPLATE.render(text=node['text'], link=node['link'])
self.body.append(html)
raise nodes.SkipNode
# if you want to be pedantic, define text, latex, manpage visitors too..
def setup(app):
app.add_node(button_node,
html=(html_visit_button_node, None))
app.add_directive('button', ButtonDirective)
| bsd-3-clause | Python |
054f057bfe08505096f8735d462a9ba23e2d889c | increase verbosity | dhardtke/pyEncode,dhardtke/pyEncode,dhardtke/pyEncode | run_tests.py | run_tests.py | import os
import sys
import unittest
sys.path.append(os.path.realpath(__file__) + "/app")
suite = unittest.TestLoader().discover("tests")
results = unittest.TextTestRunner(verbosity=3).run(suite)
if len(results.errors) > 0 or len(results.failures) > 0:
sys.exit(1)
sys.exit()
| import os
import sys
import unittest
sys.path.append(os.path.realpath(__file__) + "/app")
suite = unittest.TestLoader().discover("tests")
results = unittest.TextTestRunner(verbosity=2).run(suite)
if len(results.errors) > 0 or len(results.failures) > 0:
sys.exit(1)
sys.exit()
| mit | Python |
c52e55dfc739fe78b9fe36d257fcb154528de27c | Fix space. | ulule/django-safety,ulule/django-safety | safety/mixins.py | safety/mixins.py | # -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.utils.timezone import now
from .models import Session
class SessionMixin(object):
def get_queryset(self):
qs = Session.objects.filter(expire_date__gt=now(), user=self.request.user)
qs = qs.order_by('-last_activity')
return qs
class LoginRequiredMixin(object):
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
return super(LoginRequiredMixin, self).dispatch(request, *args, **kwargs)
|
# -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.utils.timezone import now
from .models import Session
class SessionMixin(object):
def get_queryset(self):
qs = Session.objects.filter(expire_date__gt=now(), user=self.request.user)
qs = qs.order_by('-last_activity')
return qs
class LoginRequiredMixin(object):
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
return super(LoginRequiredMixin, self).dispatch(request, *args, **kwargs)
| mit | Python |
ed58091b7adb2a1d75c34ae961f75ef70c62d1d9 | Update testHardware.py close #51 | oSoc17/code9000,oSoc17/code9000,oSoc17/code9000,oSoc17/code9000,oSoc17/code9000,oSoc17/code9000 | hardware/testHardware.py | hardware/testHardware.py | # Libraries
import RPi.GPIO as GPIO
import requests
import picamera
from time import gmtime, strftime, sleep
#Constants
PIR_SENSOR = 13
BOUNCE_TIME = 100
LATITUDE = 50.8503 # Change this to real location
LONGITUDE = 4.3517
API_URL = "develop.birds.today/api/observations"
camera = picamera.PiCamera()
def getTime():
return strftime('%Y-%m-%d %H:%M:%S', gmtime())
# Triggered when interrupt detected from the PIR sensor
def pir(PIR_SENSOR):
print('Hello World')
print('PIR sensor triggered!')
# Handle camera
def capturePicture():
camera = picamera.PiCamera()
currentTime = getTime()
print('Capturing..')
camera.capture('{}.jpg'.format(currentTime))
def main():
print("MAIN")
GPIO.setmode(GPIO.BOARD) # Use BCM GPIO numbers
GPIO.setup(PIR_SENSOR, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) # Setup GPIO for PIR sensor
GPIO.add_event_detect(PIR_SENSOR, GPIO.FALLING, bouncetime=BOUNCE_TIME)
#GPIO.wait_for_edge(PIR_SENSOR, GPIO.FALLING)
try:
print('Waiting for PIR event...')
while True:
if GPIO.event_detected(PIR_SENSOR):
GPIO.remove_event_detect(PIR_SENSOR)
print('Capturing..')
currentTime = getTime()
camera.capture('{}.jpg'.format(currentTime))
GPIO.add_event_detect(PIR_SENSOR, GPIO.FALLING, bouncetime=BOUNCE_TIME)
else:
sleep(0.1)
except KeyboardInterrupt:
GPIO.cleanup()
print('DONE')
main()
| # Libraries
import RPi.GPIO as GPIO
import requests
import picamera
from time import gmtime, strftime
#Constants
PIR_SENSOR = 13
LATITUDE = 50.8503 # Change this to real location
LONGITUDE = 4.3517
def getTime():
return strftime('%Y-%m-%d %H:%M:%S', gmtime())
# Triggered when interrupt detected from the PIR sensor
def pir(channel):
print('PIR sensor triggered!')
# Handle camera
def capturePicture():
currentTime = getTime()
camera.capture('{}.jpg'.format(currentTime))
def main():
GPIO.setmode(GPIO.BCM) # Use BCM GPIO numbers
GPIO.setup(PIR_SENSOR, GPIO.IN, pull_up_down=GPIO.PUD_UP) # Setup GPIO for PIR sensor
GPIO.add_event_detect(PIR_SENSOR, GPIO.FALLING, callback=my_callback2, bouncetime=300) # Dtetct interrupts for the PIR sensor
camera = picamera.PiCamera()
capturePicture()
| mit | Python |
aa2e72e5a6d9f9626cc9876c3b5b15adae293569 | add plot points | 4bic-attic/data_viz | rw_visual.py | rw_visual.py | import matplotlib.pyplot as plt
from random_walk import RandomWalk
#keep making random walks as long as the program is active
while True:
#make arandom walk and plt the points
rw = RandomWalk(50000)
rw.fill_walk()
#Plot points and show the plot
point_numbers = list(range(rw.num_points))
plt.scatter(rw.x_values, rw.y_values, c=point_numbers, cmap=plt.cm.Blues,
edgecolor='none', s=1)
#Emphasize the first and last points
plt.scatter(0, 0, c='green', edgecolor='none', s=100)
plt.scatter(rw.x_values[-1], rw.y_values[-1], c='red', edgecolor='none',
s=100)
#remove the axes
plt.axes().get_xaxis().set_visible(False)
plt.axes().get_yaxis().set_visible(False)
plt.show()
keep_running = raw_input("Make another walk? (y/n): ")
if keep_running == 'n':
break
| import matplotlib.pyplot as plt
from random_walk import RandomWalk
#keep making random walks as long as the program is active
while True:
#make arandom walk and plt the points
rw = RandomWalk()
rw.fill_walk()
point_numbers = list(range(rw.num_points))
plt.scatter(rw.x_values, rw.y_values, c=point_numbers, cmap=plt.cm.Blues,
edgecolor='none', s=15)
#Emphasize the first and last points
plt.scatter(0, 0, c='green', edgecolor='none', s=100)
plt.scatter(rw.x_values[-1], rw.y_values[-1], c='red', edgecolor='none',
s=100)
#remove the axes
plt.axes().get_xaxis().set_visible(False)
plt.axes().get_yaxis().set_visible(False)
plt.show()
keep_running = raw_input("Make another walk? (y/n): ")
if keep_running == 'n':
break
| mit | Python |
b822c91fa9c3228e9f6b3bff2a122bfc07fc89cb | improve spectrogram test | Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide | timeside/tests/api/test_pipe_spectrogram.py | timeside/tests/api/test_pipe_spectrogram.py | # -*- coding: utf-8 -*-
import os
from timeside.core import *
from timeside.api import *
from timeside.decoder import *
from timeside.grapher import *
sample_dir = '../samples'
img_dir = '../results/img'
if not os.path.exists(img_dir):
os.mkdir(img_dir)
test_dict = {'sweep.wav': 'spec_wav.png',
'sweep.flac': 'spec_flac.png',
'sweep.ogg': 'spec_ogg.png',
'sweep.mp3': 'spec_mp3.png',
}
for source, image in test_dict.iteritems():
audio = os.path.join(os.path.dirname(__file__), sample_dir + os.sep + source)
image = img_dir + os.sep + image
print 'Test : decoder(%s) | waveform (%s)' % (source, image)
decoder = FileDecoder(audio)
spectrogram = Spectrogram(width=1024, height=256, output=image, bg_color=(0,0,0), color_scheme='default')
(decoder | spectrogram).run()
print 'frames per pixel = ', spectrogram.graph.samples_per_pixel
print "render spectrogram to: %s" % image
spectrogram.render()
| # -*- coding: utf-8 -*-
import os
from timeside.core import *
from timeside.api import *
from timeside.decoder import *
from timeside.grapher import *
image_file = '../results/img/spectrogram.png'
source = os.path.join(os.path.dirname(__file__), "../samples/sweep.wav")
decoder = FileDecoder(source)
spectrogram = Spectrogram(width=1024, height=256, output=image_file, bg_color=(0,0,0), color_scheme='default')
(decoder | spectrogram).run()
print 'frames per pixel = ', spectrogram.graph.samples_per_pixel
print "render spectrogram to: %s" % image_file
spectrogram.render()
| agpl-3.0 | Python |
632ef6f1f1a8382e757459ed98fc8beb0a2e9eb9 | use python3 from virtualenv | hronecviktor/twmail,hronecviktor/twmail | twmail.py | twmail.py | #!python3
from flask import Flask
from flask import request
app = Flask(__name__)
@app.route('/', methods=['GET'])
def hello_world():
return 'Hi there'
if __name__ == '__main__':
app.run()
| #!/usr/bin/env python3
from flask import Flask
from flask import request
app = Flask(__name__)
@app.route('/', methods=['GET'])
def hello_world():
return 'Hi there'
if __name__ == '__main__':
app.run()
| mit | Python |
e0f442e34ecd7e006679d2520f426e88ccc3b626 | add factories for meal application | savoirfairelinux/santropol-feast,madmath/sous-chef,savoirfairelinux/sous-chef,savoirfairelinux/sous-chef,savoirfairelinux/santropol-feast,madmath/sous-chef,madmath/sous-chef,savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef | django/santropolFeast/meal/factories.py | django/santropolFeast/meal/factories.py | # coding=utf-8
import factory
from meal.model import Meal, Ingredient, Allergy
class MealFactory(factory.DjangoModelFactory):
class Meta:
model = Meal
name = "Tomato Soupe"
description = "A Simple Tomato Soupe"
size = "R"
@classmethod
def __init__(self, **kwargs):
name = kwargs.pop("name", None)
meal = super(MealFactory, self).__init__(self, **kwargs)
meal.save()
class IngredientFactory(factory.DjangoModelFactory):
class Meta:
model = Ingredient
name = "Tomato"
@classmethod
def __init__(self, **kwargs):
name = kwargs.pop('name', None)
ingredients = super(IngredientFactory, self).__init__(self, **kwargs)
ingredients.save()
class AllergyFactory(factory.DjangoModelFactory):
class Meta:
model = Allergy
name = "Tomato"
description = "A Simple Tomato"
@classmethod
def __init__(self, **kwargs):
name = kwargs.pop("name",None)
allergy = super(AllergyFactory, self).__init__(self, **kwargs)
allergy.save()
| # coding=utf-8
import factory
from meal.model import Meal, Ingredient, Allergy
class MealFactory(factory.DjangoModelFactory):
class Meta:
model = Meal
nom = "Tomato Soupe"
description = "A Simple Tomato Soupe"
Ingredients = "Tomato"
class IngredientFactory(factory.DjangoModelFactory):
class Meta:
model = Ingredient
Ingredient = "Tomato"
class AllergyFactory(factory.DjangoModelFactory):
class Meta:
model = Allergy
nom = "Tomato"
description = "A Simple Tomato"
Ingredient = "Tomato"
| agpl-3.0 | Python |
84c47634c9ef7d5021d549da7704863e69615c60 | Add put() method in BinarySearchTree class | bowen0701/algorithms_data_structures | ds_binary_search_tree.py | ds_binary_search_tree.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class TreeNode(object):
"""Tree node class collects helper functions for BinarySearchTree."""
def __init__(self, key, value,
left=None, right=None, parent=None):
self.key = key
self.value = value
self.left_child = left
self.right_child = right
self.parent = parent
def has_left_child(self):
return self.left_child
def has_right_child(self):
return self.right_child
def is_left_child(self):
return self.parent and self.parent.left_child == self
def is_right_child(self):
return self.parent and self.parent.right_child == self
def is_root(self):
return not self.parent
def is_leaf(self):
return not (self.left_child or self.right_child)
def has_any_children(self):
return self.left_child or self.right_child
def has_both_children(self):
return self.left_child and self.right_child
def replace_node_data(self, key, value, lc, rc):
self.key = key
self.payload = value
self.left_child = lc
self.right_child = rc
if self.has_left_child():
self.left_child.parent = self
if self.has_right_child():
self.right_child.parent = self
class BinarySearchTree(object):
def __init__(self):
self.root = None
self.size = 0
def length(self):
return self.size
def __len__(self):
return self.size
def __iter__(self):
return self.root.__iter__()
def _put():
pass
def put():
pass
def main():
bst = BinarySearchTree()
pass
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class TreeNode(object):
"""Tree node class collects helper functions for BinarySearchTree."""
def __init__(self, key, value,
left=None, right=None, parent=None):
self.key = key
self.value = value
self.left_child = left
self.right_child = right
self.parent = parent
def has_left_child(self):
return self.left_child
def has_right_child(self):
return self.right_child
def is_left_child(self):
return self.parent and self.parent.left_child == self
def is_right_child(self):
return self.parent and self.parent.right_child == self
def is_root(self):
return not self.parent
def is_leaf(self):
return not (self.left_child or self.right_child)
def has_any_children(self):
return self.left_child or self.right_child
def has_both_children(self):
return self.left_child and self.right_child
def replace_node_data(self, key, value, lc, rc):
self.key = key
self.payload = value
self.left_child = lc
self.right_child = rc
if self.has_left_child():
self.left_child.parent = self
if self.has_right_child():
self.right_child.parent = self
class BinarySearchTree(object):
def __init__(self):
self.root = None
self.size = 0
def length(self):
return self.size
def __len__(self):
return self.size
def __iter__(self):
return self.root.__iter__()
def main():
bst = BinarySearchTree()
pass
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
356ed840925cb2bf63bc03092e4989b97dd61bdb | Remove superfluous `print` | claymation/django-zendesk | djzendesk/views.py | djzendesk/views.py | import base64
import logging
from django.conf import settings
from django.http import HttpResponse, HttpResponseNotAllowed, HttpResponseBadRequest, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from djzendesk.signals import target_callback_received
def is_authenticated(request, username, password):
"""Authenticate the request using HTTP Basic authorization"""
authenticated = False
if 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2:
if auth[0].lower() == "basic":
provided_username, provided_password = base64.b64decode(auth[1]).split(':')
if username == provided_username and password == provided_password:
authenticated = True
return authenticated
@csrf_exempt
def callback(request, ticket_id):
"""Handle HTTP callback requests from Zendesk"""
# Require POST. Anything else would be uncivilized.
if not request.method == 'POST':
return HttpResponseNotAllowed(['POST'])
username = getattr(settings, 'ZENDESK_CALLBACK_USERNAME', None)
password = getattr(settings, 'ZENDESK_CALLBACK_PASSWORD', None)
# Authenticate the request if credentials have been configured
if username is not None and password is not None:
if not is_authenticated(request, username, password):
return HttpResponseForbidden()
# Extract the message
if not 'message' in request.POST:
return HttpResponseBadRequest()
message = request.POST['message']
logging.info("HTTP callback received from Zendesk for ticket %s: %s", ticket_id, message)
# Fire the signal to notify listeners of received target callback
target_callback_received.send(sender=None, ticket_id=ticket_id, message=message)
return HttpResponse('OK')
| import base64
import logging
from django.conf import settings
from django.http import HttpResponse, HttpResponseNotAllowed, HttpResponseBadRequest, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from djzendesk.signals import target_callback_received
def is_authenticated(request, username, password):
"""Authenticate the request using HTTP Basic authorization"""
authenticated = False
if 'HTTP_AUTHORIZATION' in request.META:
print request.META['HTTP_AUTHORIZATION']
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2:
if auth[0].lower() == "basic":
provided_username, provided_password = base64.b64decode(auth[1]).split(':')
if username == provided_username and password == provided_password:
authenticated = True
return authenticated
@csrf_exempt
def callback(request, ticket_id):
"""Handle HTTP callback requests from Zendesk"""
# Require POST. Anything else would be uncivilized.
if not request.method == 'POST':
return HttpResponseNotAllowed(['POST'])
username = getattr(settings, 'ZENDESK_CALLBACK_USERNAME', None)
password = getattr(settings, 'ZENDESK_CALLBACK_PASSWORD', None)
# Authenticate the request if credentials have been configured
if username is not None and password is not None:
if not is_authenticated(request, username, password):
return HttpResponseForbidden()
# Extract the message
if not 'message' in request.POST:
return HttpResponseBadRequest()
message = request.POST['message']
logging.info("HTTP callback received from Zendesk for ticket %s: %s", ticket_id, message)
# Fire the signal to notify listeners of received target callback
target_callback_received.send(sender=None, ticket_id=ticket_id, message=message)
return HttpResponse('OK')
| bsd-3-clause | Python |
47ba50d73dbaae2818fb177d9c2417e182d2b53d | Fix mistake in implementation of Board squares | djshuckerow/GameOfPebbles | PebbleGame/src/Board.py | PebbleGame/src/Board.py | '''
The Board class represents a board game containing n columns,
p pebbles per square, and one row per player. It provides functions
necessary for moving pebbles and copying the current game state.
'''
__author__ = "Carlos Lemus, David Shuckerow"
__license__ = "MIT"
class Board(object):
"""
:param n: number of squares per player.
:param p: number of pebbles per square.
"""
def __init__(self, n, p):
self.squares = [[p for _ in range(2)] for _ in range(n)]
pass
def move(self):
""" Performs a move on the selected square. """
pass
# TODO: add copy method.
def get_score(self, player):
"""
:param player:
"""
pass
| '''
The Board class represents a board game containing n columns,
p pebbles per square, and one row per player. It provides functions
necessary for moving pebbles and copying the current game state.
'''
__author__ = "Carlos Lemus, David Shuckerow"
__license__ = "MIT"
class Board(object):
"""
:param n: number of squares per player.
:param p: number of pebbles per square.
"""
def __init__(self, n, p):
_squares = [[p for _ in range(2)] for _ in range(n)]
pass
def move(self):
""" Performs a move on the selected square. """
pass
# TODO: add copy method.
def get_score(self, player):
"""
:param player:
"""
pass
| mit | Python |
e7eab95e6739b25e5b2fe3d2358ae1aaaa543854 | save "CRUDDII/CRUDDII_ALL_n100_p91_th.txt" to sparseData | ameenetemady/DeepPep,ameenetemady/DeepPep,ameenetemady/DeepPep | DeepPep/mySprsInput.py | DeepPep/mySprsInput.py | #!/usr/bin/env python3.5
# run parameters: CRUDDII/CRUDDII_ALL_n100_p91_th.txt CRUDDII/CRUDDII_ALL_n100_p91.1n.txt
import os
import sys
import numpy as np
import csv
#X=np.zeros((400,50104), dtype=np.int_)
def sparseWriteLineToFiles(strLine, bfList, lineId):
currProtId=0
currProtStart=0
isCurrProtEmpty=True
protNonEmpty={}
for i in range(0,len(strLine)):
if strLine[i] == 'B':
isCurrProtEmpty=True
currProtId=currProtId+1
currProtStart=i+1
elif strLine[i] == 'X':
if isCurrProtEmpty:
bfList[currProtId].write('{:d}:'.format(lineId))
isCurrProtEmpty=False
protNonEmpty[currProtId]=True
offset=i-currProtStart
bfList[currProtId].write('{:d},'.format(offset))
for currProtId, value in protNonEmpty.items():
bfList[currProtId].write('\n')
return #End sparseWriteLineToFiles
strDirname='./sparseData/'
nProteins=182
bfList=[]
metaInfo=[]
#a) Create protein files
for i in range(0, nProteins):
strFilename='p{:d}.sprs'.format(i)
metaInfo.append([strFilename])
bfCurr=open(strDirname + strFilename, 'w')
bfList.append(bfCurr)
#b) Read from bfInput and write to bfList (protein files)
bfInput=open(sys.argv[1], 'r')
rId=0
for strLine in bfInput:
sparseWriteLineToFiles(strLine, bfList, rId)
rId=rId+1
#c) Calculate width of each protein
bfInput.seek(0)
nWidth=0
currProtId=0
currChar=bfInput.read(1)
while currChar != '\n':
if currChar == 'B':
metaInfo[currProtId].append(nWidth)
nWidth=-1
currProtId=currProtId+1
nWidth=nWidth+1
currChar=bfInput.read(1)
metaInfo[currProtId].append(nWidth)
#d) Save the metaInfo
with open('sparseData/metaInfo.csv', 'w', newline='') as csvfile:
writer = csv.writer(csvfile)
writer.writerows(metaInfo)
| #!/usr/bin/env python3.5
# run parameters: CRUDDII/CRUDDII_ALL_n100_p91_th.txt CRUDDII/CRUDDII_ALL_n100_p91.1n.txt
import os
import sys
import numpy as np
import csv
#X=np.zeros((400,50104), dtype=np.int_)
def sparseWriteLineToFiles(strLine, bfList, lineId):
currProtId=0
currProtStart=0
isCurrProtEmpty=True
protNonEmpty={}
for i in range(0,len(strLine)):
if strLine[i] == 'B':
currProtId=currProtId+1
currProtStart=i+1
elif strLine[i] == 'X':
if isCurrProtEmpty:
bfList[currProtId].write('{:d}:'.format(lineId))
isCurrProtEmpty=False
protNonEmpty[currProtId]=True
offset=i-currProtStart
bfList[currProtId].write('{:d},'.format(offset))
for currProtId, value in protNonEmpty.items():
bfList[currProtId].write('\n')
return #End sparseWriteLineToFiles
nProteins=182
bfList=[]
metaInfo=[]
# Create protein files
for i in range(0, nProteins):
strFilename='sparseData/p{:d}.sprs'.format(i)
metaInfo.append([strFilename])
bfCurr=open(strFilename, 'w')
bfList.append(bfCurr)
# Read from bfInput and write to bfList (protein files)
'''
bfInput=open(sys.argv[1], 'r')
rId=0
for strLine in bfInput:
sparseWriteLineToFiles(strLine, bfList, rId)
rId=rId+1
'''
# Record the meta info
bfInput.seek(0)
bfInput.readline()
#for item in metaInfo:
# print(item[0])
| apache-2.0 | Python |
59eac0d8dba2f0aca43cd3df2777897079a2368e | Use 4chan's api | Exceen/4chan-downloader | thread-watcher.py | thread-watcher.py | #!/usr/bin/python
from itertools import chain
from urllib import request
import argparse
import json
import logging
import os
# TODO
# add argument to have something like vg/monster-hunter/ and inside that dir all threads separated by their id
# ./thread-watcher.py -b vg -q mhg -f queue.txt -n "Monster Hunter"
log = logging.getLogger('thread-watcher')
workpath = os.path.dirname(os.path.realpath(__file__))
API_URL_BASE = 'https://a.4cdn.org'
URL_BASE = 'https://boards.4chan.org'
def load_catalog(board):
url = '{base}/{board}/catalog.json'.format(base=API_URL_BASE, board=board)
req = request.Request(url, headers={'User-Agent': '4chan Browser',
'Content-Type': 'application/json'})
content = request.urlopen(req).read().decode('utf-8')
return json.loads(content)
def get_threads(board):
catalog = load_catalog(board)
return chain.from_iterable([page['threads'] for page in catalog])
def main():
parser = argparse.ArgumentParser(description='thread-watcher')
parser.add_argument('-v', '--verbose', action='store_true', help='verbose')
parser.add_argument('-b', '--board', help='board', required=True)
parser.add_argument('-q', '--query', help='search term', required=True)
parser.add_argument('-f', '--queuefile', help='queue file', required=True)
parser.add_argument('-n', '--naming', help='dir name for saved threads', required=True)
args = parser.parse_args()
name = args.naming.lower().replace(' ', '-')
thread_url = '{base}/{board}/%d/{name}'.format(
base=URL_BASE,
board=args.board,
name=name,
)
current_threads = []
for thread in get_threads(args.board):
if args.query in thread.get('sub', ''):
current_threads.append(thread_url % thread['no'])
ignored_lines = ['#', '-', '\n']
queue_threads = [line.strip() for line in open(args.queuefile, 'r') if line[0] not in ignored_lines]
new_threads = list(set(current_threads) - set(queue_threads))
if args.verbose:
print(new_threads)
if len(new_threads) > 0:
with open(args.queuefile, 'a') as f:
for thread in new_threads:
f.write(thread)
f.write('\n')
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
| #!/usr/bin/python
from urllib import request
import argparse
import logging
import os
import re
# TODO
# add argument to have something like vg/monster-hunter/ and inside that dir all threads separated by their id
# ./thread-watcher.py -b vg -q mhg -f queue.txt -n "Monster Hunter"
log = logging.getLogger('thread-watcher')
workpath = os.path.dirname(os.path.realpath(__file__))
def load(url):
req = request.Request(url, headers={'User-Agent': '4chan Browser'})
return request.urlopen(req).read()
def main():
parser = argparse.ArgumentParser(description='thread-watcher')
parser.add_argument('-v', '--verbose', action='store_true', help='verbose')
parser.add_argument('-b', '--board', help='board', required=True)
parser.add_argument('-q', '--query', help='search term', required=True)
parser.add_argument('-f', '--queuefile', help='queue file', required=True)
parser.add_argument('-n', '--naming', help='dir name for saved threads', required=True)
args = parser.parse_args()
name = args.naming.lower().replace(' ', '-')
query = args.query
base_url = 'https://boards.4chan.org/' + args.board + '/'
catalog_url = base_url + 'catalog'
current_threads = []
regex = '"(\d+)":\{(?!"sub").*?"sub":"((?!").*?)"'
for threadid, title in list(set(re.findall(regex, load(catalog_url).decode('utf-8')))):
if query not in title:
continue
current_threads.append(base_url + 'thread/' + threadid + '/' + name)
ignored_lines = ['#', '-', '\n']
queue_threads = [line.strip() for line in open(args.queuefile, 'r') if line[0] not in ignored_lines]
new_threads = list(set(current_threads) - set(queue_threads))
if args.verbose:
print(new_threads)
if len(new_threads) > 0:
with open(args.queuefile, 'a') as f:
for thread in new_threads:
f.write(thread)
f.write('\n')
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
| mit | Python |
2ce24f423ac1573c6c4b1a7a19672a84620042b5 | Remove old docstring | mopidy/mopidy-beets | mopidy_beets/__init__.py | mopidy_beets/__init__.py | from __future__ import unicode_literals
import os
from mopidy import ext, config
from mopidy.exceptions import ExtensionError
__version__ = '1.0.2'
class BeetsExtension(ext.Extension):
dist_name = 'Mopidy-Beets'
ext_name = 'beets'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(BeetsExtension, self).get_config_schema()
schema['hostname'] = config.Hostname()
schema['port'] = config.Port()
return schema
def validate_environment(self):
try:
import requests # noqa
except ImportError as e:
raise ExtensionError('Library requests not found', e)
def get_backend_classes(self):
from .actor import BeetsBackend
return [BeetsBackend]
| from __future__ import unicode_literals
import os
from mopidy import ext, config
from mopidy.exceptions import ExtensionError
__doc__ = """A extension for playing music from Beets.
This extension handles URIs starting with ``beets:`` and enables you,
to play music from Beets web service.
See https://github.com/dz0ny/mopidy-beets/ for further instructions on
using this extension.
**Issues:**
https://github.com/dz0ny/mopidy-beets/issues
**Dependencies:**
requests
"""
__version__ = '1.0.2'
class BeetsExtension(ext.Extension):
dist_name = 'Mopidy-Beets'
ext_name = 'beets'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(BeetsExtension, self).get_config_schema()
schema['hostname'] = config.Hostname()
schema['port'] = config.Port()
return schema
def validate_environment(self):
try:
import requests # noqa
except ImportError as e:
raise ExtensionError('Library requests not found', e)
def get_backend_classes(self):
from .actor import BeetsBackend
return [BeetsBackend]
| mit | Python |
d27a1e98662b1731def649e936395c9e50979125 | Update crop.py | TheNathanBlack/moviepy,Zulko/moviepy,misterbisson/moviepy,carlodri/moviepy,deniszgonjanin/moviepy,bertyhell/moviepy,ssteo/moviepy,mcanthony/moviepy,Vicaris/ModPro,benroeder/moviepy,LunarLanding/moviepy,kerstin/moviepy | moviepy/video/fx/crop.py | moviepy/video/fx/crop.py | def crop(clip, x1=None, y1=None, x2=None, y2=None,
width = None, height=None,
x_center= None, y_center=None):
"""
Returns a new clip in which just a rectangular subregion of the
original clip is conserved. x1,y1 indicates the top left corner and
x2,y2 is the lower right corner of the croped region.
All coordinates are in pixels. Float numbers are accepted.
To crop an arbitrary rectangle:
>>> crop(clip, x1=50, y1=60, x2=460, y2=275)
Only remove the part above y=30:
>>> crop(clip, y1=30)
Crop a rectangle that starts 10 pixels left and is 200px wide
>>> crop(clip, x1=10, width=200)
Crop a rectangle centered in x,y=(300,400), width=50, height=150 :
>>> crop(clip, x_center=300 , y_center=400,
width=50, height=150)
Any combination of the above should work, like for this rectangle
centered in x=300, with explicit y-boundaries:
>>> crop(x_center=300, width=400, y1=100, y2=600)
"""
if width and (x1 is not None or x2 is not None):
if x1 is not None:
x2 = x1+width
else:
x1 = x2-width
if height and (y1 is not None or y2 is not None):
if y1 is not None:
y2 = y1+height
else:
y1 = y2 - height
if x_center:
x1, x2 = x_center - width/2, x_center + width/2
if y_center:
y1, y2 = y_center - height/2, y_center + height/2
if x1 is None:
x1 = 0
if y1 is None:
y1 = 0
if x2 is None:
x2 = clip.size[0]
if y2 is None:
y2 = clip.size[1]
return clip.fl_image(
lambda pic: pic[int(y1):int(y2), int(x1):int(x2)],
apply_to=['mask'])
| def crop(clip, x1=None, y1=None, x2=None, y2=None,
width = None, height=None,
x_center= None, y_center=None):
"""
Returns a new clip in which just a rectangular subregion of the
original clip is conserved. x1,y1 indicates the top left corner and
x2,y2 is the lower right corner of the croped region.
All coordinates are in pixels. Float numbers are accepted.
To crop an arbitrary rectangle:
>>> crop(clip, x1=50, y1=60, x2=460, y2=275)
Only remove the part above y=30:
>>> crop(clip, y1=30)
Crop a rectangle that starts 10 pixels left and is 200px wide
>>> crop(clip, x1=10, width=200)
Crop a rectangle centered in x,y=(300,400), width=50, height=150 :
>>> crop(clip, x_center=300 , y_center=400,
width=50, height=150)
Any combination of the above should work, like for this rectangle
centered in x=300, with explicit y-boundaries:
>>> crop(x_center=300, width=400, y1=100, y2=600)
"""
if width:
if x1 is not None:
x2 = x1+width
else:
x1 = x2-width
if height:
if y1 is not None:
y2 = y1+height
else:
y1 = y2 - height
if x_center:
x1, x2 = x_center - width/2, x_center + width/2
if y_center:
y1, y2 = y_center - height/2, y_center + height/2
if x1 is None:
x1 = 0
if y1 is None:
y1 = 0
if x2 is None:
x2 = clip.size[0]
if y2 is None:
y2 = clip.size[1]
return clip.fl_image(
lambda pic: pic[int(y1):int(y2), int(x1):int(x2)],
apply_to=['mask'])
| mit | Python |
832163eca4b723e8c66b93108de71809a1a944cc | Remove hardcoded start and end dates | AlexFridman/visits-detector | visits_detector/scripts/mr_emulator_demo.py | visits_detector/scripts/mr_emulator_demo.py | import argparse
import datetime
import json
import sys
from visits_detector.core import FilterAndMapToIndexMapper, ExtractEventsReducer
from visits_detector.core.components.params import EventExtractionStageParams
from visits_detector.core.helpers.geo_index import build_geo_index_from_point_index
from visits_detector.core.helpers.index import get_bbox_by_index, read_index
from visits_detector.demo.mr_emulator import MapReduceEmulator
def lazy_load_gps_log(path):
with open(path) as f:
for line in f:
yield json.loads(line)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--index-path', required=True)
parser.add_argument('--gps-log-path', required=True)
parser.add_argument('--use-nn', action='store_true')
parser.add_argument('--model-path')
return parser.parse_args()
def main():
args = parse_args()
index = read_index(args.index_path)
gps_log = lazy_load_gps_log(args.gps_log_path)
mapper = FilterAndMapToIndexMapper(
start_dt=datetime.datetime.min,
end_dt=datetime.datetime.max,
bbox=get_bbox_by_index(index, 0.1),
geo_index=build_geo_index_from_point_index(index, precision=6),
cut_off_r=200,
id_column='uuid',
timestamp_column='timestamp',
speed_column='speed',
lat_column='lat',
lon_column='lon'
)
reducer_params = EventExtractionStageParams(
use_nn_estimator=args.use_nn,
model_path=args.model_path
)
reducer = ExtractEventsReducer(reducer_params)
mr_emulator = MapReduceEmulator(mapper, reducer, sort_by=['uuid', 'timestamp'], reduce_by=['uuid'])
for event in mr_emulator(gps_log):
sys.stdout.write(json.dumps(event) + '\n')
if __name__ == '__main__':
main()
| import argparse
import datetime
import json
import sys
from visits_detector.core import FilterAndMapToIndexMapper, ExtractEventsReducer
from visits_detector.core.components.params import EventExtractionStageParams
from visits_detector.core.helpers.geo_index import build_geo_index_from_point_index
from visits_detector.core.helpers.index import get_bbox_by_index, read_index
from visits_detector.demo.mr_emulator import MapReduceEmulator
def lazy_load_gps_log(path):
with open(path) as f:
for line in f:
yield json.loads(line)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--index-path', required=True)
parser.add_argument('--gps-log-path', required=True)
parser.add_argument('--use-nn', action='store_true')
parser.add_argument('--model-path')
return parser.parse_args()
def main():
args = parse_args()
index = read_index(args.index_path)
gps_log = lazy_load_gps_log(args.gps_log_path)
mapper = FilterAndMapToIndexMapper(
start_dt=datetime.datetime(2017, 4, 14),
end_dt=datetime.datetime(2017, 4, 15),
bbox=get_bbox_by_index(index, 0.5),
geo_index=build_geo_index_from_point_index(index, precision=6),
cut_off_r=200,
id_column='uuid',
timestamp_column='timestamp',
speed_column='speed',
lat_column='lat',
lon_column='lon'
)
reducer_params = EventExtractionStageParams(
use_nn_estimator=args.use_nn,
model_path=args.model_path
)
reducer = ExtractEventsReducer(reducer_params)
mr_emulator = MapReduceEmulator(mapper, reducer, sort_by=['uuid', 'timestamp'], reduce_by=['uuid'])
for event in mr_emulator(gps_log):
sys.stdout.write(json.dumps(event) + '\n')
if __name__ == '__main__':
main()
| mit | Python |
3fd6886ace3756c9e05ae798a6d1252e31d35c94 | Adjust tolerandce of sampling tests | poliastro/poliastro | tests/tests_twobody/test_sampling.py | tests/tests_twobody/test_sampling.py | from functools import partial
import numpy as np
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from hypothesis import example, given, settings, strategies as st
from poliastro.twobody.sampling import sample_closed
angles = partial(st.floats, min_value=-2 * np.pi, max_value=2 * np.pi)
eccentricities = partial(st.floats, min_value=0, max_value=1, exclude_max=True)
@st.composite
def with_units(draw, elements, unit):
angle = draw(elements)
return angle * unit
angles_q = partial(with_units, elements=angles(), unit=u.rad)
eccentricities_q = partial(with_units, elements=eccentricities(), unit=u.one)
@settings(deadline=None)
@given(
min_nu=angles_q(),
ecc=eccentricities_q(),
max_nu=st.one_of(angles_q(), st.none()),
)
def test_sample_closed_is_always_between_minus_pi_and_pi(min_nu, ecc, max_nu):
result = sample_closed(min_nu, ecc, max_nu)
assert ((-np.pi * u.rad <= result) & (result <= np.pi * u.rad)).all()
@settings(deadline=None)
@given(
min_nu=with_units(
elements=st.floats(
min_value=-np.pi, max_value=np.pi, exclude_max=True
),
unit=u.rad,
),
ecc=eccentricities_q(),
max_nu=st.one_of(angles_q(), st.none()),
)
@example(0 * u.rad, 0 * u.one, 0 * u.rad)
def test_sample_closed_starts_at_min_anomaly_if_in_range(min_nu, ecc, max_nu):
result = sample_closed(min_nu, ecc, max_nu)
assert_quantity_allclose(result[0], min_nu, atol=1e-15 * u.rad)
@settings(deadline=None)
@given(
min_nu=with_units(
elements=st.floats(min_value=-np.pi, max_value=np.pi), unit=u.rad
),
ecc=eccentricities_q(),
)
@example(1e-16 * u.rad, 0 * u.one)
@example(0 * u.rad, 0 * u.one)
@example(0 * u.rad, 0.88680956 * u.one)
@example(0 << u.rad, (1 - 1e-16) << u.one)
def test_sample_closed_starts_and_ends_at_min_anomaly_if_in_range_and_no_max_given(
min_nu, ecc
):
result = sample_closed(min_nu, ecc)
assert_quantity_allclose(result[0], min_nu, atol=1e-14 * u.rad)
assert_quantity_allclose(result[-1], min_nu, atol=1e-7 * u.rad)
| from functools import partial
import numpy as np
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from hypothesis import example, given, settings, strategies as st
from poliastro.twobody.sampling import sample_closed
angles = partial(st.floats, min_value=-2 * np.pi, max_value=2 * np.pi)
eccentricities = partial(st.floats, min_value=0, max_value=1, exclude_max=True)
@st.composite
def with_units(draw, elements, unit):
angle = draw(elements)
return angle * unit
angles_q = partial(with_units, elements=angles(), unit=u.rad)
eccentricities_q = partial(with_units, elements=eccentricities(), unit=u.one)
@settings(deadline=None)
@given(
min_nu=angles_q(),
ecc=eccentricities_q(),
max_nu=st.one_of(angles_q(), st.none()),
)
def test_sample_closed_is_always_between_minus_pi_and_pi(min_nu, ecc, max_nu):
result = sample_closed(min_nu, ecc, max_nu)
assert ((-np.pi * u.rad <= result) & (result <= np.pi * u.rad)).all()
@settings(deadline=None)
@given(
min_nu=with_units(
elements=st.floats(
min_value=-np.pi, max_value=np.pi, exclude_max=True
),
unit=u.rad,
),
ecc=eccentricities_q(),
max_nu=st.one_of(angles_q(), st.none()),
)
@example(0 * u.rad, 0 * u.one, 0 * u.rad)
def test_sample_closed_starts_at_min_anomaly_if_in_range(min_nu, ecc, max_nu):
result = sample_closed(min_nu, ecc, max_nu)
assert_quantity_allclose(result[0], min_nu, atol=1e-15 * u.rad)
@settings(deadline=None)
@given(
min_nu=with_units(
elements=st.floats(min_value=-np.pi, max_value=np.pi), unit=u.rad
),
ecc=eccentricities_q(),
)
@example(1e-16 * u.rad, 0 * u.one)
@example(0 * u.rad, 0 * u.one)
@example(0 * u.rad, 0.88680956 * u.one)
def test_sample_closed_starts_and_ends_at_min_anomaly_if_in_range_and_no_max_given(
min_nu, ecc
):
result = sample_closed(min_nu, ecc)
assert_quantity_allclose(result[0], min_nu, atol=1e-14 * u.rad)
assert_quantity_allclose(result[-1], min_nu, atol=1e-14 * u.rad)
| mit | Python |
7c3ac5adc33d2048f28a96d8145e71a4c12518cc | Set base version to 1.6 | etalab/udata,opendatateam/udata,opendatateam/udata,opendatateam/udata,etalab/udata,etalab/udata | udata/__init__.py | udata/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
uData
'''
from __future__ import unicode_literals
__version__ = '1.6.0.dev'
__description__ = 'Open data portal'
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
uData
'''
from __future__ import unicode_literals
__version__ = '1.5.1.dev'
__description__ = 'Open data portal'
| agpl-3.0 | Python |
0e20f9fe0dd239cb2fd0c55e7b901861a61d18ad | simplify and remove unnecessary code | stoewer/nixpy,stoewer/nixpy | nix/util/find.py | nix/util/find.py | # Copyright (c) 2014, German Neuroinformatics Node (G-Node)
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted under the terms of the BSD License. See
# LICENSE file in the root of the Project.
from __future__ import absolute_import
import nix
class _cont(object):
"""
Simple container for an element an a level
"""
def __init__(self, elem, level):
self.elem = elem
self.level = level
def _find_sources(with_sources, filtr, limit):
"""
Find a list of matching sources recursively.
For internal use.
"""
fifo = []
result = []
level = 0
if isinstance(with_sources, nix.Source):
fifo.append(_cont(with_sources, level))
else:
level += 1
fifo += [_cont(e, level) for e in with_sources.sources]
while len(fifo) > 0:
c = fifo.pop(0)
level = c.level + 1
if level <= limit:
fifo += [_cont(e, level) for e in c.elem.sources]
if filtr(c.elem):
result.append(c.elem)
return result
def _find_sections(with_sections, filtr, limit):
"""
Find a list of matching sections recursively.
For internal use.
"""
fifo = []
result = []
level = 0
if isinstance(with_sections, nix.Section):
fifo.append(_cont(with_sections, level))
else:
level += 1
fifo += [_cont(e, level) for e in with_sections.sections]
while len(fifo) > 0:
c = fifo.pop(0)
level = c.level + 1
if level <= limit:
fifo += [_cont(e, level) for e in c.elem.sections]
if filtr(c.elem):
result.append(c.elem)
return result
| # Copyright (c) 2014, German Neuroinformatics Node (G-Node)
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted under the terms of the BSD License. See
# LICENSE file in the root of the Project.
from __future__ import absolute_import
import nix
class _cont(object):
"""
Simple container for an element an a level
"""
def __init__(self, elem, level):
self.elem = elem
self.level = level
def _find_sources(with_sources, filtr, limit):
"""
Find a list of matching sources recursively.
For internal use.
"""
fifo = []
result = []
level = 0
if isinstance(with_sources, nix.Source):
fifo.append(_cont(with_sources, level))
else:
level += 1
fifo += [_cont(e, level) for e in with_sources.sources]
while len(fifo) > 0:
c = fifo.pop(0)
level = c.level
if level > limit:
break
level += 1
if level <= limit:
fifo += [_cont(e, level) for e in c.elem.sources]
if filtr(c.elem):
result.append(c.elem)
return result
def _find_sections(with_sections, filtr, limit):
"""
Find a list of matching sections recursively.
For internal use.
"""
fifo = []
result = []
level = 0
if isinstance(with_sections, nix.Section):
fifo.append(_cont(with_sections, level))
else:
level += 1
fifo += [_cont(e, level) for e in with_sections.sections]
while len(fifo) > 0:
c = fifo.pop(0)
level = c.level
if level > limit:
break
level += 1
if level <= limit:
fifo += [_cont(e, level) for e in c.elem.sections]
if filtr(c.elem):
result.append(c.elem)
return result
| bsd-3-clause | Python |
8a553df5d6e11b346a4cabf9ac137ce5c2910fa7 | install just with `python -m jupyterdrive` | jupyter/jupyter-drive,jupyter/jupyter-drive,jupyter/jupyter-drive,Carreau/jupyter-drive,Carreau/jupyter-drive | jupyterdrive/__init__.py | jupyterdrive/__init__.py |
import IPython
import IPython.html.nbextensions as nbe
from IPython.utils.path import locate_profile
import sys
import os
import os.path
import json
import io
from IPython.config import Config, JSONFileConfigLoader, ConfigFileNotFound
def install(profile='default', symlink=False):
dname = os.path.dirname(__file__)
# miht want to check if already installed and overwrite if exist
nbe.install_nbextension(os.path.join(dname,'gdrive'), symlink=symlink)
activate(profile)
def activate(profile):
dname = os.path.dirname(__file__)
pdir = locate_profile(profile)
jc = JSONFileConfigLoader('ipython_notebook_config.json',pdir)
try:
config = jc.load_config();
except (ConfigFileNotFound,ValueError) as e:
config = Config()
if 'NotebookApp' in config:
if ('tornado_settings' in config['NotebookApp']) or ('contents_manager_class' in config['NotebookApp']):
# TODO, manually merge tornado settin if exist
# but cannot do anythin automatically if contents_manager_calss is set
raise ValueError('You already got some configuration that will conflict with google drive. Bailin out')
drive_config = JSONFileConfigLoader('ipython_notebook_config.json', dname).load_config()
config.merge(drive_config)
print(config)
config['nbformat'] = 1
with io.open(os.path.join(pdir,'ipython_notebook_config.json'),'wb') as f:
json.dump(config,f, indent=2)
def deactivate(profile):
"""should be a matter of just unsetting the above keys
"""
raise NotImplemented('deactivating a profile is not yet implemented.')
if __name__ == '__main__':
"""shoudl probably parse aruments of profiel and or activate deactivate"""
install()
|
import IPython
import IPython.html.nbextensions as nbe
import sys
import os
import os.path
def install(profile='default', symlink=False):
dname = os.path.dirname(__file__)
nbe.install_nbextension(os.path.join(dname,'gdrive'), symlink=symlink)
if __name__ == '__main__':
install()
| bsd-2-clause | Python |
2c1fe59f793ce6acfcf1cf85901d49739af10534 | fix typo in crashtest security parser | rackerlabs/django-DefectDojo,OWASP/django-DefectDojo,OWASP/django-DefectDojo,rackerlabs/django-DefectDojo,OWASP/django-DefectDojo,rackerlabs/django-DefectDojo,OWASP/django-DefectDojo,rackerlabs/django-DefectDojo,OWASP/django-DefectDojo | dojo/tools/crashtest_security/parser.py | dojo/tools/crashtest_security/parser.py | __author__ = "phylu"
from defusedxml import ElementTree as ET
from dojo.models import Finding
class CrashtestSecurityXmlParser(object):
"""
The objective of this class is to parse an xml file generated by the crashtest security suite.
@param xml_output A proper xml generated by the crashtest security suite
"""
def __init__(self, xml_output, test):
tree = self.parse_xml(xml_output)
if tree:
self.items = self.get_items(tree, test)
else:
self.items = []
def parse_xml(self, xml_output):
"""
Open and parse an xml file.
@return xml_tree An xml tree instance. None if error.
"""
try:
tree = ET.parse(xml_output)
except SyntaxError as se:
raise se
return tree
def get_items(self, tree, test):
"""
@return items A list of Host instances
"""
items = list()
# Get all testcases
for node in tree.findall('.//testcase'):
# Only failed test cases contain a finding
failure = node.find('failure')
if failure is None:
continue
title = node.get('name')
description = failure.get('message')
severity = failure.get('type')
find = Finding(title=title,
description=description,
test=test,
severity=severity,
mitigation="No mitigation provided",
active=False,
verified=False,
false_p=False,
duplicate=False,
out_of_scope=False,
mitigated=None,
impact="No impact provided",
numerical_severity=Finding.get_numerical_severity(severity))
items.append(find)
return items
| __auther__ = "phylu"
from defusedxml import ElementTree as ET
from dojo.models import Finding
class CrashtestSecurityXmlParser(object):
"""
The objective of this class is to parse an xml file generated by the crashtest security suite.
@param xml_output A proper xml generated by the crashtest security suite
"""
def __init__(self, xml_output, test):
tree = self.parse_xml(xml_output)
if tree:
self.items = self.get_items(tree, test)
else:
self.items = []
def parse_xml(self, xml_output):
"""
Open and parse an xml file.
@return xml_tree An xml tree instance. None if error.
"""
try:
tree = ET.parse(xml_output)
except SyntaxError as se:
raise se
return tree
def get_items(self, tree, test):
"""
@return items A list of Host instances
"""
items = list()
# Get all testcases
for node in tree.findall('.//testcase'):
# Only failed test cases contain a finding
failure = node.find('failure')
if failure is None:
continue
title = node.get('name')
description = failure.get('message')
severity = failure.get('type')
find = Finding(title=title,
description=description,
test=test,
severity=severity,
mitigation="No mitigation provided",
active=False,
verified=False,
false_p=False,
duplicate=False,
out_of_scope=False,
mitigated=None,
impact="No impact provided",
numerical_severity=Finding.get_numerical_severity(severity))
items.append(find)
return items
| bsd-3-clause | Python |
a021f279341eb15c17597200d4bbe97a98034c54 | Remove sensitive information from the example | FederatedAI/FATE,FederatedAI/FATE,FederatedAI/FATE | fate_flow/examples/test_inference.py | fate_flow/examples/test_inference.py | # -*-coding:utf8 -*-
import json
import requests
import time
import uuid
import datetime
import time
ids = ["18576635456", "13512345432"]
url1 = "http://127.0.0.1:8059/federation/1.0/inference"
for i in range(2):
request_data_tmp = {
"head": {
"serviceId": "test_model_service",
"applyId": "209090900991",
},
"body": {
"featureData": {
"phone_num": ids[i],
},
"sendToRemoteFeatureData": {
"device_type": "imei",
"phone_num": ids[i],
"encrypt_type": "raw"
}
}
}
headers = {"Content-Type": "application/json"}
response = requests.post(url1, json=request_data_tmp, headers=headers)
print("url地址:", url1)
print("请求信息:\n", request_data_tmp)
print()
print("响应信息:\n", response.text)
print()
#time.sleep(0.1)
| # -*-coding:utf8 -*-
import json
import requests
import time
import uuid
import datetime
import time
ids = ["18576635456", "13512345432"]
url1 = "http://172.16.153.71:8059/federation/1.0/inference"
for i in range(2):
request_data_tmp = {
"head": {
"serviceId": "test_model_service",
"applyId": "209090900991",
},
"body": {
"featureData": {
"phone_num": ids[i],
},
"sendToRemoteFeatureData": {
"device_type": "imei",
"phone_num": ids[i],
"encrypt_type": "raw"
}
}
}
headers = {"Content-Type": "application/json"}
response = requests.post(url1, json=request_data_tmp, headers=headers)
print("url地址:", url1)
print("请求信息:\n", request_data_tmp)
print()
print("响应信息:\n", response.text)
print()
#time.sleep(0.1)
| apache-2.0 | Python |
5b8e2d975962220ce4908a39607ac4d9a51d7545 | update project url in comment | Uname-a/knife_scraper,Uname-a/knife_scraper,Uname-a/knife_scraper | pytest_run.py | pytest_run.py | #!/usr/bin/env python
# coding=utf-8
"""This is a script for running pytest from the command line.
This script exists so that the project directory gets added to sys.path, which
prevents us from accidentally testing the globally installed willie version.
pytest_run.py
Copyright 2013, Ari Koivula, <ari@koivu.la>
Licensed under the Eiffel Forum License 2.
http://sopel.chat
"""
from __future__ import unicode_literals
if __name__ == "__main__":
import sys
import pytest
returncode = pytest.main()
sys.exit(returncode)
| #!/usr/bin/env python
# coding=utf-8
"""This is a script for running pytest from the command line.
This script exists so that the project directory gets added to sys.path, which
prevents us from accidentally testing the globally installed willie version.
pytest_run.py
Copyright 2013, Ari Koivula, <ari@koivu.la>
Licensed under the Eiffel Forum License 2.
http://willie.dfbta.net
"""
from __future__ import unicode_literals
if __name__ == "__main__":
import sys
import pytest
returncode = pytest.main()
sys.exit(returncode)
| mit | Python |
e9ce3ed024cd12a89200498aab7bc4920e7100b9 | change Admin model registrations to newer way using decorators | arturtamborski/arturtamborskipl,arturtamborski/arturtamborskipl | blog/admin.py | blog/admin.py | from django.contrib import admin
from .models import Tag, Category, Article
@admin.register(Tag)
class TagAdmin(admin.ModelAdmin):
ordering = ('name',)
list_display = ('id', 'name',)
search_fields = ('id', 'name',)
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
ordering = ('name',)
list_display = ('id', 'name',)
search_fields = ('id', 'name',)
@admin.register(Article)
class ArticleAdmin(admin.ModelAdmin):
ordering = ('-date',)
list_filter = ('date',)
list_display = ('id', 'title', 'date', 'category',)
search_fields = ('id', 'title', 'date', 'category',)
date_hierarchy = 'date'
filter_horizontal = ('tags',)
| from django.contrib import admin
from . import models as blog
class TagAdmin(admin.ModelAdmin):
ordering = ('name',)
list_display = ('id', 'name',)
search_fields = ('id', 'name',)
class CategoryAdmin(admin.ModelAdmin):
ordering = ('name',)
list_display = ('id', 'name',)
search_fields = ('id', 'name',)
class ArticleAdmin(admin.ModelAdmin):
ordering = ('-date',)
list_filter = ('date',)
list_display = ('id', 'title', 'date', 'category',)
search_fields = ('id', 'title', 'date', 'category',)
date_hierarchy = 'date'
filter_horizontal = ('tags',)
admin.site.register(blog.Article, ArticleAdmin)
admin.site.register(blog.Category, CategoryAdmin)
admin.site.register(blog.Tag, TagAdmin)
| mit | Python |
fa29356863f7912cdf84bb884719923155de7d94 | Update descitption of Accuracy | RobertKleinkauf/pyVienna | vienna.py | vienna.py | import RNA
def getBPPM(sequence, structure = "", bppm_cutoff = 0.00001):
"""
Requires ViennaRNAtools Python module
Returns the base pair probability matrix using Vienna pf_fold, get_pr and free_pf_arrays functions.
returns upper triangular matrix, whose entries exceed a threshold
"""
bppm = {}
#'--noPS', '-d 2', t, P
if structure != "":
RNA.cvar.fold_constrained = 1
else:
RNA.cvar.fold_constrained = 0
#print "Before", structure
RNA.pf_fold(sequence, structure)
#print "After", structure
seq_len = len(sequence)+1
for i in xrange(1, seq_len):
for j in xrange(1, seq_len):
if i<j:
bpp = RNA.get_pr(i,j)
if bpp > bppm_cutoff:
bppm[str(i) + "_" + str(j)] = bpp
else:
bppm[str(i) + "_" + str(j)] = 0
RNA.free_pf_arrays()
#print bppm
#exit(1)
return bppm
def getAccuracy(struct_dict, bppm):
"""
Calculate average structuredness of given structure(stack) within bppm
struct_dict is a dictionary of form struct_dict[i] = j indicating the base pair (i,j)|i<j wihtin the bppm
"""
acc = 0
for sq_i in struct_dict.keys():
v = str(sq_i) + "_" + str(struct_dict[sq_i])
if v in bppm:
acc += bppm[v]
#acc += math.pow(bppm[v], 2) / len(struct_stack)
return acc
| import RNA
def getBPPM(sequence, structure = "", bppm_cutoff = 0.00001):
"""
Requires ViennaRNAtools Python module
Returns the base pair probability matrix using Vienna pf_fold, get_pr and free_pf_arrays functions.
returns upper triangular matrix, whose entries exceed a threshold
"""
bppm = {}
#'--noPS', '-d 2', t, P
if structure != "":
RNA.cvar.fold_constrained = 1
else:
RNA.cvar.fold_constrained = 0
#print "Before", structure
RNA.pf_fold(sequence, structure)
#print "After", structure
seq_len = len(sequence)+1
for i in xrange(1, seq_len):
for j in xrange(1, seq_len):
if i<j:
bpp = RNA.get_pr(i,j)
if bpp > bppm_cutoff:
bppm[str(i) + "_" + str(j)] = bpp
else:
bppm[str(i) + "_" + str(j)] = 0
RNA.free_pf_arrays()
#print bppm
#exit(1)
return bppm
def getAccuracy(struct_dict, bppm):
"""
Calculate average structuredness of given structure(stack) within bppm
struct
"""
acc = 0
for sq_i in struct_dict.keys():
v = str(sq_i) + "_" + str(struct_dict[sq_i])
if v in bppm:
acc += bppm[v]
#acc += math.pow(bppm[v], 2) / len(struct_stack)
return acc
| mit | Python |
aefd70ce490d3a0673b6b82c6efb1dd046bcf46a | Add admin interface for StockItemLabel | SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree | InvenTree/label/admin.py | InvenTree/label/admin.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import StockItemLabel
class StockItemLabelAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'label')
admin.site.register(StockItemLabel, StockItemLabelAdmin)
| from django.contrib import admin
# Register your models here.
| mit | Python |
31e5d9874d1376ce7732dc5bd5271433f9160684 | Remove unnecessary comment | jacebrowning/gitman | gitman/__init__.py | gitman/__init__.py | """Package for GitMan."""
from pkg_resources import DistributionNotFound, get_distribution
from .commands import delete as uninstall
from .commands import display as list
from .commands import init, install, lock, update
try:
__version__ = get_distribution("gitman").version
except DistributionNotFound:
__version__ = "???"
| """Package for GitMan."""
from pkg_resources import DistributionNotFound, get_distribution
from .commands import delete as uninstall # pylint: disable=redefined-builtin
from .commands import display as list
from .commands import init, install, lock, update
try:
__version__ = get_distribution("gitman").version
except DistributionNotFound:
__version__ = "???"
| mit | Python |
0416a0af4cc090a04df535f736f8efae8a113144 | remove obsolete default kwarg from migration | dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | custom/ilsgateway/migrations/0005_add_pending_reporting_data_recalculation.py | custom/ilsgateway/migrations/0005_add_pending_reporting_data_recalculation.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('locations', '0001_initial'),
('ilsgateway', '0004_merge'),
]
operations = [
migrations.CreateModel(
name='PendingReportingDataRecalculation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('domain', models.CharField(max_length=128)),
('type', models.CharField(max_length=128)),
('data', jsonfield.fields.JSONField()),
('sql_location', models.ForeignKey(to='locations.SQLLocation')),
],
options={
},
bases=(models.Model,),
)
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('locations', '0001_initial'),
('ilsgateway', '0004_merge'),
]
operations = [
migrations.CreateModel(
name='PendingReportingDataRecalculation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('domain', models.CharField(max_length=128)),
('type', models.CharField(max_length=128)),
('data', jsonfield.fields.JSONField(default='null')),
('sql_location', models.ForeignKey(to='locations.SQLLocation')),
],
options={
},
bases=(models.Model,),
)
]
| bsd-3-clause | Python |
914ed1a63bf19ed45dc4c79378d4424b8ace84f6 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/2b1dc458cc7cecee7f8b19b178f726a751f62943. | tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "2b1dc458cc7cecee7f8b19b178f726a751f62943"
TFRT_SHA256 = "6355f78c948bb5d55be21003008e7415353147c0de0bb6d88d2d7eead8ac7758"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "68930096839ac6c0c53eec900503ff1e572156e9"
TFRT_SHA256 = "d1c7db16fd9fbd89ce6e0e527a521c7c714b301bc54d3e246f74d1b330697a47"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
f0fa5e30b94ecfef8d5c86946e94587c604e2315 | Fix PEP8 in app_with_config.py | rnixx/kivy,akshayaurora/kivy,inclement/kivy,kivy/kivy,Cheaterman/kivy,kivy/kivy,rnixx/kivy,Cheaterman/kivy,matham/kivy,bionoid/kivy,LogicalDash/kivy,KeyWeeUsr/kivy,LogicalDash/kivy,akshayaurora/kivy,bionoid/kivy,rnixx/kivy,LogicalDash/kivy,kivy/kivy,KeyWeeUsr/kivy,Cheaterman/kivy,KeyWeeUsr/kivy,inclement/kivy,bionoid/kivy,inclement/kivy,akshayaurora/kivy,matham/kivy,matham/kivy,KeyWeeUsr/kivy,bionoid/kivy,LogicalDash/kivy,Cheaterman/kivy,matham/kivy | examples/application/app_with_config.py | examples/application/app_with_config.py | from kivy.app import App
from kivy.lang import Builder
from kivy.properties import ConfigParserProperty
KV = '''
FloatLayout:
BoxLayout:
size_hint: .5, .5
pos_hint: {'center': (.5, .5)}
orientation: 'vertical'
TextInput:
text: app.text
on_text: app.text = self.text
Slider:
min: 0
max: 100
value: app.number
on_value: app.number = self.value
'''
class ConfigApp(App):
number = ConfigParserProperty(
0, 'general', 'number',
'app', val_type=float
)
text = ConfigParserProperty(
'', 'general', 'text',
'app', val_type=str
)
def build_config(self, config):
config.setdefaults(
'general',
{
'number': 0,
'text': 'test'
}
)
def build(self):
return Builder.load_string(KV)
if __name__ == '__main__':
ConfigApp().run()
| from kivy.app import App
from kivy.lang import Builder
from kivy.properties import ConfigParserProperty
KV = '''
FloatLayout:
BoxLayout:
size_hint: .5, .5
pos_hint: {'center': (.5, .5)}
orientation: 'vertical'
TextInput:
text: app.text
on_text: app.text = self.text
Slider:
min: 0
max: 100
value: app.number
on_value: app.number = self.value
'''
class ConfigApp(App):
number = ConfigParserProperty(0, 'general', 'number', 'app', val_type=float)
text = ConfigParserProperty('', 'general', 'text', 'app', val_type=str)
def build_config(self, config):
config.setdefaults(
'general',
{
'number': 0,
'text': 'test'
}
)
def build(self):
return Builder.load_string(KV)
if __name__ == '__main__':
ConfigApp().run()
| mit | Python |
a1581c6f819506b8c2827c8f768bf0bd058d2305 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/de022dcbf35e3119c933a5c345b8b582a9a3cb18. | tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "de022dcbf35e3119c933a5c345b8b582a9a3cb18"
TFRT_SHA256 = "dc9efd26b883c026354a071a733a7e002a04dd9ae601b57f388bdf5009d35635"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "a3dc93d984dd761c539304b21ea64324e30ef217"
TFRT_SHA256 = "7ff10fa3a2213d25e5aa46e20b783c6f96d94772b9fd1c1eac1cbc7a86668b53"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
a986d114a1f35e8e85c6b5a5235d6f1bf700f549 | Make the Connected Components operator use the utility function | mathturtle/tomviz,mathturtle/tomviz,thewtex/tomviz,cjh1/tomviz,OpenChemistry/tomviz,mathturtle/tomviz,cryos/tomviz,cryos/tomviz,cjh1/tomviz,thewtex/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,cjh1/tomviz,OpenChemistry/tomviz,cryos/tomviz,thewtex/tomviz | tomviz/python/ConnectedComponents.py | tomviz/python/ConnectedComponents.py | def transform_scalars(dataset, background_value=0):
"""Converts a label map of connected components of foreground-valued
voxels in the input image to a label map where each connected component
has a unique label. Foreground voxels have any value other than the
background value. Input images are expected to have integral voxel types,
i.e., no float or double voxels. The connected component labels are ordered
such that the smallest connected components have the lowest label values and
the largest connected components have the highest label values.
"""
from tomviz import utils
utils.connected_components(dataset, background_value)
| def transform_scalars(dataset, background_value=0):
"""Converts a label map of connected components of foreground-valued
voxels in the input image to a label map where each connected component
has a unique label. Foreground voxels have any value other than the
background value. Input images are expected to have integral voxel types,
i.e., no float or double voxels. The connected component labels are ordered
such that the smallest connected components have the lowest label values and
the largest connected components have the highest label values.
"""
try:
import itk
import itkTypes
import vtk
from tomviz import itkutils
from tomviz import utils
except Exception as exc:
print("Could not import necessary module(s)")
print(exc)
# Return values
returnValues = None
scalarType = dataset.GetScalarType()
if scalarType == vtk.VTK_FLOAT or scalarType == vtk.VTK_DOUBLE:
raise Exception(
"Connected Components works only on images with integral types.")
# Add a try/except around the ITK portion. ITK exceptions are
# passed up to the Python layer, so we can at least report what
# went wrong with the script, e.g,, unsupported image type.
try:
# Get the ITK image. The input is assumed to have an integral type.
# Take care of casting to an unsigned short image so we can store up
# to 65,535 connected components (the number of connected components
# is limited to the maximum representable number in the voxel type
# of the input image in the ConnectedComponentsFilter).
itk_image = itkutils.convert_vtk_to_itk_image(dataset, itkTypes.US)
itk_image_type = type(itk_image)
# ConnectedComponentImageFilter
connected_filter = itk.ConnectedComponentImageFilter[
itk_image_type, itk_image_type].New()
connected_filter.SetBackgroundValue(background_value)
connected_filter.SetInput(itk_image)
# Relabel filter. This will compress the label numbers to a
# continugous range between 1 and n where n is the number of
# labels. It will also sort the components from largest to
# smallest, where the largest component has label 1, the
# second largest has label 2, and so on...
relabel_filter = itk.RelabelComponentImageFilter[
itk_image_type, itk_image_type].New()
relabel_filter.SetInput(connected_filter.GetOutput())
relabel_filter.SortByObjectSizeOn()
relabel_filter.Update()
itk_image_data = relabel_filter.GetOutput()
label_buffer = itk.PyBuffer[
itk_image_type].GetArrayFromImage(itk_image_data)
# Flip the labels so that the largest component has the highest label
# value, e.g., the labeling ordering by size goes from [1, 2, ... N] to
# [N, N-1, N-2, ..., 1]. Note that zero is the background value, so we
# do not want to change it.
import numpy as np
minimum = 1 # Minimum label is always 1, background is 0
maximum = np.max(label_buffer)
# Try more memory-efficient approach
gt_zero = label_buffer > 0
label_buffer[gt_zero] = minimum - label_buffer[gt_zero] + maximum
utils.set_array(dataset, label_buffer)
except Exception as exc:
print("Exception encountered while running ConnectedComponents")
print(exc)
return returnValues
| bsd-3-clause | Python |
fdaa5df7ef2fa6fb9936ce202a8e40f3d201ce59 | Fix test compilation issue | dhiaayachi/dynx,dhiaayachi/dynx | test/test_integration.py | test/test_integration.py | import unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
connRouter.close()
self.assertEqual(response.status, 404)
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
response = connConfig.getresponse()
print("Body:", response.read().decode("utf-8"),"\n")
self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
| import unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 404)
connRouter.close()
connConfig.close()
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
response = connConfig.getresponse()
print("Body:" + response.read().decode("utf-8"))
self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
9122042377a9b622aafa2a77d7f46d46331427eb | Adjust util.Track.__repr__ | ViKomprenas/nsndswap,ViKomprenas/nsndswap,ViKomprenas/nsndswap | nsndswap/util.py | nsndswap/util.py | #!/usr/bin/env python3
# nsndswap/util.py
# copyright 2017 ViKomprenas, 2-clause BSD license (LICENSE.md)
class Track(object):
# encapsulation? what encapsulation? just use the properties
def __init__(self, title, references=None):
self.title = title
self.references = references or []
def __repr__(self):
return f'Track({self.title!r}, {self.references!r})'
def split_attrs(attrs):
ret = {}
for attr in attrs:
ret[attr[0]] = attr[1]
return ret
def reencode(string):
return string.encode('utf-8').decode('ascii', 'ignore')
| #!/usr/bin/env python3
# nsndswap/util.py
# copyright 2017 ViKomprenas, 2-clause BSD license (LICENSE.md)
class Track(object):
# encapsulation? what encapsulation? just use the properties
def __init__(self, title, references=None):
self.title = title
self.references = references or []
def __repr__(self):
return f'Track("{self.title}", {self.references})'
def split_attrs(attrs):
ret = {}
for attr in attrs:
ret[attr[0]] = attr[1]
return ret
def reencode(string):
return string.encode('utf-8').decode('ascii', 'ignore')
| bsd-2-clause | Python |
9f6168bcd27716588e7dec49b5aae50794ead5e7 | Add --hostname argument. | rchekaluk/cloudbiolinux,heuermh/cloudbiolinux,AICIDNN/cloudbiolinux,chapmanb/cloudbiolinux,kdaily/cloudbiolinux,elkingtonmcb/cloudbiolinux,elkingtonmcb/cloudbiolinux,joemphilips/cloudbiolinux,kdaily/cloudbiolinux,rchekaluk/cloudbiolinux,pjotrp/cloudbiolinux,AICIDNN/cloudbiolinux,lpantano/cloudbiolinux,averagehat/cloudbiolinux,chapmanb/cloudbiolinux,elkingtonmcb/cloudbiolinux,kdaily/cloudbiolinux,heuermh/cloudbiolinux,rchekaluk/cloudbiolinux,lpantano/cloudbiolinux,chapmanb/cloudbiolinux,joemphilips/cloudbiolinux,heuermh/cloudbiolinux,lpantano/cloudbiolinux,averagehat/cloudbiolinux,pjotrp/cloudbiolinux,AICIDNN/cloudbiolinux,chapmanb/cloudbiolinux,averagehat/cloudbiolinux,kdaily/cloudbiolinux,pjotrp/cloudbiolinux,averagehat/cloudbiolinux,heuermh/cloudbiolinux,pjotrp/cloudbiolinux,elkingtonmcb/cloudbiolinux,rchekaluk/cloudbiolinux,AICIDNN/cloudbiolinux,joemphilips/cloudbiolinux,joemphilips/cloudbiolinux | cloudbio/deploy/main.py | cloudbio/deploy/main.py | from argparse import ArgumentParser
import yaml
from cloudbio.deploy import deploy
DESC = "Creates an on-demand cloud instance, sets up applications, and transfer files to it."
## Properties that may be specified as args or in settings file,
## argument takes precedence.
ARG_PROPERTIES = [
# VM launcher options
"files",
"compressed_files",
"actions",
"runtime_properties",
"vm_provider",
"hostname",
# CloudBioLinux options
"target",
"flavor",
"package",
# CloudMan options
"target_bucket",
]
def main():
args = parse_args()
options = parse_settings(args.settings)
for property in ARG_PROPERTIES:
_copy_arg_to_options(options, args, property)
deploy(options)
def _copy_arg_to_options(options, args, property):
arg_property = getattr(args, property)
if arg_property or not property in options:
options[property] = arg_property
def parse_args():
parser = ArgumentParser(DESC)
parser.add_argument("--settings", dest="settings", default="settings.yaml")
parser.add_argument('--action', dest="actions", action="append", default=[])
parser.add_argument('--runtime_property', dest="runtime_properties", action="append", default=[])
parser.add_argument('--compressed_file', dest="compressed_files", action="append", default=[], help="file to transfer to new instance and decompress")
parser.add_argument('--file', dest="files", action="append", default=[], help="file to transfer to new instance")
parser.add_argument("--vm_provider", dest="vm_provider", default=None, help="libcloud driver to use (or vagrant) (e.g. aws, openstack)")
parser.add_argument("--hostname", dest="hostname", default=None, help="Newly created nodes are created with this specified hostname.")
# CloudBioLinux options
parser.add_argument("--target", dest="target", default=None, help="Specify a CloudBioLinux target, used with action install_biolinux action")
parser.add_argument("--flavor", dest="flavor", default=None, help="Specify a CloudBioLinux flavor, used with action install_biolinux action")
parser.add_argument("--package", dest="package", default=None, help="Specify a CloudBioLinux package, used with action install_custom")
# CloudMan related options
parser.add_argument("--target_bucket", dest="target_bucket", default=None, help="Specify a target bucket for CloudMan bucket related actions.")
args = parser.parse_args()
if len(args.actions) == 0:
args.actions = ["transfer"]
return args
def parse_settings(name):
return _read_yaml(name)
def _read_yaml(yaml_file):
with open(yaml_file) as in_handle:
return yaml.load(in_handle)
if __name__ == "__main__":
main()
| from argparse import ArgumentParser
import yaml
from cloudbio.deploy import deploy
DESC = "Creates an on-demand cloud instance, sets up applications, and transfer files to it."
## Properties that may be specified as args or in settings file,
## argument takes precedence.
ARG_PROPERTIES = [
# VM launcher options
"files",
"compressed_files",
"actions",
"runtime_properties",
"vm_provider",
# CloudBioLinux options
"target",
"flavor",
"package",
# CloudMan options
"target_bucket",
]
def main():
args = parse_args()
options = parse_settings(args.settings)
for property in ARG_PROPERTIES:
_copy_arg_to_options(options, args, property)
deploy(options)
def _copy_arg_to_options(options, args, property):
arg_property = getattr(args, property)
if arg_property or not property in options:
options[property] = arg_property
def parse_args():
parser = ArgumentParser(DESC)
parser.add_argument("--settings", dest="settings", default="settings.yaml")
parser.add_argument('--action', dest="actions", action="append", default=[])
parser.add_argument('--runtime_property', dest="runtime_properties", action="append", default=[])
parser.add_argument('--compressed_file', dest="compressed_files", action="append", default=[], help="file to transfer to new instance and decompress")
parser.add_argument('--file', dest="files", action="append", default=[], help="file to transfer to new instance")
parser.add_argument("--vm_provider", dest="vm_provider", default=None, help="libcloud driver to use (or vagrant) (e.g. aws, openstack)")
# CloudBioLinux options
parser.add_argument("--target", dest="target", default=None, help="Specify a CloudBioLinux target, used with action install_biolinux action")
parser.add_argument("--flavor", dest="flavor", default=None, help="Specify a CloudBioLinux flavor, used with action install_biolinux action")
parser.add_argument("--package", dest="package", default=None, help="Specify a CloudBioLinux package, used with action install_custom")
# CloudMan related options
parser.add_argument("--target_bucket", dest="target_bucket", default=None, help="Specify a target bucket for CloudMan bucket related actions.")
args = parser.parse_args()
if len(args.actions) == 0:
args.actions = ["transfer"]
return args
def parse_settings(name):
return _read_yaml(name)
def _read_yaml(yaml_file):
with open(yaml_file) as in_handle:
return yaml.load(in_handle)
if __name__ == "__main__":
main()
| mit | Python |
eb60962da42d0d197d320fdeaba5e8a2058a6454 | debug auto template name | Krozark/Kraggne,Krozark/Kraggne,Krozark/Kraggne | Kraggne/contrib/flatblocks/utils.py | Kraggne/contrib/flatblocks/utils.py | from django.template.loader import select_template
def GetTemplatesPath(appname,modelname,type,template_path=None):
template_paths = []
if template_path:
template_paths.append(template_path)
template_paths.append('%s/%s/%s.html' % (appname,modelname,type))
return template_paths
def GetBlockContent(obj,context,template_path=None):
template_paths = GetTemplatesPath(obj.content_type.app_label,obj.content_type.model,'object',template_path)
template_paths.append("flatblocks/object.html")
try:
t = select_template(template_paths)
except:
return ''
context["generic_object"] = obj
return t.render(context)
def GetListContent(obj,context,template_path=None):
template_paths = GetTemplatesPath(obj.content_type.app_label,obj.content_type.model,'object_list',template_path)
template_paths.append("flatblocks/object_list.html")
try:
t = select_template(template_paths)
except:
return ''
context['generic_object_list'] = obj
return t.render(context)
def GetTemplateContent(context,template_path,**kwargs):
try:
t = select_template(template_paths)
except:
return ''
context.update(kwargs)
return t.render(context)
| from django.template.loader import select_template
def GetTemplatesPath(modelname,type,template_path=None):
template_paths = []
if template_path:
template_paths.append(template_path)
var = modelname.lower().split(".")
template_paths.append('%s/%s/%s.html' % (var[0],var[1],type))
return template_paths
def GetBlockContent(obj,context,template_path=None):
template_paths = GetTemplatesPath('flatblocks.GenericFlatblock','object',template_path)
template_paths.append("flatblocks/object.html")
try:
t = select_template(template_paths)
except:
return ''
context["generic_object"] = obj
return t.render(context)
def GetListContent(obj,context,template_path=None):
template_paths = GetTemplatesPath('flatblocks.GenericFlatblockList','object_list',template_path)
template_paths.append("flatblocks/object_list.html")
try:
t = select_template(template_paths)
except:
return ''
context['generic_object_list'] = obj
return t.render(context)
def GetTemplateContent(context,template_path,**kwargs):
try:
t = select_template(template_paths)
except:
return ''
context.update(kwargs)
return t.render(context)
| bsd-2-clause | Python |
e320a01cb35447906e92cff63d6bbeefe2029e39 | Fix JSON serialisation tests (widget format and file format) | ricklupton/sankeyview | test/test_sankey_data.py | test/test_sankey_data.py | import pytest
from floweaver.sankey_data import SankeyData, SankeyNode, SankeyLink
def test_sankey_data():
nodes = {}
links = {}
groups = {}
data = SankeyData(nodes, links, groups)
assert data.nodes is nodes
assert data.links is links
assert data.groups is groups
def test_sankey_data_json():
data = SankeyData(nodes=[SankeyNode(id='a')],
links=[SankeyLink(source='a', target='a')])
json = data.to_json()
assert json['nodes'] == [n.to_json() for n in data.nodes]
assert json['links'] == [l.to_json() for l in data.links]
def test_sankey_data_node_json():
assert SankeyNode(id='a').to_json() == {
'id': 'a',
'title': 'a',
'style': {
'direction': 'r',
'hidden': False,
'type': 'default',
}
}
assert SankeyNode(id='a', title='A').to_json()['title'] == 'A', \
'title can be overridden'
assert SankeyNode(id='a', direction='L').to_json()['style']['direction'] == 'l', \
'direction can be set'
assert SankeyNode(id='a', title='').to_json()['style']['hidden'] == True, \
'hidden when title == ""'
assert SankeyNode(id='a', hidden=True).to_json()['style']['hidden'] == True, \
'hidden when hidden == True'
def test_sankey_data_link_required_attrs():
with pytest.raises(TypeError):
SankeyLink(source='a')
with pytest.raises(TypeError):
SankeyLink(target='a')
def test_sankey_data_link_default_values():
assert SankeyLink('a', 'b').type == None
def test_sankey_data_link_json():
link = SankeyLink('a', 'b', type='c', time='d', value=2, title='link',
opacity=0.9, color='blue')
# draft JSON Sankey serialisation format
assert link.to_json() == {
'source': 'a',
'target': 'b',
'type': 'c',
'time': 'd',
'data': {
'value': 2,
},
'title': 'link',
'style': {
'opacity': 0.9,
'color': 'blue',
}
}
# format expected by ipysankeywidget
assert link.to_json(format='widget') == {
'source': 'a',
'target': 'b',
'type': 'c',
'time': 'd',
'value': 2,
'title': 'link',
'opacity': 0.9,
'color': 'blue',
}
| import pytest
from floweaver.sankey_data import SankeyData, SankeyNode, SankeyLink
def test_sankey_data():
nodes = {}
links = {}
groups = {}
data = SankeyData(nodes, links, groups)
assert data.nodes is nodes
assert data.links is links
assert data.groups is groups
def test_sankey_data_json():
data = SankeyData(nodes=[SankeyNode(id='a')],
links=[SankeyLink(source='a', target='a')])
json = data.to_json()
assert json['nodes'] == [n.to_json() for n in data.nodes]
assert json['links'] == [l.to_json() for l in data.links]
def test_sankey_data_node_json():
assert SankeyNode(id='a').to_json() == {
'id': 'a',
'title': 'a',
'style': {
'direction': 'r',
'hidden': False,
'type': 'default',
}
}
assert SankeyNode(id='a', title='A').to_json()['title'] == 'A', \
'title can be overridden'
assert SankeyNode(id='a', direction='L').to_json()['style']['direction'] == 'l', \
'direction can be set'
assert SankeyNode(id='a', title='').to_json()['style']['hidden'] == True, \
'hidden when title == ""'
assert SankeyNode(id='a', hidden=True).to_json()['style']['hidden'] == True, \
'hidden when hidden == True'
def test_sankey_data_link_required_attrs():
with pytest.raises(TypeError):
SankeyLink(source='a')
with pytest.raises(TypeError):
SankeyLink(target='a')
def test_sankey_data_link_default_values():
assert SankeyLink('a', 'b').type == None
def test_sankey_data_link_json():
link = SankeyLink('a', 'b', type='c', time='d', value=2, title='link',
opacity=0.9, color='blue')
assert link.to_json() == {
'source': 'a',
'target': 'b',
'type': 'c',
'time': 'd',
'value': 2,
'title': 'link',
# 'style': {
'opacity': 0.9,
'color': 'blue',
# }
}
| mit | Python |
71dcf2ee99389a15055c8884aef77b71808a1e13 | Clean up metainfo_imdb_url plugin | JorisDeRieck/Flexget,LynxyssCZ/Flexget,thalamus/Flexget,lildadou/Flexget,offbyone/Flexget,xfouloux/Flexget,antivirtel/Flexget,LynxyssCZ/Flexget,spencerjanssen/Flexget,crawln45/Flexget,dsemi/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,malkavi/Flexget,ratoaq2/Flexget,patsissons/Flexget,Pretagonist/Flexget,sean797/Flexget,tobinjt/Flexget,tvcsantos/Flexget,lildadou/Flexget,ratoaq2/Flexget,spencerjanssen/Flexget,tsnoam/Flexget,ianstalk/Flexget,qvazzler/Flexget,qvazzler/Flexget,Flexget/Flexget,crawln45/Flexget,Danfocus/Flexget,qk4l/Flexget,camon/Flexget,JorisDeRieck/Flexget,gazpachoking/Flexget,poulpito/Flexget,sean797/Flexget,JorisDeRieck/Flexget,vfrc2/Flexget,ianstalk/Flexget,drwyrm/Flexget,LynxyssCZ/Flexget,camon/Flexget,antivirtel/Flexget,qk4l/Flexget,ibrahimkarahan/Flexget,tarzasai/Flexget,jawilson/Flexget,malkavi/Flexget,thalamus/Flexget,OmgOhnoes/Flexget,drwyrm/Flexget,thalamus/Flexget,Pretagonist/Flexget,Flexget/Flexget,oxc/Flexget,offbyone/Flexget,tvcsantos/Flexget,ibrahimkarahan/Flexget,vfrc2/Flexget,sean797/Flexget,Flexget/Flexget,v17al/Flexget,grrr2/Flexget,jawilson/Flexget,Danfocus/Flexget,OmgOhnoes/Flexget,tobinjt/Flexget,spencerjanssen/Flexget,tarzasai/Flexget,crawln45/Flexget,gazpachoking/Flexget,drwyrm/Flexget,ZefQ/Flexget,JorisDeRieck/Flexget,tobinjt/Flexget,poulpito/Flexget,jawilson/Flexget,Danfocus/Flexget,grrr2/Flexget,cvium/Flexget,qk4l/Flexget,dsemi/Flexget,tarzasai/Flexget,Danfocus/Flexget,patsissons/Flexget,ianstalk/Flexget,cvium/Flexget,poulpito/Flexget,Pretagonist/Flexget,tsnoam/Flexget,antivirtel/Flexget,ibrahimkarahan/Flexget,tsnoam/Flexget,cvium/Flexget,jacobmetrick/Flexget,oxc/Flexget,grrr2/Flexget,ZefQ/Flexget,xfouloux/Flexget,tobinjt/Flexget,qvazzler/Flexget,malkavi/Flexget,vfrc2/Flexget,jacobmetrick/Flexget,v17al/Flexget,ratoaq2/Flexget,xfouloux/Flexget,oxc/Flexget,lildadou/Flexget,patsissons/Flexget,crawln45/Flexget,jacobmetrick/Flexget,ZefQ/Flexget,jawilson/Flexget,offbyone/Flexget,OmgOhnoes/Flexget,Flexget/Flexget,v17al/Flexget,dsemi/Flexget | flexget/plugins/metainfo/imdb_url.py | flexget/plugins/metainfo/imdb_url.py | from __future__ import unicode_literals, division, absolute_import
import re
import logging
from flexget import plugin
from flexget.event import event
from flexget.utils.imdb import extract_id, make_url
log = logging.getLogger('metainfo_imdb_url')
class MetainfoImdbUrl(object):
"""
Scan entry information for imdb url.
"""
schema = {'type': 'boolean'}
def on_task_metainfo(self, task, config):
# check if disabled (value set to false)
if 'scan_imdb' in task.config:
if not task.config['scan_imdb']:
return
for entry in task.entries:
# Don't override already populated imdb_ids
if entry.get('imdb_id', eval_lazy=False):
continue
if not 'description' in entry:
continue
urls = re.findall(r'\bimdb.com/title/tt\d+\b', entry['description'])
if not urls:
continue
# Find unique imdb ids
imdb_ids = set(extract_id(url) for url in urls)
if len(imdb_ids) > 1:
log.debug('Found multiple imdb ids; not using any of: %s' % ' '.join(imdb_ids))
continue
entry['imdb_id'] = imdb_ids.pop()
entry['imdb_url'] = make_url(entry['imdb_id'])
log.debug('Found imdb url in description %s' % entry['imdb_url'])
@event('plugin.register')
def register_plugin():
plugin.register(MetainfoImdbUrl, 'scan_imdb', builtin=True, api_ver=2)
| from __future__ import unicode_literals, division, absolute_import
import re
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('metainfo_imdb_url')
class MetainfoImdbUrl(object):
"""
Scan entry information for imdb url.
"""
schema = {'type': 'boolean'}
def on_task_metainfo(self, task, config):
# check if disabled (value set to false)
if 'scan_imdb' in task.config:
if not task.config['scan_imdb']:
return
for entry in task.entries:
if not 'description' in entry:
continue
urls = re.findall(r'\bimdb.com/title/tt\d+\b', entry['description'])
if not urls:
continue
# Uniquify the list of urls.
urls = list(set(urls))
if 1 < len(urls):
log.debug('Found multiple imdb urls; not using any of: %s' %
' '.join(urls))
continue
url = ''.join(['http://www.', urls[0]])
entry['imdb_url'] = url
log.debug('Found imdb url in description %s' % url)
@event('plugin.register')
def register_plugin():
plugin.register(MetainfoImdbUrl, 'scan_imdb', builtin=True, api_ver=2)
| mit | Python |
8e2930e1582df7270853691db0d6cd9f68f6929f | set default workers=1 | artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service | install/storage-service.gunicorn-config.py | install/storage-service.gunicorn-config.py | # Documentation: http://docs.gunicorn.org/en/stable/configure.html
# Example: https://github.com/benoitc/gunicorn/blob/master/examples/example_config.py
import os
# http://docs.gunicorn.org/en/stable/settings.html#user
user = os.environ.get('SS_GUNICORN_USER', 'archivematica')
# http://docs.gunicorn.org/en/stable/settings.html#group
group = os.environ.get('SS_GUNICORN_GROUP', 'archivematica')
# http://docs.gunicorn.org/en/stable/settings.html#bind
bind = os.environ.get('SS_GUNICORN_BIND', '127.0.0.1:8001')
# http://docs.gunicorn.org/en/stable/settings.html#workers
workers = os.environ.get('SS_GUNICORN_WORKERS', '1')
# http://docs.gunicorn.org/en/stable/settings.html#worker-class
# WARNING: if ``worker_class`` is set to ``'gevent'``, then
# ``BAG_VALIDATION_NO_PROCESSES`` in settings/base.py *must* be set to 1.
# Otherwise reingest will fail at bagit validate. See
# https://github.com/artefactual/archivematica/issues/708
worker_class = os.environ.get('SS_GUNICORN_WORKER_CLASS', 'gevent')
# http://docs.gunicorn.org/en/stable/settings.html#timeout
timeout = os.environ.get('SS_GUNICORN_TIMEOUT', '172800')
# http://docs.gunicorn.org/en/stable/settings.html#reload
reload = os.environ.get('SS_GUNICORN_RELOAD', 'false')
# http://docs.gunicorn.org/en/stable/settings.html#reload-engine
reload_engine = os.environ.get('SS_GUNICORN_RELOAD_ENGINE', 'auto')
# http://docs.gunicorn.org/en/stable/settings.html#chdir
chdir = os.environ.get('SS_GUNICORN_CHDIR', '/usr/lib/archivematica/storage-service')
# http://docs.gunicorn.org/en/stable/settings.html#accesslog
accesslog = os.environ.get('SS_GUNICORN_ACCESSLOG', None)
# http://docs.gunicorn.org/en/stable/settings.html#errorlog
errorlog = os.environ.get('SS_GUNICORN_ERRORLOG', '-')
# http://docs.gunicorn.org/en/stable/settings.html#loglevel
loglevel = os.environ.get('SS_GUNICORN_LOGLEVEL', 'info')
# http://docs.gunicorn.org/en/stable/settings.html#proc-name
proc_name = os.environ.get('SS_GUNICORN_PROC_NAME', 'archivematica-storage-service')
# http://docs.gunicorn.org/en/stable/settings.html#sendfile
sendfile = os.environ.get('SS_GUNICORN_SENDFILE', 'false')
| # Documentation: http://docs.gunicorn.org/en/stable/configure.html
# Example: https://github.com/benoitc/gunicorn/blob/master/examples/example_config.py
import os
# http://docs.gunicorn.org/en/stable/settings.html#user
user = os.environ.get('SS_GUNICORN_USER', 'archivematica')
# http://docs.gunicorn.org/en/stable/settings.html#group
group = os.environ.get('SS_GUNICORN_GROUP', 'archivematica')
# http://docs.gunicorn.org/en/stable/settings.html#bind
bind = os.environ.get('SS_GUNICORN_BIND', '127.0.0.1:8001')
# http://docs.gunicorn.org/en/stable/settings.html#workers
workers = os.environ.get('SS_GUNICORN_WORKERS', '4')
# http://docs.gunicorn.org/en/stable/settings.html#worker-class
# WARNING: if ``worker_class`` is set to ``'gevent'``, then
# ``BAG_VALIDATION_NO_PROCESSES`` in settings/base.py *must* be set to 1.
# Otherwise reingest will fail at bagit validate. See
# https://github.com/artefactual/archivematica/issues/708
worker_class = os.environ.get('SS_GUNICORN_WORKER_CLASS', 'gevent')
# http://docs.gunicorn.org/en/stable/settings.html#timeout
timeout = os.environ.get('SS_GUNICORN_TIMEOUT', '172800')
# http://docs.gunicorn.org/en/stable/settings.html#reload
reload = os.environ.get('SS_GUNICORN_RELOAD', 'false')
# http://docs.gunicorn.org/en/stable/settings.html#reload-engine
reload_engine = os.environ.get('SS_GUNICORN_RELOAD_ENGINE', 'auto')
# http://docs.gunicorn.org/en/stable/settings.html#chdir
chdir = os.environ.get('SS_GUNICORN_CHDIR', '/usr/lib/archivematica/storage-service')
# http://docs.gunicorn.org/en/stable/settings.html#accesslog
accesslog = os.environ.get('SS_GUNICORN_ACCESSLOG', None)
# http://docs.gunicorn.org/en/stable/settings.html#errorlog
errorlog = os.environ.get('SS_GUNICORN_ERRORLOG', '-')
# http://docs.gunicorn.org/en/stable/settings.html#loglevel
loglevel = os.environ.get('SS_GUNICORN_LOGLEVEL', 'info')
# http://docs.gunicorn.org/en/stable/settings.html#proc-name
proc_name = os.environ.get('SS_GUNICORN_PROC_NAME', 'archivematica-storage-service')
# http://docs.gunicorn.org/en/stable/settings.html#sendfile
sendfile = os.environ.get('SS_GUNICORN_SENDFILE', 'false')
| agpl-3.0 | Python |
3cbf8d5f89cd46eadb47806d605b2d4a5381ccb2 | Update syncthing-discovery build server | firecat53/dockerfiles,firecat53/dockerfiles | syncthing_discovery/update_release.py | syncthing_discovery/update_release.py | #!/usr/bin/env python
"""Updates stdiscosrv Dockerfile with the latest TeamCity linux-amd64 build.
Python 2/3 compatible.
"""
import xml.etree.cElementTree as ET
try:
from urllib2 import urljoin, urlopen
except ImportError:
from urllib.request import urljoin, urlopen
BASE_URL = "https://build2.syncthing.net"
def get_result(url):
"""Download given url and return ElementTree tree
"""
res = urlopen(urljoin(BASE_URL, url))
if not res:
return ""
res = res.read().decode()
tree = ET.fromstring(res)
return tree
def get_id():
"""Get latest build id from TeamCity
Returns: 'id' - integer
"""
id_url = ("/guestAuth/app/rest/buildTypes/"
"id:DiscoveryServer_Build/builds?locator=branch:master,"
"state:finished,status:SUCCESS,count:1")
tree = get_result(id_url)
id = tree.find('build').attrib['id']
return id
def get_release():
"""Return url for latest linux-amd64 build
"""
id = get_id()
if not id:
return ""
build_url = ("/guestAuth/app/rest/builds/"
"id:{}/artifacts/children".format(id))
tree = get_result(build_url)
url = next((i.attrib['href'] for i in tree if
i.attrib['name'].startswith('stdiscosrv-linux-amd64')), None)
url = url.replace('metadata', 'content')
return url
def update_stdiscosrv():
"""Update stdicosrv Dockerfile with latest release download link
"""
url = get_release()
if not url:
return
url = urljoin(BASE_URL, url)
with open('Dockerfile') as f:
file = f.readlines()
for idx, line in enumerate(file):
if line.startswith('ADD'):
file[idx] = "ADD {} /stdiscosrv.tar.gz\n".format(url)
with open('Dockerfile', 'w') as f:
f.writelines(file)
if __name__ == "__main__":
update_stdiscosrv()
| #!/usr/bin/env python
"""Updates stdiscosrv Dockerfile with the latest Jenkins linux-amd64 build.
Python 2/3 compatible.
"""
import json
try:
from urllib2 import urlopen
except ImportError:
from urllib.request import urlopen
def get_release():
"""Get stdiscosrv latest linux-amd64 release version from the Jenkins API.
Returns: download url
"""
jenkins_url = ("https://build.syncthing.net/job/"
"stdiscosrv/lastStableBuild/api/json")
res = urlopen(jenkins_url)
if not res:
return ""
res = res.read().decode()
res = json.loads(res)
fn = [i['fileName'] for i in res['artifacts']
if 'linux-amd64' in i['fileName']][0]
return "{}artifact/{}".format(res['url'], fn)
def update_stdiscosrv():
"""Update stdicosrv Dockerfile with latest release download link
"""
url = get_release()
if not url:
return
with open('Dockerfile') as f:
file = f.readlines()
for idx, line in enumerate(file):
if line.startswith('ADD'):
file[idx] = "ADD {} /stdiscosrv.tar.gz\n".format(url)
with open('Dockerfile', 'w') as f:
f.writelines(file)
if __name__ == "__main__":
update_stdiscosrv()
| mit | Python |
c9392a6578b0894dff7a5407410e8892e9f3ae6d | Fix bad refactor of is_valid_unc_path | nithinphilips/py_win_unc,CovenantEyes/py_win_unc | win_unc/validators.py | win_unc/validators.py | from win_unc.internal.utils import take_while
from win_unc.sanitizors import sanitize_username, sanitize_unc_path
def is_valid_drive_letter(string):
"""
Drive letters are one character in length and between "A" and "Z". Case does not matter.
"""
return (len(string) == 1
and string[0].isalpha())
def is_valid_unc_path(string):
"""
Valid UNC paths are at least three characters long, begin with exactly two backslashes, not
start or end with whitepsace, and do not contain certain invalid characters
(see `sanitize_unc_path`).
"""
return (len(string) > 2
and len(take_while(lambda c: c == '\\', string)) == 2
and string == string.strip()
and string == sanitize_unc_path(string))
def is_valid_username(string):
"""
A valid Windows username (logon) is a non-empty string that does not start or end with
whitespace, and does not contain certain invalid characters (see `sanitize_username`).
"""
return (len(string) > 0
and string == string.strip()
and string == sanitize_username(string))
| from win_unc.sanitizors import sanitize_username, sanitize_unc_path
def is_valid_drive_letter(string):
"""
Drive letters are one character in length and between "A" and "Z". Case does not matter.
"""
return (len(string) == 1
and string[0].isalpha())
def is_valid_unc_path(string):
"""
Valid UNC paths are at least three characters long, begin with "\\", do not start or end with
whitepsace, and do not contain certain invalid characters (see `sanitize_unc_path`).
"""
return (len(string) > 2
and string.startswith('\\\\')
and string == string.strip()
and string == sanitize_unc_path(string))
def is_valid_username(string):
"""
A valid Windows username (logon) is a non-empty string that does not start or end with
whitespace, and does not contain certain invalid characters (see `sanitize_username`).
"""
return (len(string) > 0
and string == string.strip()
and string == sanitize_username(string))
| mit | Python |
ba2574c1b86f1f9b030a9d572149b8f49c4513aa | Update version. | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Lib/idlelib/idlever.py | Lib/idlelib/idlever.py | IDLE_VERSION = "0.9b1+"
| IDLE_VERSION = "0.9b1"
| mit | Python |
81092dc7a755ce5065f2decc22b7d203b648eac2 | update LOGIN_SIGN_UP_MESSAGE email subject | lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django | agir/authentication/tasks.py | agir/authentication/tasks.py | from django.conf import settings
from django.utils import timezone
from django.utils.http import urlencode
from agir.authentication.tokens import subscription_confirmation_token_generator
from agir.lib.celery import emailing_task
from agir.lib.mailing import send_mosaico_email
from agir.lib.utils import front_url
from agir.people.actions.subscription import SUBSCRIPTION_TYPE_AP
def interleave_spaces(s, n=3):
return " ".join([s[i : i + n] for i in range(0, len(s), n)])
@emailing_task
def send_login_email(email, short_code, expiry_time):
utc_expiry_time = timezone.make_aware(
timezone.datetime.utcfromtimestamp(expiry_time), timezone.utc
)
local_expiry_time = timezone.localtime(utc_expiry_time)
send_mosaico_email(
code="LOGIN_MESSAGE",
subject="Votre code de connexion",
from_email=settings.EMAIL_FROM,
bindings={
"code": interleave_spaces(short_code),
"expiry_time": local_expiry_time.strftime("%H:%M"),
},
recipients=[email],
)
@emailing_task
def send_no_account_email(email, subscription_type=SUBSCRIPTION_TYPE_AP, **kwargs):
subscription_token = subscription_confirmation_token_generator.make_token(
email=email, type=subscription_type, **kwargs
)
confirm_subscription_url = front_url(
"subscription_confirm", auto_login=False, nsp=False
)
query_args = {
"email": email,
"type": subscription_type,
**kwargs,
"token": subscription_token,
}
confirm_subscription_url += "?" + urlencode(query_args)
send_mosaico_email(
code="LOGIN_SIGN_UP_MESSAGE",
subject="Vous n'avez pas encore de compte sur Action Populaire",
from_email=settings.EMAIL_FROM,
recipients=[email],
bindings={"SUBSCRIPTION_URL": confirm_subscription_url},
)
| from django.conf import settings
from django.utils import timezone
from django.utils.http import urlencode
from agir.authentication.tokens import subscription_confirmation_token_generator
from agir.lib.celery import emailing_task
from agir.lib.mailing import send_mosaico_email
from agir.lib.utils import front_url
from agir.people.actions.subscription import SUBSCRIPTION_TYPE_AP
def interleave_spaces(s, n=3):
return " ".join([s[i : i + n] for i in range(0, len(s), n)])
@emailing_task
def send_login_email(email, short_code, expiry_time):
utc_expiry_time = timezone.make_aware(
timezone.datetime.utcfromtimestamp(expiry_time), timezone.utc
)
local_expiry_time = timezone.localtime(utc_expiry_time)
send_mosaico_email(
code="LOGIN_MESSAGE",
subject="Votre code de connexion",
from_email=settings.EMAIL_FROM,
bindings={
"code": interleave_spaces(short_code),
"expiry_time": local_expiry_time.strftime("%H:%M"),
},
recipients=[email],
)
@emailing_task
def send_no_account_email(email, subscription_type=SUBSCRIPTION_TYPE_AP, **kwargs):
subscription_token = subscription_confirmation_token_generator.make_token(
email=email, type=subscription_type, **kwargs
)
confirm_subscription_url = front_url(
"subscription_confirm", auto_login=False, nsp=False
)
query_args = {
"email": email,
"type": subscription_type,
**kwargs,
"token": subscription_token,
}
confirm_subscription_url += "?" + urlencode(query_args)
send_mosaico_email(
code="LOGIN_SIGN_UP_MESSAGE",
subject="Vous n'avez pas encore de compte sur la platefome",
from_email=settings.EMAIL_FROM,
recipients=[email],
bindings={"SUBSCRIPTION_URL": confirm_subscription_url},
)
| agpl-3.0 | Python |
1d13bd71ff105d540c3af166056cb0b8731a3417 | Add reverse to data migration | wooey/Wooey,wooey/Wooey,wooey/Wooey,wooey/Wooey | wooey/migrations/0037_populate-jsonfield.py | wooey/migrations/0037_populate-jsonfield.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-03-04 23:14
from __future__ import unicode_literals
import json
from django.db import migrations
def populate_default(apps, schema_editor):
ScriptParameter = apps.get_model('wooey', 'ScriptParameter')
for obj in ScriptParameter.objects.all():
try:
obj.default = json.loads(obj._default)
except Exception:
obj.default = obj._default
obj.save()
def reverse_populate_default(apps, schema_editor):
ScriptParameter = apps.get_model('wooey', 'ScriptParameter')
for obj in ScriptParameter.objects.all():
obj._default = json.dumps(obj.default)
obj.save()
class Migration(migrations.Migration):
dependencies = [
('wooey', '0036_add-jsonfield'),
]
operations = [
migrations.RunPython(populate_default, reverse_populate_default)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-03-04 23:14
from __future__ import unicode_literals
import json
from django.db import migrations
def populate_default(apps, schema_editor):
ScriptParameter = apps.get_model('wooey', 'ScriptParameter')
for obj in ScriptParameter.objects.all():
try:
obj.default = json.loads(obj._default)
except Exception:
obj.default = obj._default
obj.save()
class Migration(migrations.Migration):
dependencies = [
('wooey', '0036_add-jsonfield'),
]
operations = [
migrations.RunPython(populate_default)
]
| bsd-3-clause | Python |
b02fda823a4a4598254d465996ca53361bdfd421 | Load robot in simulator | anassinator/dqn-obstacle-avoidance | simulator.py | simulator.py | # -*- coding: utf-8 -*-
from robot import Robot
from world import World
from PythonQt import QtGui
from director import applogic
from director import objectmodel as om
from director import visualization as vis
from director.consoleapp import ConsoleApp
class Simulator(object):
"""Simulator."""
def __init__(self, robot, world):
"""Constructs the simulator.
Args:
robot: Robot.
world: World.
"""
self._robot = robot
self._world = world
self._app = ConsoleApp()
self._view = self._app.createView(useGrid=False)
self._initialize()
def _initialize(self):
"""Initializes the world."""
# Add world to view.
om.removeFromObjectModel(om.findObjectByName("world"))
vis.showPolyData(self._world.to_polydata(), "world")
# Add robot to view.
om.removeFromObjectModel(om.findObjectByName("robot"))
vis.showPolyData(self._robot.to_polydata(), "robot")
def display(self):
"""Launches and displays the simulator."""
widget = QtGui.QWidget()
layout = QtGui.QVBoxLayout(widget)
layout.addWidget(self._view)
widget.showMaximized()
# Set camera.
applogic.resetCamera(viewDirection=[0.2, 0, -1])
self._app.start()
if __name__ == "__main__":
robot = Robot()
world = World(120, 100).add_obstacles()
sim = Simulator(robot, world)
sim.display()
| # -*- coding: utf-8 -*-
from world import World
from PythonQt import QtGui
from director import applogic
from director import objectmodel as om
from director import visualization as vis
from director.consoleapp import ConsoleApp
class Simulator(object):
"""Simulator."""
def __init__(self, world):
"""Constructs the simulator.
Args:
world: World.
"""
self._world = world
self._app = ConsoleApp()
self._view = self._app.createView(useGrid=False)
self._initialize()
def _initialize(self):
"""Initializes the world."""
# Add world to view.
om.removeFromObjectModel(om.findObjectByName("world"))
vis.showPolyData(self._world.to_polydata(), "world")
def display(self):
"""Launches and displays the simulator."""
widget = QtGui.QWidget()
layout = QtGui.QVBoxLayout(widget)
layout.addWidget(self._view)
widget.showMaximized()
# Set camera.
applogic.resetCamera(viewDirection=[0.2, 0, -1])
self._app.start()
if __name__ == "__main__":
world = World(120, 100).add_obstacles()
sim = Simulator(world)
sim.display()
| mit | Python |
13a0ad8e65929d2b477891c4d1c37e93862ea156 | update to 2.58.2 | DeadSix27/python_cross_compile_script | packages/dependencies/glib2.py | packages/dependencies/glib2.py | {
'repo_type' : 'archive',
'download_locations' : [
{ 'url' : 'https://download.gnome.org/sources/glib/2.58/glib-2.58.2.tar.xz', 'hashes' : [ { 'type' : 'sha256', 'sum' : 'c7b24ed6536f1a10fc9bce7994e55c427b727602e78342821f1f07fb48753d4b' }, ], },
{ 'url' : 'https://fossies.org/linux/misc/glib-2.58.2.tar.xz/', 'hashes' : [ { 'type' : 'sha256', 'sum' : 'c7b24ed6536f1a10fc9bce7994e55c427b727602e78342821f1f07fb48753d4b' }, ], },
],
'configure_options' : '--host={target_host} --prefix={target_prefix} --with-pcre=internal --with-threads=posix --disable-fam --disable-shared --disable-libmount',
'depends_on' : [ 'libffi','gettext' ],
'run_post_patch' : [
'if [ ! -f "INSTALL" ] ; then touch INSTALL ; fi',
'echo \'<<EOF\\nEXTRA_DIST =\\nCLEANFILES =\\nEOF\' > gtk-doc.make',
'sed -i.bak "s/SUBDIRS = . m4macros glib gmodule gthread gobject gio po docs tests subprojects/SUBDIRS = . m4macros glib gmodule gthread gobject gio po subprojects/" Makefile.am',
'autoreconf -fiv',
],
'patches' : [
( 'glib2/0001-win32-Make-the-static-build-work-with-MinGW-when-pos.patch', '-p1' ),
],
'update_check' : { 'url' : 'https://developer.gnome.org/glib/', 'type' : 'httpregex', 'regex' : r'<a class="doc-link" href="2.58/" lang="">(?P<version_num>[\d.]+)<\/a>' },
'_info' : { 'version' : '2.58.2', 'fancy_name' : 'glib2' },
} | {
'repo_type' : 'archive',
'download_locations' : [
{ 'url' : 'https://download.gnome.org/sources/glib/2.58/glib-2.58.1.tar.xz', 'hashes' : [ { 'type' : 'sha256', 'sum' : '97d6a9d926b6aa3dfaadad3077cfb43eec74432ab455dff14250c769d526d7d6' }, ], },
{ 'url' : 'https://fossies.org/linux/misc/glib-2.58.1.tar.xz/', 'hashes' : [ { 'type' : 'sha256', 'sum' : '97d6a9d926b6aa3dfaadad3077cfb43eec74432ab455dff14250c769d526d7d6' }, ], },
],
'configure_options' : '--host={target_host} --prefix={target_prefix} --with-pcre=internal --with-threads=posix --disable-fam --disable-shared --disable-libmount',
'depends_on' : [ 'libffi','gettext' ],
'run_post_patch' : [
'if [ ! -f "INSTALL" ] ; then touch INSTALL ; fi',
'echo \'<<EOF\\nEXTRA_DIST =\\nCLEANFILES =\\nEOF\' > gtk-doc.make',
'sed -i.bak "s/SUBDIRS = . m4macros glib gmodule gthread gobject gio po docs tests subprojects/SUBDIRS = . m4macros glib gmodule gthread gobject gio po subprojects/" Makefile.am',
'autoreconf -fiv',
],
'patches' : [
( 'glib2/0001-win32-Make-the-static-build-work-with-MinGW-when-pos.patch', '-p1' ),
],
'update_check' : { 'url' : 'https://developer.gnome.org/glib/', 'type' : 'httpregex', 'regex' : r'<a class="doc-link" href="2.58/" lang="">(?P<version_num>[\d.]+)<\/a>' },
'_info' : { 'version' : '2.58.1', 'fancy_name' : 'glib2' },
} | mpl-2.0 | Python |
fea2c0bc02a8323ad6c759ca63663499a538186e | Undo BC-breaking change, restore 'import onnx' providing submodules. | onnx/onnx,onnx/onnx,onnx/onnx,onnx/onnx | onnx/__init__.py | onnx/__init__.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_ml_pb2 import * # noqa
from .version import version as __version__ # noqa
# Import common subpackages so they're available when you 'import onnx'
import onnx.helper # noqa
import onnx.checker # noqa
import onnx.defs # noqa
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has "read" function)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if hasattr(obj, 'read') and callable(obj.read):
model.ParseFromString(obj.read())
else:
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
return model
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_ml_pb2 import * # noqa
from .version import version as __version__ # noqa
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has "read" function)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if hasattr(obj, 'read') and callable(obj.read):
model.ParseFromString(obj.read())
else:
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
return model
| apache-2.0 | Python |
17685a78457a996a0ebdf6b69b2b9f2761bfafb3 | Delete GenerateCover() | fan-jiang/Dujing | PythonScript/GenerateBook.py | PythonScript/GenerateBook.py | import subprocess
import os
def GenerateBook():
Books = ["DaXue", "ZhongYong", "LunYu", "MengZi"]
prefaceCommand = "pandoc ..\\Source\\Preface.md -o Preface.html --standalone"
with open("Dujing.log", 'w') as trace:
subprocess.call(prefaceCommand, stdin=None, stdout=trace, stderr=None, shell=True)
os.chdir("..\\Build")
for book in Books:
subprocess.call("GenerateBook " + book, stdin=None, stdout=trace, stderr=None, shell=True)
if __name__ == '__main__':
GenerateBook()
| import subprocess
import os
def GenerateCover():
#Cover = "Cover"
#BookName = "BookName"
#BookCover = BookName + Cover
#BookCoverHTML = BookCover + ".html"
#CSS = "CSS_"
#CSSExt = "CSSExt"
#pandocCommand = "pandoc ..\\source\\" + BookCover + ".txt -o "
#+ BookCoverHTML + " -standalone " + CSS_ + Cover + ".css --verbose"
pandocCommand = "pandoc ..\\source\\test.txt -o test.html"
with open('dujing.log', 'w') as trace:
subprocess.call(pandocCommand, stdin=None, stdout=trace, stderr=None, shell=True)
def GenerateBook():
Books = ["DaXue", "ZhongYong", "LunYu", "MengZi"]
prefaceCommand = "pandoc ..\\Source\\Preface.md -o Preface.html --standalone"
with open("Dujing.log", 'w') as trace:
subprocess.call(prefaceCommand, stdin=None, stdout=trace, stderr=None, shell=True)
os.chdir("..\\Build")
for book in Books:
subprocess.call("GenerateBook " + book, stdin=None, stdout=trace, stderr=None, shell=True)
if __name__ == '__main__':
GenerateBook()
| mit | Python |
05f37b396dbfafac1ab5ec25a777e5067a00e2c4 | support curl -i param | h2rd/ppxml | pxml/__init__.py | pxml/__init__.py | #!/usr/bin/env python
# unicode: utf-8
import sys
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import XmlLexer
from xml.dom.minidom import parseString
INDENT=' '*2
def format_code(data):
body = ''
if data.startswith('HTTP'):
end = data.find("\r\n\r\n")
body = data[0:end]
data = data[end:].strip()
lines = [line for line in parseString(data).toprettyxml(indent=INDENT).split('\n')
if line.strip()]
return "%s\n\n%s" % (body, '\n'.join(lines),)
def color_code(code):
return highlight(code, XmlLexer(), TerminalFormatter())
def main():
data = sys.stdin.read()
data = color_code(format_code(data))
return data
if __name__ == '__main__':
print main()
| #!/usr/bin/env python
# unicode: utf-8
import sys
from pygments import highlight
from pygments.formatters import TerminalFormatter
from pygments.lexers import XmlLexer
import xml.dom.minidom as xml
import argparse
INDENT=' '*2
def format_code(data):
return xml.parseString(data).toprettyxml(indent=INDENT)
def color_code(code):
return highlight(code, XmlLexer(), TerminalFormatter())
def main():
parser = argparse.ArgumentParser(description="Command-line tool to validate and pretty-print JSON and XML")
parser.add_argument("-p", action="store_true", help="XPATH")
args = parser.parse_args()
print args
data=sys.stdin.read()
data=color_code(format_code(data))
return data
if __name__ == '__main__':
print main()
| mit | Python |
0cf4f846e4359396362fa9e13d0cca2bf4221aca | change version code | 7sDream/zhihu-py3 | zhihu/__init__.py | zhihu/__init__.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = '7sDream'
__version__ = '0.3.1'
from .client import ZhihuClient
from .question import Question
from .author import Author
from .activity import Activity
from .acttype import ActType
from .answer import Answer
from .collection import Collection
from .column import Column
from .post import Post
from .topic import Topic
__all__ = ['ZhihuClient', 'Question', 'Author', 'ActType', 'Activity',
'Answer', 'Collection', 'Column', 'Post', 'Topic']
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = '7sDream'
__version__ = '0.3.0'
from .client import ZhihuClient
from .question import Question
from .author import Author
from .activity import Activity
from .acttype import ActType
from .answer import Answer
from .collection import Collection
from .column import Column
from .post import Post
from .topic import Topic
__all__ = ['ZhihuClient', 'Question', 'Author', 'ActType', 'Activity',
'Answer', 'Collection', 'Column', 'Post', 'Topic']
| mit | Python |
28cab7063328f02abc5f31c0ef79017c3127ee57 | use atomic modesetting | tomba/kmsxx,tomba/kmsxx,tomba/kmsxx,tomba/kmsxx | py/tests/test.py | py/tests/test.py | #!/usr/bin/python3
import sys
import pykms
# draw test pattern via dmabuf?
dmabuf = False
# Use omap?
omap = False
if omap:
card = pykms.OmapCard()
else:
card = pykms.Card()
if len(sys.argv) > 1:
conn_name = sys.argv[1]
else:
conn_name = ""
res = pykms.ResourceManager(card)
conn = res.reserve_connector(conn_name)
crtc = res.reserve_crtc(conn)
plane = res.reserve_generic_plane(crtc)
mode = conn.get_default_mode()
modeb = mode.to_blob(card)
if omap:
origfb = pykms.OmapFramebuffer(card, mode.hdisplay, mode.vdisplay, "XR24");
else:
origfb = pykms.DumbFramebuffer(card, mode.hdisplay, mode.vdisplay, "XR24");
if dmabuf:
fb = pykms.ExtFramebuffer(card, origfb.width, origfb.height, origfb.format,
[origfb.fd(0)], [origfb.stride(0)], [origfb.offset(0)])
else:
fb = origfb
pykms.draw_test_pattern(fb);
card.disable_planes()
req = pykms.AtomicReq(card)
req.add(conn, "CRTC_ID", crtc.id)
req.add(crtc, {"ACTIVE": 1,
"MODE_ID": modeb.id})
req.add(plane, {"FB_ID": fb.id,
"CRTC_ID": crtc.id,
"SRC_X": 0 << 16,
"SRC_Y": 0 << 16,
"SRC_W": mode.hdisplay << 16,
"SRC_H": mode.vdisplay << 16,
"CRTC_X": 0,
"CRTC_Y": 0,
"CRTC_W": mode.hdisplay,
"CRTC_H": mode.vdisplay,
"zorder": 0})
req.commit_sync(allow_modeset = True)
input("press enter to exit\n")
| #!/usr/bin/python3
import pykms
# draw test pattern via dmabuf?
dmabuf = False
# Use omap?
omap = False
if omap:
card = pykms.OmapCard()
else:
card = pykms.Card()
res = pykms.ResourceManager(card)
conn = res.reserve_connector()
crtc = res.reserve_crtc(conn)
mode = conn.get_default_mode()
if omap:
origfb = pykms.OmapFramebuffer(card, mode.hdisplay, mode.vdisplay, "XR24");
else:
origfb = pykms.DumbFramebuffer(card, mode.hdisplay, mode.vdisplay, "XR24");
if dmabuf:
fb = pykms.ExtFramebuffer(card, origfb.width, origfb.height, origfb.format,
[origfb.fd(0)], [origfb.stride(0)], [origfb.offset(0)])
else:
fb = origfb
pykms.draw_test_pattern(fb);
crtc.set_mode(conn, fb, mode)
input("press enter to exit\n")
| mpl-2.0 | Python |
296cb1f198584ea350b7861cfab9b607ed449270 | Rework to POST form. Validation temporarily commented out | ronaldbradford/cli_explorer,ronaldbradford/cli_explorer | api/api.py | api/api.py | #!/usr/bin/env python
from flask import Flask, jsonify, request, abort
from subprocess import Popen, PIPE
import sys
from crossdomain import crossdomain
# Define the web container
api = Flask(__name__)
api.config['SERVER_NAME'] = 'cli_explorer.ronaldbradford.com:4242';
# Ensure the API has endpoint discovery
@api.route('/')
@crossdomain(origin='*')
def index():
return jsonify({'apis' : [ '/api/OSeX' ]})
# Our primary API endpoint
@api.route('/api/OSeX', methods=['POST'])
@crossdomain(origin='*')
def execute():
# We only accept POST with a form POST payload
# This request has two required parameters
required = [ 'command', 'args' ]
#missing=[field for field in required if field not in request.form[field]]
#if missing:
# return jsonify({'error': str(missing)+ ' are required parameters'}), 200
# Obtain the value of passed parameters
command = request.form['command']
args = request.form['args']
# To further hobble this generic execute OS command, we retrict the commands
valid_commands = [ 'date', 'openstack', 'nova' ]
if command not in valid_commands:
return jsonify({'error': 'The supplied command is invalid'}), 200
# Build the python specific execute command
execute = [command]
if (args):
args.split(' ')
execute.append(args)
api.logger.info(execute)
# Execute the command
p = Popen(execute, stdout=PIPE, stderr=PIPE)
# Process the commands response
stdout, stderr = p.communicate()
rc = p.returncode
# Return a JSON response
return jsonify({'execute' : command + " " + args, 'status' : rc, 'stdout' : stdout, 'stderr' : stderr}), 200
if __name__ == '__main__':
api.run(host='107.170.3.28')
| #!/usr/bin/env python
from flask import Flask, jsonify, request, abort
from subprocess import Popen, PIPE
import sys
from crossdomain import crossdomain
# Define the web container
api = Flask(__name__)
api.config['SERVER_NAME'] = 'localhost:4242';
# Ensure the API has endpoint discovery
@api.route('/')
@crossdomain(origin='*')
def index():
return jsonify({'apis' : [ '/api/OSeX' ]})
# Our primary API endpoint
@api.route('/api/OSeX', methods=['POST'])
@crossdomain(origin='*')
def execute():
# We only accept POST with a JSON data payload
if not request.json:
return jsonify({'error': 'Not a valid JSON request'}), 200
# This request has two required parameters
required = [ 'command', 'args' ]
missing=[field for field in required if field not in request.json]
if missing:
return jsonify({'error': str(missing)+ ' are required parameters'}), 200
# Obtain the value of passed parameters
command = request.json['command']
args = request.json['args']
# To further hobble this generic execute OS command, we retrict the commands
valid_commands = [ 'date', 'openstack', 'nova' ]
if command not in valid_commands:
return jsonify({'error': 'The supplied command is invalid'}), 200
# Build the python specific execute command
execute = [command]
if (args):
args.split(' ')
execute.append(args)
api.logger.debug(execute)
# Execute the command
p = Popen(execute, stdout=PIPE, stderr=PIPE)
# Process the commands response
stdout, stderr = p.communicate()
rc = p.returncode
# Return a JSON response
return jsonify({'execute' : command + " " + args, 'status' : rc, 'stdout' : stdout, 'stderr' : stderr}), 200
if __name__ == '__main__':
api.run(debug=True)
| apache-2.0 | Python |
d290b2e1e75b60da6e9b36acf7ef9c62e670ee6e | fix raise class | sim0nx/python-openhab,sim0nx/python-openhab | openhab/types.py | openhab/types.py | from __future__ import absolute_import, division, print_function, unicode_literals
# -*- coding: utf-8 -*-
'''python library for accessing the openHAB REST API'''
#
# Georges Toth (c) 2016 <georges@trypill.org>
#
# python-openhab is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# python-openhab is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with python-openhab. If not, see <http://www.gnu.org/licenses/>.
#
# pylint: disable=bad-indentation
import six
import datetime
import dateutil.parser
__author__ = 'Georges Toth <georges@trypill.org>'
__license__ = 'AGPLv3+'
class CommandType(object):
'''Base command type class'''
@classmethod
def validate(cls, value):
raise NotImplementedError()
class StringType(CommandType):
@classmethod
def validate(cls, value):
if not isinstance(value, six.string_types):
raise ValueError()
class OnOffType(StringType):
@classmethod
def validate(cls, value):
super(OnOffType, cls).validate(value)
if value not in ['ON', 'OFF']:
raise ValueError()
class OpenCloseType(StringType):
@classmethod
def validate(cls, value):
super(OpenCloseType, cls).validate(value)
if value not in ['OPEN', 'CLOSED']:
raise ValueError()
class DecimalType(CommandType):
@classmethod
def validate(cls, value):
if not (isinstance(value, float) or isinstance(value, int)):
raise ValueError()
class PercentType(DecimalType):
@classmethod
def validate(cls, value):
super(PercentType, cls).validate(value)
if not (value >= 0 and value <= 100):
raise ValueError()
class IncreaseDecreaseType(StringType):
@classmethod
def validate(cls, value):
super(IncreaseDecreaseType, cls).validate(value)
if value not in ['INCREASE', 'DECREASE']:
raise ValueError()
| from __future__ import absolute_import, division, print_function, unicode_literals
# -*- coding: utf-8 -*-
'''python library for accessing the openHAB REST API'''
#
# Georges Toth (c) 2016 <georges@trypill.org>
#
# python-openhab is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# python-openhab is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with python-openhab. If not, see <http://www.gnu.org/licenses/>.
#
# pylint: disable=bad-indentation
import six
import datetime
import dateutil.parser
__author__ = 'Georges Toth <georges@trypill.org>'
__license__ = 'AGPLv3+'
class CommandType(object):
'''Base command type class'''
@classmethod
def validate(cls, value):
raise NotImplemented()
class StringType(CommandType):
@classmethod
def validate(cls, value):
if not isinstance(value, six.string_types):
raise ValueError()
class OnOffType(StringType):
@classmethod
def validate(cls, value):
super(OnOffType, cls).validate(value)
if value not in ['ON', 'OFF']:
raise ValueError()
class OpenCloseType(StringType):
@classmethod
def validate(cls, value):
super(OpenCloseType, cls).validate(value)
if value not in ['OPEN', 'CLOSED']:
raise ValueError()
class DecimalType(CommandType):
@classmethod
def validate(cls, value):
if not (isinstance(value, float) or isinstance(value, int)):
raise ValueError()
class PercentType(DecimalType):
@classmethod
def validate(cls, value):
super(PercentType, cls).validate(value)
if not (value >= 0 and value <= 100):
raise ValueError()
class IncreaseDecreaseType(StringType):
@classmethod
def validate(cls, value):
super(IncreaseDecreaseType, cls).validate(value)
if value not in ['INCREASE', 'DECREASE']:
raise ValueError()
| agpl-3.0 | Python |
81faa7704fb355dd16674d4ed089e0ced34c24c6 | Add router to the behaviors lookup | thatch45/rflo | rflo/start.py | rflo/start.py | import ioflo.app.run
import os
class Manager(object):
'''
Manage the main ioflo process
'''
def __init__(self):
self.behaviors = ['rflo.config', 'rflo.roads', 'rflo.router']
self.floscript = os.path.join(os.path.dirname(__file__), 'raft.flo')
def start(self):
ioflo.app.run.start(
name='rflo',
period=0.01,
stamp=0.0,
filepath=self.floscript,
behaviors=self.behaviors,
verbose=2,
)
| import ioflo.app.run
import os
class Manager(object):
'''
Manage the main ioflo process
'''
def __init__(self):
self.behaviors = ['rflo.config', 'rflo.roads']
self.floscript = os.path.join(os.path.dirname(__file__), 'raft.flo')
def start(self):
ioflo.app.run.start(
name='rflo',
period=0.01,
stamp=0.0,
filepath=self.floscript,
behaviors=self.behaviors,
verbose=2,
)
| apache-2.0 | Python |
e379b89ab5f012ac9ad6f6f0a058fbe3098791a6 | Add docstring to pipe.process_images | jni/skan | skan/pipe.py | skan/pipe.py | from . import pre, csr
import imageio
from tqdm import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
"""Full pipeline from images to skeleton stats with local median threshold.
Parameters
----------
filenames : list of string
The list of input filenames.
image_format : string
The format of the files. 'auto' is automatically determined by the
imageio library. See imageio documentation for valid image formats.
threshold_radius : float
The radius for median thresholding,
smooth_radius : float in [0, 1]
The value of sigma with which to Gaussian-smooth the image,
**relative to `threshold_radius`**.
brightness_offset : float
The standard brightness value with which to threshold is the local
median, `m(x, y)`. Use this value to offset from there: the threshold
used will be `m(x, y) + brightness_offset`.
scale_metadata_path : string
The path in the image dictionary to find the metadata on pixel scale,
separated by forward slashes ('/').
Returns
-------
result : pandas DataFrame
Data frame containing all computed statistics on the skeletons found
in the input image.
"""
image_format = None if image_format == 'auto' else image_format
results = []
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if scale_metadata_path is not None:
md_path = scale_metadata_path.split(sep='/')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(threshold_radius / scale))
pixel_smoothing_radius = smooth_radius * pixel_threshold_radius
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=brightness_offset)
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
return pd.concat(results)
| from . import pre, csr
import imageio
from tqdm import tqdm
import numpy as np
from skimage import morphology
import pandas as pd
def process_images(filenames, image_format, threshold_radius,
smooth_radius, brightness_offset, scale_metadata_path):
image_format = None if image_format == 'auto' else image_format
results = []
for file in tqdm(filenames):
image = imageio.imread(file, format=image_format)
if scale_metadata_path is not None:
md_path = scale_metadata_path.split(sep='/')
meta = image.meta
for key in md_path:
meta = meta[key]
scale = float(meta)
else:
scale = 1 # measurements will be in pixel units
pixel_threshold_radius = int(np.ceil(threshold_radius / scale))
pixel_smoothing_radius = smooth_radius * pixel_threshold_radius
thresholded = pre.threshold(image, sigma=pixel_smoothing_radius,
radius=pixel_threshold_radius,
offset=brightness_offset)
skeleton = morphology.skeletonize(thresholded)
framedata = csr.summarise(skeleton, spacing=scale)
framedata['squiggle'] = np.log2(framedata['branch-distance'] /
framedata['euclidean-distance'])
framedata['filename'] = [file] * len(framedata)
results.append(framedata)
return pd.concat(results)
| bsd-3-clause | Python |
28181d9bcf7aa597b88507871ffb31f4028eb67c | Enable DEBUG when running the test suite | Flamacue/pretix,Flamacue/pretix,Flamacue/pretix,Flamacue/pretix | src/pretix/testutils/settings.py | src/pretix/testutils/settings.py | import atexit
import os
import tempfile
tmpdir = tempfile.TemporaryDirectory()
os.environ.setdefault('DATA_DIR', tmpdir.name)
from pretix.settings import * # NOQA
DATA_DIR = tmpdir.name
LOG_DIR = os.path.join(DATA_DIR, 'logs')
MEDIA_ROOT = os.path.join(DATA_DIR, 'media')
atexit.register(tmpdir.cleanup)
EMAIL_BACKEND = 'django.core.mail.outbox'
COMPRESS_ENABLED = COMPRESS_OFFLINE = False
DEBUG = True
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
# Disable celery
CELERY_ALWAYS_EAGER = True
HAS_CELERY = False
# Don't use redis
SESSION_ENGINE = "django.contrib.sessions.backends.db"
HAS_REDIS = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
| import atexit
import os
import tempfile
tmpdir = tempfile.TemporaryDirectory()
os.environ.setdefault('DATA_DIR', tmpdir.name)
from pretix.settings import * # NOQA
DATA_DIR = tmpdir.name
LOG_DIR = os.path.join(DATA_DIR, 'logs')
MEDIA_ROOT = os.path.join(DATA_DIR, 'media')
atexit.register(tmpdir.cleanup)
EMAIL_BACKEND = 'django.core.mail.outbox'
COMPRESS_ENABLED = COMPRESS_OFFLINE = False
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
# Disable celery
CELERY_ALWAYS_EAGER = True
HAS_CELERY = False
# Don't use redis
SESSION_ENGINE = "django.contrib.sessions.backends.db"
HAS_REDIS = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
| apache-2.0 | Python |
efceebba734228b9def014e98c039b774e0dd572 | Fix path to ckeditor | geelweb/django-customflatpages | src/geelweb/django/customflatpages/admin.py | src/geelweb/django/customflatpages/admin.py | from django.contrib import admin
from django import forms
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.db import models
from geelweb.django.customflatpages.models import CustomFlatPage
class CustomFlatPageForm(FlatpageForm):
class Meta:
model = CustomFlatPage
class CustomFlatPageAdmin(FlatPageAdmin):
form = CustomFlatPageForm
fieldsets = (
(None, {'fields': ('url', 'title', 'content', 'emplacement', 'order')}),
)
formfield_overrides = { models.TextField: {'widget': forms.Textarea(attrs={'class':'ckeditor'})}, }
list_display = ('url', 'title', 'emplacement', 'order', )
list_filter = ('emplacement', )
class Media:
js = ('ckeditor/ckeditor.js',)
admin.site.unregister(FlatPage)
admin.site.register(CustomFlatPage, CustomFlatPageAdmin)
| from django.contrib import admin
from django import forms
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.db import models
from geelweb.django.customflatpages.models import CustomFlatPage
class CustomFlatPageForm(FlatpageForm):
class Meta:
model = CustomFlatPage
class CustomFlatPageAdmin(FlatPageAdmin):
form = CustomFlatPageForm
fieldsets = (
(None, {'fields': ('url', 'title', 'content', 'emplacement', 'order')}),
)
formfield_overrides = { models.TextField: {'widget': forms.Textarea(attrs={'class':'ckeditor'})}, }
list_display = ('url', 'title', 'emplacement', 'order', )
list_filter = ('emplacement', )
class Media:
js = ('/static/ckeditor/ckeditor.js',)
admin.site.unregister(FlatPage)
admin.site.register(CustomFlatPage, CustomFlatPageAdmin)
| mit | Python |
88b2cfaac40cdd9e06994fb8104ace6740399564 | set new version 0.2.9 | jeanmask/opps,YACOWS/opps,YACOWS/opps,jeanmask/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,williamroot/opps,jeanmask/opps,opps/opps,jeanmask/opps | opps/__init__.py | opps/__init__.py | # -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 9)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"thiago@avelino.xxx"
__license__ = u"MIT"
__copyright__ = u"Copyright 2014, Thiago Avelino"
| # -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 8, 3)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"thiago@avelino.xxx"
__license__ = u"MIT"
__copyright__ = u"Copyright 2014, Thiago Avelino"
| mit | Python |
2a531f331ef5fe4d42e92460257d650ce481a2be | set empty prefix instead of None | mihau/labDNS | labDNS/storages.py | labDNS/storages.py | try:
import redis
except ImportError:
redis = None
try:
import consul
except ImportError:
consul = None
class BaseStorage:
DEFAULT_CONFIG = dict()
def __init__(self, config):
self.config = self.DEFAULT_CONFIG
self._configure(config)
def get(self, key):
raise NotImplementedError
def _configure(self, config):
self.config.update(config)
class DictStorage(BaseStorage):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.dictionary = self.config
def get(self, key, default=None):
return self.dictionary.get(key, default)
class RedisStorage(BaseStorage):
DEFAULT_SETTINGS = dict(host='localhost', port=6379, db=0)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.redis = redis.StrictRedis(**self.config)
def get(self, key, default=None):
value = self.redis.get(key)
return value.decode("utf-8") if value else default
class ConsulStorage(BaseStorage):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.consul = consul.Consul(**self.config)
def _configure(self, config):
self.key_prefix = config.pop('key_prefix', '')
self.config.update(config)
def get(self, key, default=None):
index, data = self.consul.kv.get(self.key_prefix + key)
value = data['Value'] if data else None
return value.decode("utf-8") if value else default
| try:
import redis
except ImportError:
redis = None
try:
import consul
except ImportError:
consul = None
class BaseStorage:
DEFAULT_CONFIG = dict()
def __init__(self, config):
self.config = self.DEFAULT_CONFIG
self._configure(config)
def get(self, key):
raise NotImplementedError
def _configure(self, config):
self.config.update(config)
class DictStorage(BaseStorage):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.dictionary = self.config
def get(self, key, default=None):
return self.dictionary.get(key, default)
class RedisStorage(BaseStorage):
DEFAULT_SETTINGS = dict(host='localhost', port=6379, db=0)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.redis = redis.StrictRedis(**self.config)
def get(self, key, default=None):
value = self.redis.get(key)
return value.decode("utf-8") if value else default
class ConsulStorage(BaseStorage):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.consul = consul.Consul(**self.config)
def _configure(self, config):
self.key_prefix = config.pop('key_prefix', None)
self.config.update(config)
def get(self, key, default=None):
index, data = self.consul.kv.get(self.key_prefix + key)
value = data['Value'] if data else None
return value.decode("utf-8") if value else default
| bsd-3-clause | Python |
dd65eb488f2e683a77ccb0609e2cc3f4b58473e6 | Update 0.9 | Deavelleye/dj-CerberusAC,Deavelleye/dj-CerberusAC,Deavelleye/dj-CerberusAC,Deavelleye/dj-CerberusAC | src/cerberus_ac/admin.py | src/cerberus_ac/admin.py | # -*- coding: utf-8 -*-
"""Admin module."""
from django.contrib import admin
from django.contrib.admin.sites import AdminSite
from cerberus_ac.views import EditUserPermissions
from .models import *
class SecurityAdmin(AdminSite):
pass
class DataAdmin(AdminSite):
pass
class AuditAdmin(AdminSite):
pass
security_admin_site = SecurityAdmin(name='SecurityAdmin')
data_admin_site = DataAdmin(name='DataAdmin')
audit_admin_site = AuditAdmin(name='AuditAdmin')
# # Security Admin Pages
# # Logs
# @security_admin_site.register(AccessHistory)
# class ObjectAccessHistoryAdmin(admin.ModelAdmin):
# pass
#
# @security_admin_site.register(PrivilegeHistory)
# class PrivChangesHistoryAdmin(admin.ModelAdmin):
# pass
#
# # User Permissions
# @security_admin_site.register(RolePrivilege)
# class PermissionsAdmin(admin.ModelAdmin):
# pass
# Data Admin Pages
| # -*- coding: utf-8 -*-
"""Admin module."""
from django.contrib import admin
from django.contrib.admin.sites import AdminSite
from cerberus_ac.views import EditUserPermissions
from .models import *
class SecurityAdmin(AdminSite):
pass
class DataAdmin(AdminSite):
pass
class AuditAdmin(AdminSite):
pass
security_admin_site = SecurityAdmin(name='SecurityAdmin')
data_admin_site = DataAdmin(name='DataAdmin')
audit_admin_site = AuditAdmin(name='AuditAdmin')
# Security Admin Pages
# Logs
@security_admin_site.register(AccessHistory)
class ObjectAccessHistoryAdmin(admin.ModelAdmin):
pass
@security_admin_site.register(PrivilegeHistory)
class PrivChangesHistoryAdmin(admin.ModelAdmin):
pass
# User Permissions
@security_admin_site.register(RolePrivilege)
class PermissionsAdmin(admin.ModelAdmin):
pass
# Data Admin Pages
| isc | Python |
32935c32ae592e84cfb5d8e1c1988e5e96659be3 | fix tests | appi147/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis,appi147/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis | Jarvis/tests/test_brain.py | Jarvis/tests/test_brain.py | import unittest
from packages.aiml.brain import Brain
class BrainTest(unittest.TestCase):
def test_memory(self):
b = Brain()
response = b.respond("What are you")
self.assertEqual(str(response), "I'm a bot, silly!")
if __name__ == '__main__':
unittest.main()
| import unittest
from packages.aiml.brain import Brain
class BrainTest(unittest.TestCase):
def test_memory(self):
b = Brain()
response = b.respond("TEST")
self.assertEqual(str(response), 'TEST')
if __name__ == '__main__':
unittest.main()
| mit | Python |
056c7aa5bd80d629191840543636da33f303a5f1 | Remove unicode test. | lasote/django-geoposition,Teino1978-Corp/Teino1978-Corp-django-geoposition,mativs/django-geoposition,mbwk/django-geoposition,RamezIssac/django-geoposition,akiokio/django-geoposition,rmoorman/django-geoposition,lasote/django-geoposition,Teino1978-Corp/Teino1978-Corp-django-geoposition,mativs/django-geoposition,akiokio/django-geoposition,rmoorman/django-geoposition,APSL/django-geoposition,APSL/django-geoposition,philippbosch/django-geoposition,lasote/django-geoposition,pancentric/django-geoposition,philippbosch/django-geoposition,lancekrogers/django-geoposition,mativs/django-geoposition,philippbosch/django-geoposition,RamezIssac/django-geoposition,mbwk/django-geoposition,Teino1978-Corp/Teino1978-Corp-django-geoposition,lancekrogers/django-geoposition,mbwk/django-geoposition,RamezIssac/django-geoposition,akiokio/django-geoposition,pancentric/django-geoposition,rmoorman/django-geoposition,lancekrogers/django-geoposition,APSL/django-geoposition | geoposition/tests/test_geoposition.py | geoposition/tests/test_geoposition.py | from decimal import Decimal
from django.test import SimpleTestCase
from geoposition import Geoposition
class GeopositionTestCase(SimpleTestCase):
def test_init_with_decimals(self):
gp = Geoposition(Decimal('52.5'), Decimal('13.4'))
self.assertEqual(gp.latitude, Decimal('52.5'))
self.assertEqual(gp.longitude, Decimal('13.4'))
def test_init_with_strs(self):
gp = Geoposition('52.5', '13.4')
self.assertEqual(gp.latitude, Decimal('52.5'))
self.assertEqual(gp.longitude, Decimal('13.4'))
def test_init_with_floats(self):
gp = Geoposition(52.5, 13.4)
self.assertEqual(gp.latitude, Decimal('52.5'))
self.assertEqual(gp.longitude, Decimal('13.4'))
def test_repr(self):
gp = Geoposition(52.5, 13.4)
self.assertEqual(repr(gp), 'Geoposition(52.5,13.4)')
def test_len(self):
gp = Geoposition(52.5, 13.4)
self.assertEqual(len(gp), 9)
def test_equality(self):
gp1 = Geoposition(52.5, 13.4)
gp2 = Geoposition(52.5, 13.4)
self.assertEqual(gp1, gp2)
def test_inequality(self):
gp1 = Geoposition(52.5, 13.4)
gp2 = Geoposition(52.4, 13.1)
self.assertNotEqual(gp1, gp2)
| from decimal import Decimal
from django.test import SimpleTestCase
from geoposition import Geoposition
class GeopositionTestCase(SimpleTestCase):
def test_init_with_decimals(self):
gp = Geoposition(Decimal('52.5'), Decimal('13.4'))
self.assertEqual(gp.latitude, Decimal('52.5'))
self.assertEqual(gp.longitude, Decimal('13.4'))
def test_init_with_strs(self):
gp = Geoposition('52.5', '13.4')
self.assertEqual(gp.latitude, Decimal('52.5'))
self.assertEqual(gp.longitude, Decimal('13.4'))
def test_init_with_unicodes(self):
gp = Geoposition(u'52.5', u'13.4')
self.assertEqual(gp.latitude, Decimal('52.5'))
self.assertEqual(gp.longitude, Decimal('13.4'))
def test_init_with_floats(self):
gp = Geoposition(52.5, 13.4)
self.assertEqual(gp.latitude, Decimal('52.5'))
self.assertEqual(gp.longitude, Decimal('13.4'))
def test_repr(self):
gp = Geoposition(52.5, 13.4)
self.assertEqual(repr(gp), 'Geoposition(52.5,13.4)')
def test_len(self):
gp = Geoposition(52.5, 13.4)
self.assertEqual(len(gp), 9)
def test_equality(self):
gp1 = Geoposition(52.5, 13.4)
gp2 = Geoposition(52.5, 13.4)
self.assertEqual(gp1, gp2)
def test_inequality(self):
gp1 = Geoposition(52.5, 13.4)
gp2 = Geoposition(52.4, 13.1)
self.assertNotEqual(gp1, gp2)
| mit | Python |
916b4e25f163456b1f6869fb431fe4280be0bb6b | Fix YAML config reading. | praekelt/vumi-dashboard,praekelt/vumi-dashboard | twisted/plugins/graphite2holodeck.py | twisted/plugins/graphite2holodeck.py | from zope.interface import implements
import yaml
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from vumidash.graphite_client import GraphiteClient
from vumidash.dummy_client import DummyClient
from vumidash.holodeck_pusher import HolodeckPusherService
class Options(usage.Options):
optFlags = [
["dummy", None, "Use a dummy metrics source instead of reading"
" from Graphite."],
]
optParameters = [
["graphite-url", "g", None, "The URL of the Graphite web service."],
["config", "c", None, "The YAML config file describing which metrics"
" to push."],
]
class Graphite2HolodeckServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "graphite2holodeck"
description = "Read data from Graphite and push it to Holodeck"
options = Options
def makeService(self, options):
graphite_url = options["graphite-url"]
with open(options["config"]) as f:
config = yaml.safe_load(f.read())
if options["dummy"]:
metrics_source = DummyClient()
else:
metrics_source = GraphiteClient(graphite_url)
holodeck_pusher = HolodeckPusherService(metrics_source, config)
return holodeck_pusher
# service maker instance for twistd
graphite2holodeck = Graphite2HolodeckServiceMaker()
| from zope.interface import implements
import yaml
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from vumidash.graphite_client import GraphiteClient
from vumidash.dummy_client import DummyClient
from vumidash.holodeck_pusher import HolodeckPusherService
class Options(usage.Options):
optFlags = [
["dummy", None, "Use a dummy metrics source instead of reading"
" from Graphite."],
]
optParameters = [
["graphite-url", "g", None, "The URL of the Graphite web service."],
["config", "c", None, "The YAML config file describing which metrics"
" to push."],
]
class Graphite2HolodeckServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "graphite2holodeck"
description = "Read data from Graphite and push it to Holodeck"
options = Options
def makeService(self, options):
graphite_url = options["graphite-url"]
config = yaml.safe_load(options["config"])
if options["dummy"]:
metrics_source = DummyClient()
else:
metrics_source = GraphiteClient(graphite_url)
holodeck_pusher = HolodeckPusherService(metrics_source, config)
return holodeck_pusher
# service maker instance for twistd
graphite2holodeck = Graphite2HolodeckServiceMaker()
| bsd-3-clause | Python |
c1efaefbe6a098e3f74bed20ff55a4307ec90ddd | Use new version of Dockets | gamechanger/deferrable | dynamic_requirements.py | dynamic_requirements.py | install_requires = [
'Dockets>=0.4.0,<0.5.0',
'boto>=2.26.0',
]
test_requires = [
'nose>=1.3.0,<2.0.0',
'mock>=1.0.0,<2.0.0',
'redis>=2.10.0,<3.0.0',
'moto>=0.4.1',
]
| install_requires = [
'Dockets>=0.3.4,<0.4.0',
'boto>=2.26.0',
]
test_requires = [
'nose>=1.3.0,<2.0.0',
'mock>=1.0.0,<2.0.0',
'redis>=2.10.0,<3.0.0',
'moto>=0.4.1',
]
| mit | Python |
291d64efef1f8d464a86fb822c0b82910f406d2c | Fix path | otsaloma/docsets,otsaloma/docsets | Dataiter.docset/Contents/Resources/index.py | Dataiter.docset/Contents/Resources/index.py | #!/usr/bin/env python3
import util
db = util.create_database()
root = "https://dataiter.readthedocs.io/en/latest/"
for soup in util.soups_from_files("Documents/*.html"):
for tag in soup.select('a[href*="#dataiter."]'):
name = tag.attrs["href"].split("#")[-1]
path = root + tag.attrs["href"]
util.insert(db, name, path)
| #!/usr/bin/env python3
import util
db = util.create_database()
root = "https://dataiter.readthedocs.io/en/latest/"
for soup in util.soups_from_files("Documents/*.html"):
for tag in soup.select('a[href*="#dataiter."]'):
name = tag.attrs["href"].split("#")[-1]
path = root + name
util.insert(db, name, path)
| mit | Python |
879deef448c2d06053d5e848b414d5e2ab2754bd | Update brain.py | kankiri/pabiana | pabiana/brain.py | pabiana/brain.py | import importlib
import os
from os import path
import pip
from . import load_interfaces, repo
def main(module_name, area_name):
req_path = path.join(os.getcwd(), module_name, 'requirements.txt')
if path.isfile(req_path):
pip.main(['install', '--upgrade', '-r', req_path])
intf_path = path.join(os.getcwd(), 'interfaces.json')
if path.isfile(intf_path):
load_interfaces(intf_path)
repo['area-name'] = area_name
mod = importlib.import_module(module_name)
if hasattr(mod, 'setup'):
mod.setup()
if hasattr(mod, 'area'):
if hasattr(mod, 'config'):
params = {'clock_name': mod.config['clock-name']}
if 'clock-slot' in mod.config:
if mod.config['clock-slot'] is not None:
params['clock_slot'] = mod.config['clock-slot']
if 'subscriptions' in mod.config:
if mod.config['subscriptions'] is not None:
params['subscriptions'] = mod.config['subscriptions']
mod.area.setup(**params)
if 'context-values' in mod.config:
mod.area.context.update(mod.config['context-values'])
mod.area.run()
elif hasattr(mod, 'clock'):
if hasattr(mod, 'config'):
params = {}
if 'timeout' in mod.config:
if mod.config['timeout'] is not None:
params['timeout'] = mod.config['timeout']
if 'use-template' in mod.config:
if mod.config['use-template'] is not None:
params['use-template'] = mod.config['use-template']
mod.clock.setup(**params)
mod.clock.run()
| import importlib
import os
from os import path
import pip
from . import load_interfaces, repo
def main(module_name, area_name):
req_path = path.join(os.getcwd(), module_name, 'requirements.txt')
if path.isfile(req_path):
pip.main(['install', '--upgrade', '-r', req_path])
intf_path = path.join(os.getcwd(), 'interfaces.json')
if path.isfile(intf_path):
load_interfaces(intf_path)
repo['area-name'] = area_name
mod = importlib.import_module(module_name)
if hasattr(mod, 'setup'):
mod.setup()
if hasattr(mod, 'area'):
if hasattr(mod, 'config'):
params = {'clock_name': mod.config['clock-name']}
if 'clock-slot' in mod.config:
if mod.config['clock-slot'] is not None:
params['clock_slot'] = mod.config['clock-slot']
if 'subscriptions' in mod.config:
if mod.config['subscriptions'] is not None:
params['subscriptions'] = mod.config['subscriptions']
mod.area.setup(**params)
if 'context-values' in mod.config:
mod.area.context.update(mod.config['context-values'])
mod.area.run()
elif hasattr(mod, 'clock'):
mod.clock.run()
| mit | Python |
5ff4e4187ab73ca02ed86b06010ca7cd1e8528a9 | change description for customized invoice module | optima-ict/odoo,optima-ict/odoo,optima-ict/odoo,optima-ict/odoo,optima-ict/odoo,optima-ict/odoo | customized_invoice/__openerp__.py | customized_invoice/__openerp__.py | # -*- coding: utf-8 -*-
{
'name': "Professional Invoice Templates - Odoo9.0",
'summary': """
Make your Invoice reports look professional by branding them. Choose from three professional Invoice templates and unlimited colors """,
'description': """
This module will install a customized client invoice report for accounting module.You will be able to customize the invoice colors,logo and the style/format of invoice to look professional and appealing to your customers. You can also create your own template from scratch or edit one of the existing templates that come with this module
""",
'images': ['static/description/howto.png'],
'price': 57,
'currency': 'EUR',
'author': "Optima ICT Services LTD",
'website': "http://www.optima.co.ke",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base', 'account', 'optima_social'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'views/account_invoice.xml',
'views/modern_template.xml',
'views/classic_template.xml',
'views/retro_template.xml',
'views/account_invoice_view.xml',
'views/res_company_view.xml',
'reports/reports.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| # -*- coding: utf-8 -*-
{
'name': "Professional Invoice Report Templates - Odoo9.0",
'summary': """
Make your Odoo Invoice reports look professional by branding them. Choose from Five professional Invoice templates and customize the colors and logo on the invoice to look professional ans appealing to your customers. You can also create your own template from scratch or edit one of the existing templates that come with this module """,
'description': """
This module will install a customized client invoice report for accounting module.
""",
'images': ['static/description/howto.png'],
'price': 57,
'currency': 'EUR',
'author': "Optima ICT Services LTD",
'website': "http://www.optima.co.ke",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base', 'account', 'optima_social'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'views/account_invoice.xml',
'views/modern_template.xml',
'views/classic_template.xml',
'views/retro_template.xml',
'views/account_invoice_view.xml',
'views/res_company_view.xml',
'reports/reports.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| agpl-3.0 | Python |
a0831cbe30cc3f487d683138eefb43ff56b3c687 | Allow for passing through URL arguments | leifdenby/django-pyroven | pyroven/views.py | pyroven/views.py | import urllib
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth import authenticate, login, logout
from django.core.urlresolvers import reverse
from pyroven.utils import setting, HttpResponseSeeOther
import pyroven
def raven_return(request):
# Get the token which the Raven server sent us - this should really
# have a try/except around it to catch KeyError
token = request.GET['WLS-Response']
# See if this is a valid token
try:
user = authenticate(response_str=token, request=request)
except pyroven.MalformedResponseError:
return HttpResponseRedirect("/")
except Exception as e:
return HttpResponse(e)
if user is None:
"Print no user"
else:
login(request, user)
# Redirect somewhere sensible
next_page = request.GET.get('next', '/')
if next_page == "":
next_page = "/"
extra_url_arg_values = {}
extra_url_args = set(setting('PYROVEN_PASSTHROUGH_URL_ARGS', []))
extra_url_args.add('next')
for k in extra_url_args:
if k in request.GET:
extra_url_arg_values[k] = request.GET.get(k)
url_extra = ''
if len(extra_url_args) > 0:
url_extra = "?%s" % "&".join(["%s=%s" % (k, v) for (k, v) in extra_url_arg_values.items()])
return HttpResponseRedirect(next_page + url_extra)
def raven_login(request):
# Get the Raven object and return a redirect to the Raven server
login_url = setting('PYROVEN_LOGIN_URL')
if login_url is None:
raise Exception("pyroven error: You must define PYROVEN_LOGIN_URL in your project settings file.")
extra_url_arg_values = {}
extra_url_args = set(setting('PYROVEN_PASSTHROUGH_URL_ARGS', []))
extra_url_args.add('next')
for k in extra_url_args:
if k in request.GET:
extra_url_arg_values[k] = request.GET.get(k)
url_extra = ''
if len(extra_url_args) > 0:
url_extra = "?%s" % "&".join(["%s=%s" % (k, v) for (k, v) in extra_url_arg_values.items()])
relative_return_url = "%s%s" % (reverse('raven_return'), url_extra)
encoded_return_url = urllib.quote(request.build_absolute_uri(relative_return_url))
return HttpResponseSeeOther("%s?ver=%d&url=%s" % (login_url, 2,
encoded_return_url)
)
def raven_logout(request):
logout(request)
| import urllib
from django.http import HttpResponseRedirect
from django.contrib.auth import authenticate, login, logout
from django.core.urlresolvers import reverse
from pyroven.utils import setting, HttpResponseSeeOther
def raven_return(request):
# Get the token which the Raven server sent us - this should really
# have a try/except around it to catch KeyError
token = request.GET['WLS-Response']
# See if this is a valid token
user = authenticate(response_str=token, request=request)
if user is None:
"Print no user"
else:
login(request, user)
# Redirect somewhere sensible
next_page = request.GET.get('next', '/')
return HttpResponseRedirect(next_page)
def raven_login(request):
# Get the Raven object and return a redirect to the Raven server
login_url = setting('PYROVEN_LOGIN_URL')
if login_url is None:
raise Exception("pyroven error: You must define PYROVEN_LOGIN_URL in your project settings file.")
next_page = request.GET.get('next', None)
if next_page is not None:
relative_return_url = "%s?next=%s" % (reverse('raven_return'), next_page)
else:
relative_return_url = reverse('raven_return')
encoded_return_url = urllib.quote(request.build_absolute_uri(relative_return_url))
return HttpResponseSeeOther("%s?ver=%d&url=%s" % (login_url, 2,
encoded_return_url)
)
def raven_logout(request):
logout(request)
| mit | Python |
48111f5f94ddbc05c48fb84ef51b28d17864ca35 | Update pysam version | kyleabeauchamp/pysam,pysam-developers/pysam,kyleabeauchamp/pysam,pysam-developers/pysam,pysam-developers/pysam,kyleabeauchamp/pysam,kyleabeauchamp/pysam,kyleabeauchamp/pysam,pysam-developers/pysam | pysam/version.py | pysam/version.py | # pysam versioning information
__version__ = "0.15.0"
# TODO: upgrade number
__samtools_version__ = "1.9"
# TODO: upgrade code and number
__bcftools_version__ = "1.9"
__htslib_version__ = "1.9"
| # pysam versioning information
__version__ = "0.14.1"
# TODO: upgrade number
__samtools_version__ = "1.7"
# TODO: upgrade code and number
__bcftools_version__ = "1.6"
__htslib_version__ = "1.7"
| mit | Python |
a6d84000b8738d8a10f95f53ea71c78f837095aa | Use len since it is sufficient for list comparison | kyle-long/pyshelf,kyle-long/pyshelf,not-nexus/shelf,not-nexus/shelf | pyshelf/utils.py | pyshelf/utils.py | import os.path
import json
import jsonschema
def create_path(*args):
"""
Gets the full absolute path based on the arguments provided. The part
it adds at the beginning is the path to the root of this repository.
WARNING: Do not start one of your path sections with a "/" otherwise that
is expected to be an absolute path.
Args:
*args(List(basestring)): Each part is a segment of the same path.
"""
directory_of_this_file = os.path.dirname(os.path.realpath(__file__))
full_path = os.path.join(directory_of_this_file, "../", *args)
full_path = os.path.realpath(full_path)
return full_path
def validate_json(schema_path, data):
"""
Validates data against schema.
Args:
schema_path(string)
data(type outlined schema)
Raises:
jsonschema.ValidationError: if data does not match schema
IOError: if schema_path is invalid
jsonschema.SchemaError: if schema is flawed
"""
schema_path = create_path(schema_path)
with open(schema_path, "r") as file:
schema = file.read()
schema = json.loads(schema)
jsonschema.validate(data, schema)
def get_bucket_config(config, name):
"""
Pulls correct bucket config from application config based on name/alias.
Args:
config(dict)
name(string): bucket name or bucket reference name
Returns:
dict | None: config for bucket or None if not found
"""
bucket_config = None
for bucket in config["buckets"]:
if bucket["name"] == name or bucket.get("referenceName") == name:
bucket_config = bucket
return bucket_config
def validate_bucket_config(config):
"""
Verifies that there is no overlap in referance name and bucket name.
Args:
config(dict)
Raises:
ValueError
"""
name_list = []
for bucket in config["buckets"]:
if bucket["name"] == bucket.get("referenceName") or bucket.get("referenceName") is None:
name_list.append(bucket["name"])
else:
name_list.extend([bucket["name"], bucket["referenceName"]])
unique_list = list(set(name_list))
if len(name_list) != len(unique_list):
raise ValueError("Error in bucket config. Overlapping bucket names and reference names.")
| import os.path
import json
import jsonschema
import collections
def create_path(*args):
"""
Gets the full absolute path based on the arguments provided. The part
it adds at the beginning is the path to the root of this repository.
WARNING: Do not start one of your path sections with a "/" otherwise that
is expected to be an absolute path.
Args:
*args(List(basestring)): Each part is a segment of the same path.
"""
directory_of_this_file = os.path.dirname(os.path.realpath(__file__))
full_path = os.path.join(directory_of_this_file, "../", *args)
full_path = os.path.realpath(full_path)
return full_path
def validate_json(schema_path, data):
"""
Validates data against schema.
Args:
schema_path(string)
data(type outlined schema)
Raises:
jsonschema.ValidationError: if data does not match schema
IOError: if schema_path is invalid
jsonschema.SchemaError: if schema is flawed
"""
schema_path = create_path(schema_path)
with open(schema_path, "r") as file:
schema = file.read()
schema = json.loads(schema)
jsonschema.validate(data, schema)
def get_bucket_config(config, name):
"""
Pulls correct bucket config from application config based on name/alias.
Args:
config(dict)
name(string): bucket name or bucket reference name
Returns:
dict | None: config for bucket or None if not found
"""
bucket_config = None
for bucket in config["buckets"]:
if bucket["name"] == name or bucket.get("referenceName") == name:
bucket_config = bucket
return bucket_config
def validate_bucket_config(config):
"""
Verifies that there is no overlap in referance name and bucket name.
Args:
config(dict)
Raises:
ValueError
"""
name_list = []
for bucket in config["buckets"]:
if bucket["name"] == bucket.get("referenceName") or bucket.get("referenceName") is None:
name_list.append(bucket["name"])
else:
name_list.extend([bucket["name"], bucket["referenceName"]])
unique_list = list(set(name_list))
if collections.Counter(name_list) != collections.Counter(unique_list):
raise ValueError("Error in bucket config. Overlapping bucket names and reference names.")
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.