commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
00b8bd07875e3d747c2399b5e00f0884499691c9 | Make sure to get the correct internal WB url for file objects | felliott/osf.io,leb2dg/osf.io,Nesiehr/osf.io,Nesiehr/osf.io,pattisdr/osf.io,caseyrollins/osf.io,TomBaxter/osf.io,mfraezz/osf.io,aaxelb/osf.io,caneruguz/osf.io,icereval/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,chennan47/osf.io,Nesiehr/osf.io,hmoco/osf.io,adlius/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,aaxelb/osf.io,hmoco/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,leb2dg/osf.io,Nesiehr/osf.io,chennan47/osf.io,baylee-d/osf.io,cwisecarver/osf.io,baylee-d/osf.io,laurenrevere/osf.io,adlius/osf.io,Johnetordoff/osf.io,mattclark/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,caseyrollins/osf.io,saradbowman/osf.io,cslzchen/osf.io,saradbowman/osf.io,aaxelb/osf.io,laurenrevere/osf.io,cslzchen/osf.io,mattclark/osf.io,hmoco/osf.io,adlius/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,icereval/osf.io,sloria/osf.io,mfraezz/osf.io,adlius/osf.io,chrisseto/osf.io,sloria/osf.io,Johnetordoff/osf.io,mattclark/osf.io,aaxelb/osf.io,caneruguz/osf.io,cslzchen/osf.io,icereval/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,chennan47/osf.io,TomBaxter/osf.io,erinspace/osf.io,felliott/osf.io,caseyrollins/osf.io,binoculars/osf.io,cwisecarver/osf.io,leb2dg/osf.io,felliott/osf.io,TomBaxter/osf.io,cslzchen/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,leb2dg/osf.io,chrisseto/osf.io,chrisseto/osf.io,sloria/osf.io,erinspace/osf.io,binoculars/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,crcresearch/osf.io | api/nodes/utils.py | api/nodes/utils.py | # -*- coding: utf-8 -*-
from modularodm import Q
from rest_framework.exceptions import PermissionDenied, NotFound
from rest_framework.status import is_server_error
import requests
from website.files.models import OsfStorageFile
from website.files.models import OsfStorageFolder
from website.util import waterbutler_api_url_for
from api.base.exceptions import ServiceUnavailableError
from api.base.utils import get_object_or_error
def get_file_object(node, path, provider, request):
    """Resolve *path* on *provider* for *node*.

    osfstorage paths are resolved against our own database; every other
    provider is looked up through waterbutler on behalf of the caller.
    Returns a stored object (osfstorage) or waterbutler metadata dict.
    """
    # osfstorage lives locally, so skip the waterbutler round trip entirely.
    if provider == 'osfstorage':
        # '/' addresses the addon root itself rather than a stored object.
        if path == '/':
            return node.get_addon('osfstorage').get_root()
        model = OsfStorageFolder if path.endswith('/') else OsfStorageFile
        return get_object_or_error(model, Q('node', 'eq', node.pk) & Q('_id', 'eq', path.strip('/')))
    addon = node.get_addon(provider)
    if not addon or not addon.configured:
        raise NotFound('The {} provider is not configured for this project.'.format(provider))
    # Query waterbutler over the internal network, forwarding the caller's
    # cookies and Authorization header so permissions are enforced.
    url = waterbutler_api_url_for(node._id, provider, path, _internal=True, meta=True)
    waterbutler_request = requests.get(
        url,
        cookies=request.COOKIES,
        headers={'Authorization': request.META.get('HTTP_AUTHORIZATION')},
    )
    if waterbutler_request.status_code == 401:
        raise PermissionDenied
    if waterbutler_request.status_code == 404:
        raise NotFound
    if is_server_error(waterbutler_request.status_code):
        raise ServiceUnavailableError(detail='Could not retrieve files information at this time.')
    try:
        return waterbutler_request.json()['data']
    except KeyError:
        raise ServiceUnavailableError(detail='Could not retrieve files information at this time.')
| # -*- coding: utf-8 -*-
from modularodm import Q
from rest_framework.exceptions import PermissionDenied, NotFound
from rest_framework.status import is_server_error
import requests
from website.files.models import OsfStorageFile
from website.files.models import OsfStorageFolder
from website.util import waterbutler_api_url_for
from api.base.exceptions import ServiceUnavailableError
from api.base.utils import get_object_or_error
def get_file_object(node, path, provider, request):
    """Resolve *path* on *provider* for *node* to a file object or metadata dict.

    osfstorage is resolved locally; other providers go through waterbutler.
    """
    # Don't bother going to waterbutler for osfstorage
    if provider == 'osfstorage':
        # Kinda like /me for a user
        # The one odd case where path is not really path
        if path == '/':
            obj = node.get_addon('osfstorage').get_root()
        else:
            if path.endswith('/'):
                model = OsfStorageFolder
            else:
                model = OsfStorageFile
            obj = get_object_or_error(model, Q('node', 'eq', node.pk) & Q('_id', 'eq', path.strip('/')))
        return obj
    if not node.get_addon(provider) or not node.get_addon(provider).configured:
        raise NotFound('The {} provider is not configured for this project.'.format(provider))
    # BUG FIX: request the *internal* waterbutler URL so server-to-server
    # traffic uses the internal endpoint (matches the fixed revision).
    url = waterbutler_api_url_for(node._id, provider, path, _internal=True, meta=True)
    waterbutler_request = requests.get(
        url,
        cookies=request.COOKIES,
        headers={'Authorization': request.META.get('HTTP_AUTHORIZATION')},
    )
    if waterbutler_request.status_code == 401:
        raise PermissionDenied
    if waterbutler_request.status_code == 404:
        raise NotFound
    if is_server_error(waterbutler_request.status_code):
        raise ServiceUnavailableError(detail='Could not retrieve files information at this time.')
    try:
        return waterbutler_request.json()['data']
    except KeyError:
        raise ServiceUnavailableError(detail='Could not retrieve files information at this time.')
| apache-2.0 | Python |
dc41024679edfa93534c80f02433040cb48cb8be | Bump patch version | prkumar/uplink | uplink/__about__.py | uplink/__about__.py | """
This module is the single source of truth for any package metadata
that is used both in distribution (i.e., setup.py) and within the
codebase.
"""
__version__ = "0.5.3"
| """
This module is the single source of truth for any package metadata
that is used both in distribution (i.e., setup.py) and within the
codebase.
"""
__version__ = "0.5.2"
| mit | Python |
8c20deac927bbc9e27d1890d13b457be0a36255c | Add ConsoleCommand | tiagochiavericosta/cobe,meska/cobe,LeMagnesium/cobe,wodim/cobe-ng,pteichman/cobe,pteichman/cobe,LeMagnesium/cobe,tiagochiavericosta/cobe,meska/cobe,DarkMio/cobe,DarkMio/cobe,wodim/cobe-ng | halng/commands.py | halng/commands.py | import logging
import os
import readline
import sys
from brain import Brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
    """Sub-command: create a fresh brain database on disk."""

    def __init__(self):
        Command.__init__(self, "init", summary="Initialize a new brain")
        self.add_option("", "--force", action="store_true")
        self.add_option("", "--order", type="int", default=5)

    def run(self, options, args):
        # Refuse to clobber an existing brain unless --force was given.
        filename = "hal.brain"
        exists = os.path.exists(filename)
        if exists and not options.force:
            log.error("%s already exists!", filename)
            return
        if exists:
            os.remove(filename)
        Brain.init(filename, options.order)
class CloneCommand(Command):
    """Sub-command: import an existing MegaHAL brain into a new hal brain."""

    def __init__(self):
        Command.__init__(self, "clone", summary="Clone a MegaHAL brain")

    def run(self, options, args):
        if len(args) != 1:
            log.error("usage: clone <MegaHAL brain>")
            return
        if os.path.exists("hal.brain"):
            log.error("hal.brain already exists")
            return
        megahal_brain = args[0]
        # BUG FIX: the original referenced an undefined name `b` (NameError).
        # Initialize the database, open it, then import the MegaHAL data.
        Brain.init("hal.brain")
        b = Brain("hal.brain")
        b.clone(megahal_brain)
class LearnCommand(Command):
    """Sub-command: feed a text file to the brain, one line per utterance."""

    def __init__(self):
        Command.__init__(self, "learn", summary="Learn a file of text")

    def run(self, options, args):
        if len(args) != 1:
            log.error("usage: learn <text file>")
            return
        filename = args[0]
        b = Brain("hal.brain")
        # BUG FIX: the original never closed the file handle.  A context
        # manager closes it even if learning raises; iterating the file
        # object streams line by line (same lines as xreadlines()).
        with open(filename) as fd:
            for line in fd:
                b.learn(line.strip())
class ConsoleCommand(Command):
    # Interactive REPL: each line the user types is learned, then replied to.
    def __init__(self):
        Command.__init__(self, "console", summary="Speak with Hal.")
    def run(self, options, args):
        b = Brain("hal.brain")
        while True:
            try:
                cmd = raw_input("> ")
            except EOFError:
                # Ctrl-D ends the session cleanly with a trailing newline.
                print
                sys.exit(0)
            # Learn the input before generating a reply to it.
            b.learn(cmd)
            print b.reply(cmd).capitalize()
| import logging
import os
from brain import Brain
from cmdparse import Command
log = logging.getLogger("hal")
class InitCommand(Command):
    # Sub-command: create a new (empty) brain database file.
    def __init__(self):
        Command.__init__(self, "init", summary="Initialize a new brain")
        self.add_option("", "--force", action="store_true")
        self.add_option("", "--order", type="int", default=5)
    def run(self, options, args):
        filename = "hal.brain"
        if os.path.exists(filename):
            if options.force:
                # --force: silently overwrite an existing brain.
                os.remove(filename)
            else:
                log.error("%s already exists!", filename)
                return
        Brain.init(filename, options.order)
class CloneCommand(Command):
    """Sub-command: import an existing MegaHAL brain into a new hal brain."""

    def __init__(self):
        Command.__init__(self, "clone", summary="Clone a MegaHAL brain")

    def run(self, options, args):
        if len(args) != 1:
            log.error("usage: clone <MegaHAL brain>")
            return
        if os.path.exists("hal.brain"):
            log.error("hal.brain already exists")
            return
        megahal_brain = args[0]
        # BUG FIX: the original referenced an undefined name `b` (NameError).
        # Initialize the database, open it, then import the MegaHAL data.
        Brain.init("hal.brain")
        b = Brain("hal.brain")
        b.clone(megahal_brain)
class LearnCommand(Command):
    """Sub-command: feed a text file to the brain, one line per utterance."""

    def __init__(self):
        Command.__init__(self, "learn", summary="Learn a file of text")

    def run(self, options, args):
        if len(args) != 1:
            log.error("usage: learn <text file>")
            return
        filename = args[0]
        b = Brain("hal.brain")
        # BUG FIX: the original never closed the file handle.  A context
        # manager closes it even on error; iterating the file object
        # streams line by line (same lines as xreadlines()).
        with open(filename) as fd:
            for line in fd:
                b.learn(line.strip())
| mit | Python |
a202af3e20d1a8d094b1c9683b7eaeb701541f73 | Fix broken test | cjellick/cattle,vincent99/cattle,cloudnautique/cattle,cloudnautique/cattle,cjellick/cattle,cloudnautique/cattle,rancherio/cattle,cjellick/cattle,vincent99/cattle,rancher/cattle,vincent99/cattle,cjellick/cattle,rancher/cattle,rancherio/cattle,rancher/cattle,rancherio/cattle,cloudnautique/cattle | tests/integration/cattletest/core/test_k8s.py | tests/integration/cattletest/core/test_k8s.py | from common_fixtures import * # NOQA
def test_create_k8s_container_no_k8s(context):
c = context.create_container(labels={
'io.kubernetes.pod.namespace': 'n',
'io.kubernetes.pod.name': 'p',
'io.kubernetes.container.name': 'POD',
})
c = context.client.wait_success(c)
assert c.state == 'running'
def test_create_k8s_container_no_k8s_fail(new_context, super_client):
    # With the account switched to kubernetes orchestration, starting a
    # pod-labelled container must fail until a labels provider exists.
    client = new_context.client
    c = new_context.create_container(labels={
        'io.kubernetes.pod.namespace': 'n',
        'io.kubernetes.pod.name': 'p',
        'io.kubernetes.container.name': 'POD',
    }, startOnCreate=False)
    super_client.update(c.account(), orchestration='kubernetes')
    c = client.wait_transitioning(c.start())
    assert c.transitioning == 'error'
    assert c.transitioningMessage == 'Failed to find labels provider'
| from common_fixtures import * # NOQA
def test_create_k8s_container_no_k8s(context):
    # A container carrying kubernetes pod labels should start normally
    # when the account is not using kubernetes orchestration.
    c = context.create_container(labels={
        'io.kubernetes.pod.namespace': 'n',
        'io.kubernetes.pod.name': 'p',
        'io.kubernetes.container.name': 'POD',
    })
    c = context.client.wait_success(c)
    assert c.state == 'running'
def test_create_k8s_container_no_k8s_fail(new_context, super_client):
    # With kubernetes orchestration enabled, starting a pod-labelled
    # container must fail until a labels provider exists.
    client = new_context.client
    c = new_context.create_container(labels={
        'io.kubernetes.pod.namespace': 'n',
        'io.kubernetes.pod.name': 'p',
        'io.kubernetes.container.name': 'POD',
    }, startOnCreate=False)
    # BUG FIX: the orchestration field expects the full name 'kubernetes';
    # 'k8s' did not switch the account, so the start never failed and the
    # test was broken (matches the fixed revision of this test).
    super_client.update(c.account(), orchestration='kubernetes')
    c = client.wait_transitioning(c.start())
    assert c.transitioning == 'error'
    assert c.transitioningMessage == 'Failed to find labels provider'
| apache-2.0 | Python |
301138d302edce2d6e6b3e1d824f6fbc5ab876be | Change workdir back to starting one when testing "cd" | melkamar/webstore-manager,melkamar/webstore-manager | tests/script_parser/test_generic_functions.py | tests/script_parser/test_generic_functions.py | import os
import pytest
import shutil
import zipfile
from script_parser import parser
def test_pushd_popd():
    # pushd must descend into nested directories and popd must unwind the
    # stack one level at a time, ending back at the starting directory.
    p = parser.Parser("foo")
    startdir = os.getcwd()
    p.execute_line("pushd tests")
    assert os.getcwd() == os.path.join(startdir, 'tests')
    p.execute_line("pushd files")
    assert os.getcwd() == os.path.join(startdir, 'tests', 'files')
    p.execute_line("popd")
    assert os.getcwd() == os.path.join(startdir, 'tests')
    p.execute_line("popd")
    assert os.getcwd() == startdir
def test_popd_empty():
    # popd on an empty directory stack must raise IndexError.
    p = parser.Parser("foo")
    startdir = os.getcwd()
    p.execute_line("pushd tests")
    assert os.getcwd() == os.path.join(startdir, 'tests')
    p.execute_line("popd")
    with pytest.raises(IndexError):
        p.execute_line("popd")
def test_cd():
    # 'cd' must change the process working directory; change back afterwards
    # so later tests are unaffected by this one.
    p = parser.Parser("foo")
    startdir = os.getcwd()
    p.execute_line("cd tests")
    assert os.getcwd() == os.path.join(startdir, 'tests')
    os.chdir(startdir)
    assert os.getcwd() == startdir
def test_zip():
    # 'zip' must produce a readable archive of the given folder.
    zip_fn = os.path.join(os.getcwd(), 'testzip.zip')
    # Remove any leftover archive from a previous (failed) run.
    shutil.rmtree(zip_fn, ignore_errors=True)
    if os.path.exists(zip_fn):
        os.remove(zip_fn)
    assert not os.path.exists(zip_fn)
    p = parser.Parser("foo")
    p.execute_line("zip tests/files/sample_folder testzip.zip")
    assert os.path.exists(zip_fn)
    archive = zipfile.ZipFile(zip_fn, 'r')
    txt = archive.read('hello').decode("utf-8")
    assert txt.find("Sample bare content") != -1  # Make sure expected content is present in archive
    archive.close()
    os.remove(zip_fn)
| import os
import pytest
import shutil
import zipfile
from script_parser import parser
def test_pushd_popd():
    # pushd must descend into nested directories and popd must unwind the
    # stack one level at a time, ending back at the starting directory.
    p = parser.Parser("foo")
    startdir = os.getcwd()
    p.execute_line("pushd tests")
    assert os.getcwd() == os.path.join(startdir, 'tests')
    p.execute_line("pushd files")
    assert os.getcwd() == os.path.join(startdir, 'tests', 'files')
    p.execute_line("popd")
    assert os.getcwd() == os.path.join(startdir, 'tests')
    p.execute_line("popd")
    assert os.getcwd() == startdir
def test_popd_empty():
    # popd on an empty directory stack must raise IndexError.
    p = parser.Parser("foo")
    startdir = os.getcwd()
    p.execute_line("pushd tests")
    assert os.getcwd() == os.path.join(startdir, 'tests')
    p.execute_line("popd")
    with pytest.raises(IndexError):
        p.execute_line("popd")
def test_cd():
    """Verify 'cd' changes the working directory, then restore it."""
    p = parser.Parser("foo")
    startdir = os.getcwd()
    p.execute_line("cd tests")
    assert os.getcwd() == os.path.join(startdir, 'tests')
    # BUG FIX: the original left the process in tests/, breaking any test
    # that runs afterwards and uses relative paths.  Change back.
    os.chdir(startdir)
    assert os.getcwd() == startdir
def test_zip():
    # 'zip' must produce a readable archive of the given folder.
    zip_fn = os.path.join(os.getcwd(), 'testzip.zip')
    # Remove any leftover archive from a previous (failed) run.
    shutil.rmtree(zip_fn, ignore_errors=True)
    if os.path.exists(zip_fn):
        os.remove(zip_fn)
    assert not os.path.exists(zip_fn)
    p = parser.Parser("foo")
    p.execute_line("zip tests/files/sample_folder testzip.zip")
    assert os.path.exists(zip_fn)
    archive = zipfile.ZipFile(zip_fn, 'r')
    txt = archive.read('hello').decode("utf-8")
    assert txt.find("Sample bare content") != -1  # Make sure expected content is present in archive
    archive.close()
    os.remove(zip_fn)
| mit | Python |
c06267145205ab7c1272d0d77643aed145b3655c | Add while interrupt. | dbrentley/Raspberry-Pi-3-GPS | get_data.py | get_data.py | #!/usr/bin/env python3
from gps.gps_class import GPS
import time
import sys
def main():
    """Print a GPS fix once per second until the user presses Ctrl+C."""
    gps_data = GPS('/dev/ttyS0')
    print("Press Control+C to stop.")
    gps_data.start()
    try:
        while True:
            print("lat: {}, lon: {}, elevation: {}ft, speed: {}mph".format(
                gps_data.lat, gps_data.lon, gps_data.altitude, gps_data.mph))
            time.sleep(1)
    except KeyboardInterrupt:
        # Stop the GPS reader before exiting so no thread is left running.
        gps_data.stop()
        sys.exit(0)
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
from gps.gps_class import GPS
import time
def main():
    """Poll the GPS once per second and print a fix until Ctrl+C."""
    gps_data = GPS('/dev/ttyS0')
    gps_data.start()
    try:
        while True:
            print("lat: {}, lon: {}, elevation: {}ft, speed: {}mph".format(
                gps_data.lat, gps_data.lon, gps_data.altitude, gps_data.mph))
            time.sleep(1)
    except KeyboardInterrupt:
        # BUG FIX: Ctrl+C previously dumped a traceback and left the GPS
        # reader running; stop it and return cleanly instead.
        gps_data.stop()
if __name__ == '__main__':
main()
| apache-2.0 | Python |
52d65ff926a079b4e07e8bc0fda3e3c3fe8f9437 | Remove dependency from remotecv worker on queued detector | fanhero/thumbor,fanhero/thumbor,fanhero/thumbor,fanhero/thumbor | thumbor/detectors/queued_detector/__init__.py | thumbor/detectors/queued_detector/__init__.py | from remotecv.unique_queue import UniqueQueue
from thumbor.detectors import BaseDetector
class QueuedDetector(BaseDetector):
    """Detector that defers detection work to remotecv via a keyed queue."""

    queue = UniqueQueue()

    def detect(self, callback):
        # Enqueue the task by dotted name so this process never needs to
        # import the remotecv task implementation; keying on the image URL
        # de-duplicates repeated requests for the same image.
        # (Removed the unused local `engine` present in the original.)
        self.queue.enqueue_unique_from_string('remotecv.pyres_tasks.DetectTask', 'Detect',
            args=[self.detection_type, self.context.request.image_url],
            key=self.context.request.image_url)
        # No detection data is available yet, so skip result storage.
        self.context.prevent_result_storage = True
        callback([])
| from remotecv import pyres_tasks
from remotecv.unique_queue import UniqueQueue
from thumbor.detectors import BaseDetector
class QueuedDetector(BaseDetector):
    """Detector that queues detection jobs for remotecv workers."""

    queue = UniqueQueue()

    def detect(self, callback):
        # At most one queued job per image URL; duplicates are dropped.
        # (Removed the unused local `engine` present in the original.)
        self.queue.enqueue_unique(pyres_tasks.DetectTask,
            args=[self.detection_type, self.context.request.image_url],
            key=self.context.request.image_url)
        # No detection data is available yet, so skip result storage.
        self.context.prevent_result_storage = True
        callback([])
| mit | Python |
c9f8663e6b0bf38f6c041a3a6b77b8a0007a9f09 | Add a name to the index URL | ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople | urls.py | urls.py | from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('',
    # Example:
    # (r'^i4p/', include('i4p.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    (r'^admin/', include(admin.site.urls)),
    # Named landing page so templates can reverse it as 'i4p-index'.
    url(r'^$', direct_to_template, {'template' : 'base.html'}, name="i4p-index"),
    # django-registration account flows (login, signup, activation).
    (r'^accounts/', include('registration.backends.default.urls')),
)
| from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from django.views.generic.simple import direct_to_template
admin.autodiscover()
urlpatterns = patterns('',
    # Example:
    # (r'^i4p/', include('i4p.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    (r'^admin/', include(admin.site.urls)),
    # Unnamed landing page; consider naming it so templates can reverse it.
    (r'^$', direct_to_template, {'template' : 'base.html'}),
    # django-registration account flows (login, signup, activation).
    (r'^accounts/', include('registration.backends.default.urls')),
)
| agpl-3.0 | Python |
24b58f2bcbff029ce5d399dc1d8f478d7b066af7 | fix image provider api | chaubold/hytra,chaubold/hytra,chaubold/hytra | toolbox/pluginsystem/image_provider_plugin.py | toolbox/pluginsystem/image_provider_plugin.py | from yapsy.IPlugin import IPlugin
class ImageProviderPlugin(IPlugin):
"""
This is the base class for all plugins that load images from a given location
"""
    def activate(self):
        """Called when the plugin is activated; no setup is required here."""
        pass
    def deactivate(self):
        """Called when the plugin is deactivated; no teardown is required here."""
        pass
def getImageDataAtTimeFrame(self, Resource, PathInResource, timeframe):
"""
Loads image data from location.
Return one numpy array.
"""
raise NotImplementedError()
return []
def getLabelImageForFrame(self, Resource, PathInResource, timeframe):
"""
Get the label image(volume) of one time frame
"""
raise NotImplementedError()
return []
def getImageShape(self, Resource, PathInResource):
"""
extract the shape from the labelimage
"""
raise NotImplementedError()
return []
    def getTimeRange(self, Resource, PathInResource):
        """
        Determine the available time range by counting label images.

        Abstract: concrete image-provider plugins must override this.
        """
        raise NotImplementedError()
return [] | from yapsy.IPlugin import IPlugin
class ImageProviderPlugin(IPlugin):
"""
This is the base class for all plugins that load images from a given location
"""
    def activate(self):
        """Called when the plugin is activated; no setup is required here."""
        pass
    def deactivate(self):
        """Called when the plugin is deactivated; no teardown is required here."""
        pass
def getImageDataAtTimeFrame(self, Resource, PathInResource, timeframe):
"""
Loads image data from location.
Return one numpy array.
"""
raise NotImplementedError()
return []
def getLabelImageForFrame(self, Resource, PathInResource, timeframe):
"""
Get the label image(volume) of one time frame
"""
raise NotImplementedError()
return []
    def _getShapeAndTimeRange(self):
        """
        Extract the shape (and time range) from the label image.

        Abstract: concrete image-provider plugins must override this.
        """
        raise NotImplementedError()
return [] | mit | Python |
151110aefcd9268b77ecbfe1e6e637bbb5f8bc1d | Update move_motor.py to python3, make it work for all motors | dwalton76/ev3dev-lang-python,rhempel/ev3dev-lang-python,dwalton76/ev3dev-lang-python | utils/move_motor.py | utils/move_motor.py | #!/usr/bin/env python3
"""
Used to adjust the position of a motor in an already assembled robot
where you can"t move the motor by hand.
"""
from ev3dev.auto import OUTPUT_A, OUTPUT_B, OUTPUT_C, OUTPUT_D, Motor
import argparse
import logging
import sys
# command line args
parser = argparse.ArgumentParser(description="Used to adjust the position of a motor in an already assembled robot")
parser.add_argument("motor", type=str, help="A, B, C or D")
parser.add_argument("degrees", type=int)
parser.add_argument("-s", "--speed", type=int, default=50)
args = parser.parse_args()
# logging
logging.basicConfig(level=logging.INFO,
format="%(asctime)s %(levelname)5s: %(message)s")
log = logging.getLogger(__name__)
if args.motor == "A":
motor = Motor(OUTPUT_A)
elif args.motor == "B":
motor = Motor(OUTPUT_B)
elif args.motor == "C":
motor = Motor(OUTPUT_C)
elif args.motor == "D":
motor = Motor(OUTPUT_D)
else:
raise Exception("%s is invalid, options are A, B, C, D")
if not motor.connected:
log.error("%s is not connected" % motor)
sys.exit(1)
if args.degrees:
log.info("Motor %s, current position %d, move to position %d, max speed %d" %
(args.motor, motor.position, args.degrees, motor.max_speed))
motor.run_to_rel_pos(speed_sp=args.speed,
position_sp=args.degrees,
stop_action='hold')
| #!/usr/bin/env python
"""
Used to adjust the position of a motor in an already assembled robot
where you can"t move the motor by hand.
"""
from ev3dev.auto import OUTPUT_A, OUTPUT_B, OUTPUT_C, OUTPUT_D
from ev3dev.helper import LargeMotor
import argparse
import logging
import sys
# command line args
parser = argparse.ArgumentParser(description="Used to adjust the position of a motor in an already assembled robot")
parser.add_argument("motor", type=str, help="A, B, C or D")
parser.add_argument("degrees", type=int)
parser.add_argument("-s", "--speed", type=int, default=50)
args = parser.parse_args()
# logging
logging.basicConfig(level=logging.DEBUG,
format="%(asctime)s %(levelname)5s: %(message)s")
log = logging.getLogger(__name__)
# For this it doesn't really matter if it is a LargeMotor or a MediumMotor
if args.motor == "A":
motor = LargeMotor(OUTPUT_A)
elif args.motor == "B":
motor = LargeMotor(OUTPUT_B)
elif args.motor == "C":
motor = LargeMotor(OUTPUT_C)
elif args.motor == "D":
motor = LargeMotor(OUTPUT_D)
else:
raise Exception("%s is invalid, options are A, B, C, D")
if not motor.connected:
log.error("%s is not connected" % motor)
sys.exit(1)
if args.degrees:
log.info("Motor %s, move to position %d, max speed %d" % (args.motor, args.degrees, motor.max_speed))
motor.run_to_rel_pos(speed_sp=args.speed,
position_sp=args.degrees,
# ramp_up_sp=500,
# ramp_down_sp=500,
stop_action='hold')
motor.wait_for_running()
motor.wait_for_stop()
motor.stop(stop_action='brake')
log.info("Motor %s stopped, final position %d" % (args.motor, motor.position))
| mit | Python |
d6763210d60a7b6caf97e0b5930e1456f1a73bb1 | Drop Py2 and six on tests/unit/modules/test_mod_random.py | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/unit/modules/test_mod_random.py | tests/unit/modules/test_mod_random.py | """
:codeauthor: Rupesh Tare <rupesht@saltstack.com>
"""
import salt.modules.mod_random as mod_random
import salt.utils.pycrypto
from salt.exceptions import SaltInvocationError
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import patch
from tests.support.unit import TestCase, skipIf
def _test_hashlib():
try:
import hashlib
except ImportError:
return False
if not hasattr(hashlib, "algorithms_guaranteed"):
return False
else:
return True
SUPPORTED_HASHLIB = _test_hashlib()
@skipIf(not SUPPORTED_HASHLIB, "Hashlib does not contain needed functionality")
class ModrandomTestCase(TestCase, LoaderModuleMockMixin):
    """
    Test cases for salt.modules.mod_random
    """

    def setup_loader_modules(self):
        # No special loader context is required for these tests.
        return {mod_random: {}}

    def test_hash(self):
        """
        Test for Encodes a value with the specified encoder.
        """
        # Default algorithm hash of 'value' starts with 'ec2c'; an unknown
        # algorithm name must raise SaltInvocationError.
        self.assertEqual(mod_random.hash("value")[0:4], "ec2c")
        self.assertRaises(SaltInvocationError, mod_random.hash, "value", "algorithm")

    def test_str_encode(self):
        """
        Test for The value to be encoded.
        """
        self.assertRaises(SaltInvocationError, mod_random.str_encode, "None", "abc")
        self.assertRaises(SaltInvocationError, mod_random.str_encode, None)
        # We're using the base64 module which does not include the trailing new line
        self.assertEqual(mod_random.str_encode("A"), "QQ==")

    def test_get_str(self):
        """
        Test for Returns a random string of the specified length.
        """
        with patch.object(salt.utils.pycrypto, "secure_password", return_value="A"):
            self.assertEqual(mod_random.get_str(), "A")

    def test_shadow_hash(self):
        """
        Test for Generates a salted hash suitable for /etc/shadow.
        """
        with patch.object(salt.utils.pycrypto, "gen_hash", return_value="A"):
            self.assertEqual(mod_random.shadow_hash(), "A")
| # -*- coding: utf-8 -*-
"""
:codeauthor: Rupesh Tare <rupesht@saltstack.com>
"""
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.modules.mod_random as mod_random
import salt.utils.pycrypto
from salt.exceptions import SaltInvocationError
# Import 3rd-party libs
from salt.ext import six
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import patch
from tests.support.unit import TestCase, skipIf
def _test_hashlib():
    # Guard keeps the module importable on builds where hashlib is missing.
    try:
        import hashlib
    except ImportError:
        return False
    # Python 2 spells the attribute "algorithms"; Python 3 renamed it to
    # "algorithms_guaranteed".
    if six.PY2:
        algorithms_attr_name = "algorithms"
    else:
        algorithms_attr_name = "algorithms_guaranteed"
    if not hasattr(hashlib, algorithms_attr_name):
        return False
    else:
        return True
SUPPORTED_HASHLIB = _test_hashlib()
@skipIf(not SUPPORTED_HASHLIB, "Hashlib does not contain needed functionality")
class ModrandomTestCase(TestCase, LoaderModuleMockMixin):
    """
    Test cases for salt.modules.mod_random
    """

    def setup_loader_modules(self):
        # No special loader context is required for these tests.
        return {mod_random: {}}

    def test_hash(self):
        """
        Test for Encodes a value with the specified encoder.
        """
        # Default algorithm hash of 'value' starts with 'ec2c'; an unknown
        # algorithm name must raise SaltInvocationError.
        self.assertEqual(mod_random.hash("value")[0:4], "ec2c")
        self.assertRaises(SaltInvocationError, mod_random.hash, "value", "algorithm")

    def test_str_encode(self):
        """
        Test for The value to be encoded.
        """
        self.assertRaises(SaltInvocationError, mod_random.str_encode, "None", "abc")
        self.assertRaises(SaltInvocationError, mod_random.str_encode, None)
        if six.PY2:
            # Python 2's base64 encoder appends a trailing newline.
            self.assertEqual(mod_random.str_encode("A"), "QQ==\n")
        else:
            # We're using the base64 module which does not include the trailing new line
            self.assertEqual(mod_random.str_encode("A"), "QQ==")

    def test_get_str(self):
        """
        Test for Returns a random string of the specified length.
        """
        with patch.object(salt.utils.pycrypto, "secure_password", return_value="A"):
            self.assertEqual(mod_random.get_str(), "A")

    def test_shadow_hash(self):
        """
        Test for Generates a salted hash suitable for /etc/shadow.
        """
        with patch.object(salt.utils.pycrypto, "gen_hash", return_value="A"):
            self.assertEqual(mod_random.shadow_hash(), "A")
| apache-2.0 | Python |
b69cfe67a0881fbe497cc56aec02412db82f08a5 | Fix AssetAttributes' format_extension property | gears/gears,gears/gears,gears/gears | gears/asset_attributes.py | gears/asset_attributes.py | import os
import re
from .utils import cached_property
class AssetAttributes(object):
    """Derives extension, engine, processor and mimetype info for an asset path."""

    def __init__(self, environment, path):
        self.environment = environment
        self.path = path

    @cached_property
    def search_paths(self):
        # A directory-style asset may also be addressed via its 'index' file.
        paths = [self.path]
        if os.path.basename(self.path_without_extensions) != 'index':
            path = os.path.join(self.path_without_extensions, 'index')
            paths.append(path + ''.join(self.extensions))
        return paths

    @cached_property
    def path_without_extensions(self):
        # Strip the whole trailing extension chain (e.g. '.js.coffee').
        if self.extensions:
            return self.path[:-len(''.join(self.extensions))]
        return self.path

    @cached_property
    def extensions(self):
        # All dot-extensions of the basename, in order of appearance.
        return re.findall(r'\.[^.]+', os.path.basename(self.path))

    @cached_property
    def format_extension(self):
        # Scan right-to-left for the first extension that is a registered
        # mimetype and not an engine (e.g. '.js' in 'app.js.coffee').
        for extension in reversed(self.extensions):
            engine = self.environment.engines.get(extension)
            if not engine and self.environment.mimetypes.get(extension):
                return extension

    @cached_property
    def suffix(self):
        # Extensions from the format extension onward; if none was found
        # (format_extension is None), fall back to all extensions.
        try:
            index = self.extensions.index(self.format_extension)
        except ValueError:
            return self.extensions
        return self.extensions[index:]

    @cached_property
    def engine_extensions(self):
        # Engine extensions following the format extension.
        return [e for e in self.suffix[1:] if self.environment.engines.get(e)]

    @cached_property
    def engines(self):
        return [self.environment.engines.get(e) for e in self.engine_extensions]

    @cached_property
    def preprocessors(self):
        return self._get_processors(self.environment.preprocessors)

    @cached_property
    def postprocessors(self):
        return self._get_processors(self.environment.postprocessors)

    @cached_property
    def processors(self):
        # Engines run innermost-first, between pre- and postprocessors.
        engines = list(reversed(self.engines))
        return self.preprocessors + engines + self.postprocessors

    @cached_property
    def mimetype(self):
        # Fall back to a generic binary mimetype for unknown extensions.
        return (self.environment.mimetypes.get(self.format_extension) or
                'application/octet-stream')

    def _get_processors(self, storage):
        # Instantiate each registered processor class for this asset.
        return [cls(self) for cls in storage.get(self.mimetype)]
| import os
import re
from .utils import cached_property
class AssetAttributes(object):
    """Derives extension, engine, processor and mimetype info for an asset path."""

    def __init__(self, environment, path):
        self.environment = environment
        self.path = path

    @cached_property
    def search_paths(self):
        # A directory-style asset may also be addressed via its 'index' file.
        paths = [self.path]
        if os.path.basename(self.path_without_extensions) != 'index':
            path = os.path.join(self.path_without_extensions, 'index')
            paths.append(path + ''.join(self.extensions))
        return paths

    @cached_property
    def path_without_extensions(self):
        # Strip the whole trailing extension chain (e.g. '.js.coffee').
        if self.extensions:
            return self.path[:-len(''.join(self.extensions))]
        return self.path

    @cached_property
    def extensions(self):
        # All dot-extensions of the basename, in order of appearance.
        return re.findall(r'\.[^.]+', os.path.basename(self.path))

    @cached_property
    def format_extension(self):
        # BUG FIX: scan the extensions right-to-left.  Iterating forward
        # picked '.js' in 'index.js.coffee' correctly but broke for paths
        # such as 'jquery.min.js' where a leading pseudo-extension could be
        # matched first (matches the fixed revision of this property).
        for extension in reversed(self.extensions):
            engine = self.environment.engines.get(extension)
            if not engine and self.environment.mimetypes.get(extension):
                return extension

    @cached_property
    def suffix(self):
        # Extensions from the format extension onward; if none was found,
        # fall back to all extensions.
        try:
            index = self.extensions.index(self.format_extension)
        except ValueError:
            return self.extensions
        return self.extensions[index:]

    @cached_property
    def engine_extensions(self):
        # Engine extensions following the format extension.
        return [e for e in self.suffix[1:] if self.environment.engines.get(e)]

    @cached_property
    def engines(self):
        return [self.environment.engines.get(e) for e in self.engine_extensions]

    @cached_property
    def preprocessors(self):
        return self._get_processors(self.environment.preprocessors)

    @cached_property
    def postprocessors(self):
        return self._get_processors(self.environment.postprocessors)

    @cached_property
    def processors(self):
        # Engines run innermost-first, between pre- and postprocessors.
        engines = list(reversed(self.engines))
        return self.preprocessors + engines + self.postprocessors

    @cached_property
    def mimetype(self):
        # Fall back to a generic binary mimetype for unknown extensions.
        return (self.environment.mimetypes.get(self.format_extension) or
                'application/octet-stream')

    def _get_processors(self, storage):
        # Instantiate each registered processor class for this asset.
        return [cls(self) for cls in storage.get(self.mimetype)]
| isc | Python |
a7c7bbe484183d2dee21f525c28c34c169b3f755 | fix parameterization | Naught0/qtbot | utils/user_funcs.py | utils/user_funcs.py | #!bin/env python
import asyncpg
class PGDB:
    """Thin asyncpg wrapper for reading/writing per-member values in user_info."""

    def __init__(self, db_conn):
        self.db_conn = db_conn

    async def fetch_user_info(self, member_id: int, column: str):
        """Return the value of *column* for *member_id*, or None if absent."""
        # NOTE(review): `column` is interpolated into the SQL -- identifiers
        # cannot be bound parameters in postgres, so callers must only pass
        # trusted, hard-coded column names.  member_id is bound as $1.
        query = f'''SELECT {column} FROM user_info WHERE member_id = $1;'''
        return await self.db_conn.fetchval(query, member_id)

    async def insert_user_info(self, member_id: int, column: str, col_value):
        """Upsert *col_value* into *column* for *member_id*."""
        # BUG FIX: postgres requires the conflict target in parentheses:
        # "ON CONFLICT (member_id)".  Values are bound as $1/$2; the column
        # name is interpolated (identifiers cannot be parameters).
        execute = (
            f'''INSERT INTO user_info (member_id, {column}) VALUES ($1, $2)
            ON CONFLICT (member_id) DO UPDATE SET {column} = $2;''')
        await self.db_conn.execute(execute, member_id, col_value)
| #!bin/env python
import asyncpg
class PGDB:
    """Thin asyncpg wrapper for reading/writing per-member values in user_info."""

    def __init__(self, db_conn):
        self.db_conn = db_conn

    async def fetch_user_info(self, member_id: int, column: str):
        """Return the value of *column* for *member_id*, or None if absent."""
        # NOTE(review): `column` is interpolated into the SQL -- identifiers
        # cannot be bound parameters in postgres, so callers must only pass
        # trusted, hard-coded column names.
        query = f'''SELECT {column} FROM user_info WHERE member_id = {member_id};'''
        return await self.db_conn.fetchval(query)

    async def insert_user_info(self, member_id: int, column: str, col_value):
        """Upsert *col_value* into *column* for *member_id*."""
        # BUG FIX: the original used $1 for the column *identifier* (invalid;
        # identifiers cannot be parameters) and passed a tuple to execute()
        # (asyncpg takes positional *args).  Also, postgres requires the
        # conflict target in parentheses: "ON CONFLICT (member_id)".
        execute = (
            f'''INSERT INTO user_info (member_id, {column}) VALUES ($1, $2)
            ON CONFLICT (member_id) DO UPDATE SET {column} = $2;''')
        await self.db_conn.execute(execute, member_id, col_value)
| mit | Python |
f799561adb3b76a17e9da30a7cb715ae26b1a5eb | Update tsdb_serialization.py | Planet-Nine/cs207project,Planet-Nine/cs207project | timeseries/tsdb/tsdb_serialization.py | timeseries/tsdb/tsdb_serialization.py | import json
# Number of bytes in the little-endian length prefix on every message.
LENGTH_FIELD_LENGTH = 4

def serialize(json_obj):
    '''Turn a JSON object into bytes suitable for writing out to the network.

    Includes a fixed-width length field (which counts itself, matching the
    companion Deserializer's slicing) to simplify reconstruction on the
    other end of the wire.  Returns None (after logging) when *json_obj*
    is not JSON-serializable.
    '''
    try:
        payload = json.dumps(json_obj).encode()
        total_len = len(payload) + LENGTH_FIELD_LENGTH
        return total_len.to_bytes(LENGTH_FIELD_LENGTH, byteorder='little') + payload
    # Narrowed from a bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit.  json.dumps raises TypeError/ValueError; to_bytes
    # raises OverflowError for absurdly large payloads.
    except (TypeError, ValueError, OverflowError):
        print('Invalid JSON object received:\n'+str(json_obj))
        return None
class Deserializer(object):
    '''Incrementally rebuild JSON objects from a stream of byte chunks.

    Bytes may arrive from the wire in arbitrary pieces.  Feed each piece in
    with append(); once ready() reports True, a complete length-prefixed
    JSON message has accumulated and deserialize() will decode, consume and
    return it.  Any trailing bytes stay buffered for the next message.
    '''
    def __init__(self):
        self.buf = b''
        self.buflen = -1

    def append(self, data):
        '''Add newly received bytes to the internal buffer.'''
        self.buf = self.buf + data
        self._maybe_set_length()

    def _maybe_set_length(self):
        '''Decode the length prefix as soon as enough bytes are buffered.'''
        if self.buflen < 0 and len(self.buf) >= LENGTH_FIELD_LENGTH:
            prefix = self.buf[:LENGTH_FIELD_LENGTH]
            self.buflen = int.from_bytes(prefix, byteorder="little")

    def ready(self):
        '''True when at least one complete message is buffered.'''
        return 0 < self.buflen <= len(self.buf)

    def deserialize(self):
        '''Decode, consume and return the next buffered JSON object (or None).'''
        json_str = self.buf[LENGTH_FIELD_LENGTH:self.buflen].decode()
        self.buf = self.buf[self.buflen:]
        self.buflen = -1
        # More data may already be waiting, so pick up the next prefix now.
        self._maybe_set_length()
        try:
            return json.loads(json_str)
        except json.JSONDecodeError:
            print('Invalid JSON object received:\n'+str(json_str))
            return None
| import json
# Number of bytes in the little-endian length prefix on every message.
LENGTH_FIELD_LENGTH = 4

def serialize(json_obj):
    '''Turn a JSON object into bytes suitable for writing out to the network.
    Includes a fixed-width length field to simplify reconstruction on the other
    end of the wire.

    Returns None (after logging) when *json_obj* is not JSON-serializable.
    '''
    # Implemented: the stub previously returned None unconditionally, which
    # the companion Deserializer below could never parse.  The prefix counts
    # itself, matching Deserializer's buf[LENGTH_FIELD_LENGTH:buflen] slice.
    try:
        payload = json.dumps(json_obj).encode()
        total_len = len(payload) + LENGTH_FIELD_LENGTH
        return total_len.to_bytes(LENGTH_FIELD_LENGTH, byteorder='little') + payload
    except (TypeError, ValueError, OverflowError):
        print('Invalid JSON object received:\n' + str(json_obj))
        return None
class Deserializer(object):
    '''A buffering and bytes-to-json engine.
    Data can be received in arbitrary chunks of bytes, and we need a way to
    reconstruct variable-length JSON objects from that interface. This class
    buffers up bytes until it can detect that it has a full JSON object (via
    a length field pulled off the wire). To use this, shove bytes in with the
    append() function and call ready() to check if we've reconstructed a JSON
    object. If True, then call deserialize to return it. That object will be
    removed from this buffer after it is returned.'''
    def __init__(self):
        # buf: raw bytes received so far; buflen: total length (prefix
        # included) of the pending message, or -1 while still unknown.
        self.buf = b''
        self.buflen = -1
    def append(self, data):
        # Accumulate a newly received chunk, then try to read the prefix.
        self.buf += data
        self._maybe_set_length()
    def _maybe_set_length(self):
        # Decode the little-endian length prefix once enough bytes arrived.
        if self.buflen < 0 and len(self.buf) >= LENGTH_FIELD_LENGTH:
            self.buflen = int.from_bytes(self.buf[0:LENGTH_FIELD_LENGTH], byteorder="little")
    def ready(self):
        # A full message is available once buflen bytes have accumulated.
        return (self.buflen > 0 and len(self.buf) >= self.buflen)
    def deserialize(self):
        # Slice out the payload (prefix excluded), drop it from the buffer
        # and reset for the next message.
        json_str = self.buf[LENGTH_FIELD_LENGTH:self.buflen].decode()
        self.buf = self.buf[self.buflen:]
        self.buflen = -1
        # There may be more data in the buffer already, so preserve it
        self._maybe_set_length()
        try:
            obj = json.loads(json_str)
            return obj
        except json.JSONDecodeError:
            # Malformed payloads are logged and reported as None.
            print('Invalid JSON object received:\n'+str(json_str))
            return None
| mit | Python |
623f37316c7d09c18a846d437b7ff943060a5521 | Add pageTitle params to post routes | ollien/Timpani,ollien/Timpani,ollien/Timpani | timpani/webserver/controllers/user.py | timpani/webserver/controllers/user.py | import flask
import os.path
import datetime
from ... import auth
from ... import blog
from ... import configmanager
from ... import settings
from .. import webhelpers
TEMPLATE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../templates"))
blueprint = flask.Blueprint("user", __name__, template_folder = TEMPLATE_PATH)
@blueprint.route("/")
def showPosts():
posts = blog.getPosts()
templatePath = os.path.join(TEMPLATE_PATH, "posts.html")
postParams = webhelpers.getPostsParameters()
pageTitle = postParams["blogTitle"]
return webhelpers.renderPosts(templatePath,
posts = posts, pageTitle = pageTitle, **postParams)
@blueprint.route("/post/<int:postId>")
def showPost(postId):
post = blog.getPostById(postId)
if post == None:
flask.abort(404)
else:
templatePath = os.path.join(TEMPLATE_PATH, "posts.html")
postParams = webhelpers.getPostsParameters()
pageTitle = "%s - %s" % (pageParams["blogTitle"], post.title)
return webhelpers.renderPosts(templatePath,
posts = [post], pageTitle = pageTitle, **pageParams)
@blueprint.route("/tag/<tag>")
def showPostsWithTag(tag):
posts = blog.getPostsWithTag(tag)
templatePath = os.path.join(TEMPLATE_PATH, "posts.html")
postParams = webhelpers.getPostsParameters()
pageTitle = postParams["blogTitle"]
return webhelpers.renderPosts(templatePath,
posts = posts, pageTitle = pageTitle, **postParams)
@blueprint.route("/login", methods=["GET", "POST"])
def login():
if flask.request.method == "GET":
if webhelpers.checkForSession():
return flask.redirect("/manage")
else:
return flask.render_template("login.html")
elif flask.request.method == "POST":
if ("username" not in flask.request.form
or "password" not in flask.request.form):
flask.flash("A username and password must be provided.")
return flask.render_template("login.html")
elif auth.validateUser(flask.request.form["username"],
flask.request.form["password"]):
donePage = webhelpers.canRecoverFromRedirect()
donePage = donePage if donePage is not None else "/manage"
sessionId, expires = auth.createSession(flask.request.form["username"])
flask.session["uid"] = sessionId
flask.session.permanent = True
flask.session.permanent_session_lifetime = datetime.datetime.now() - expires
return flask.redirect(donePage)
else:
flask.flash("Invalid username or password.")
return flask.render_template("login.html")
@blueprint.route("/logout", methods=["POST"])
def logout():
if webhelpers.checkForSession():
if "uid" in flask.session:
sessionId = flask.session["uid"]
auth.invalidateSession(sessionId)
flask.session.clear()
return flask.redirect("/login")
| import flask
import os.path
import datetime
from ... import auth
from ... import blog
from ... import configmanager
from ... import settings
from .. import webhelpers
TEMPLATE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../templates"))
blueprint = flask.Blueprint("user", __name__, template_folder = TEMPLATE_PATH)
@blueprint.route("/")
def showPosts():
posts = blog.getPosts()
templatePath = os.path.join(TEMPLATE_PATH, "posts.html")
return webhelpers.renderPosts(templatePath,
posts = posts, **webhelpers.getPostsParameters())
@blueprint.route("/post/<int:postId>")
def showPost(postId):
post = blog.getPostById(postId)
if post == None:
flask.abort(404)
else:
templatePath = os.path.join(TEMPLATE_PATH, "posts.html")
return webhelpers.renderPosts(templatePath,
posts = [post], **webhelpers.getPostsParameters())
@blueprint.route("/tag/<tag>")
def showPostsWithTag(tag):
posts = blog.getPostsWithTag(tag)
templatePath = os.path.join(TEMPLATE_PATH, "posts.html")
return webhelpers.renderPosts(templatePath,
posts = posts, **webhelpers.getPostsParameters())
@blueprint.route("/login", methods=["GET", "POST"])
def login():
if flask.request.method == "GET":
if webhelpers.checkForSession():
return flask.redirect("/manage")
else:
return flask.render_template("login.html")
elif flask.request.method == "POST":
if ("username" not in flask.request.form
or "password" not in flask.request.form):
flask.flash("A username and password must be provided.")
return flask.render_template("login.html")
elif auth.validateUser(flask.request.form["username"],
flask.request.form["password"]):
donePage = webhelpers.canRecoverFromRedirect()
donePage = donePage if donePage is not None else "/manage"
sessionId, expires = auth.createSession(flask.request.form["username"])
flask.session["uid"] = sessionId
flask.session.permanent = True
flask.session.permanent_session_lifetime = datetime.datetime.now() - expires
return flask.redirect(donePage)
else:
flask.flash("Invalid username or password.")
return flask.render_template("login.html")
@blueprint.route("/logout", methods=["POST"])
def logout():
if webhelpers.checkForSession():
if "uid" in flask.session:
sessionId = flask.session["uid"]
auth.invalidateSession(sessionId)
flask.session.clear()
return flask.redirect("/login")
| mit | Python |
7f2b41a706cf110e6c9cd888917e6762fad69dbc | Replace session with flask.session in user.py | ollien/Timpani,ollien/Timpani,ollien/Timpani | timpani/webserver/controllers/user.py | timpani/webserver/controllers/user.py | import flask
import os.path
import datetime
from ... import auth
from ... import blog
from ... import configmanager
from .. import webhelpers
FILE_LOCATION = os.path.abspath(os.path.dirname(__file__))
CONFIG_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../../configs/"))
TEMPLATE_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../../templates"))
configs = configmanager.ConfigManager(configPath = CONFIG_PATH)
templateConfig = configs["templates"]
blueprint = flask.Blueprint("user", __name__, template_folder = TEMPLATE_PATH)
@blueprint.route("/")
def showPosts():
posts = blog.getPosts()
return flask.render_template("posts.html", posts = posts)
@blueprint.route("/post/<int:postId>")
def showPost(postId):
post = blog.getPostById(postId)
if post == None:
flask.abort(404)
else:
return flask.render_template("posts.html", posts = [post])
@blueprint.route("/tag/<tag>")
def showPostsWithTag(tag):
posts = blog.getPostsWithTag(tag)
return flask.render_template("posts.html", posts = posts)
@blueprint.route("/login", methods=["GET", "POST"])
def login():
if flask.request.method == "GET":
if webhelpers.checkForSession():
return flask.redirect("/manage")
else:
return flask.render_template("login.html")
elif flask.request.method == "POST":
if "username" not in flask.request.form or "password" not in flask.request.form:
return flask.render_template("login.html", error = "A username and password must be provided.")
elif auth.validateUser(flask.request.form["username"], flask.request.form["password"]):
donePage = webhelpers.canRecoverFromRedirect()
donePage = donePage if donePage is not None else "/manage"
sessionId, expires = auth.createSession(flask.request.form["username"])
flask.session["uid"] = sessionId
flask.session.permanent = True
flask.session.permanent_session_lifetime = datetime.datetime.now() - expires
return flask.redirect(donePage)
else:
return flask.render_template("login.html", error = "Invalid username or password.")
@blueprint.route("/logout", methods=["POST"])
def logout():
if webhelpers.checkForSession():
if "uid" in flask.session:
sessionId = flask.session["uid"]
auth.invalidateSession(sessionId)
flask.session.clear()
return flask.redirect("/login")
| import flask
import os.path
import datetime
from ... import auth
from ... import blog
from ... import configmanager
from .. import webhelpers
FILE_LOCATION = os.path.abspath(os.path.dirname(__file__))
CONFIG_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../../configs/"))
TEMPLATE_PATH = os.path.abspath(os.path.join(FILE_LOCATION, "../../../templates"))
configs = configmanager.ConfigManager(configPath = CONFIG_PATH)
templateConfig = configs["templates"]
blueprint = flask.Blueprint("user", __name__, template_folder = TEMPLATE_PATH)
@blueprint.route("/")
def showPosts():
posts = blog.getPosts()
return flask.render_template("posts.html", posts = posts)
@blueprint.route("/post/<int:postId>")
def showPost(postId):
post = blog.getPostById(postId)
if post == None:
flask.abort(404)
else:
return flask.render_template("posts.html", posts = [post])
@blueprint.route("/tag/<tag>")
def showPostsWithTag(tag):
posts = blog.getPostsWithTag(tag)
return flask.render_template("posts.html", posts = posts)
@blueprint.route("/login", methods=["GET", "POST"])
def login():
if flask.request.method == "GET":
if webhelpers.checkForSession():
return flask.redirect("/manage")
else:
return flask.render_template("login.html")
elif flask.request.method == "POST":
if "username" not in flask.request.form or "password" not in flask.request.form:
return flask.render_template("login.html", error = "A username and password must be provided.")
elif auth.validateUser(flask.request.form["username"], flask.request.form["password"]):
donePage = webhelpers.canRecoverFromRedirect()
donePage = donePage if donePage is not None else "/manage"
sessionId, expires = auth.createSession(flask.request.form["username"])
flask.session["uid"] = sessionId
flask.session.permanent = True
flask.session.permanent_session_lifetime = datetime.datetime.now() - expires
return flask.redirect(donePage)
else:
return flask.render_template("login.html", error = "Invalid username or password.")
@blueprint.route("/logout", methods=["POST"])
def logout():
if webhelpers.checkForSession():
if "uid" in flask.session:
sessionId = flask.session["uid"]
auth.invalidateSession(sessionId)
session.clear()
return flask.redirect("/login")
| mit | Python |
84fe0f1bde39f007da08c93298f53ae1ad623fd1 | Add file that should be deployed to REQUIRED_PATHS, for JPF | dbeyer/benchexec,dbeyer/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,dbeyer/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,dbeyer/benchexec,sosy-lab/benchexec | benchexec/tools/jpf.py | benchexec/tools/jpf.py | """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2018 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
    """
    Tool info for JPF (plain jpf-core)
    (https://github.com/javapathfinder/jpf-core/).
    """
    # Files/directories deployed alongside the executable.
    REQUIRED_PATHS = [
        "../bin",
        "../build",
        "../jpf.properties"
    ]

    def executable(self):
        """Locate the JPF wrapper script used for SV-COMP runs."""
        return util.find_executable('bin/jpf-core-sv-comp')

    def version(self, executable):
        """Extract the version from the banner printed by `jpf -version`."""
        jpf_binary = os.path.join(os.path.dirname(executable), "jpf")
        banner = self._version_from_tool(jpf_binary, arg="-version")
        head = banner.splitlines()[0]
        return head.rsplit(":", 1)[-1].strip()

    def name(self):
        """Human-readable tool name."""
        return 'JPF'

    def cmdline(self, executable, options, tasks, propertyfile, rlimits):
        """Assemble the command line: executable, options, property file, tasks."""
        cmd = [executable]
        cmd.extend(options)
        cmd.extend(['--propertyfile', propertyfile])
        cmd.extend(tasks)
        return cmd

    def determine_result(self, returncode, returnsignal, output, isTimeout):
        """Map the tool's log output to a benchexec result constant."""
        # The verdict on the last matching line wins; 'UNSAFE' is tested
        # first because it contains the substring 'SAFE'.
        status = result.RESULT_UNKNOWN
        for line in output:
            if 'UNSAFE' in line:
                status = result.RESULT_FALSE_PROP
            elif 'SAFE' in line:
                status = result.RESULT_TRUE_PROP
        return status
| """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2018 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
    """
    Tool info for JPF (plain jpf-core)
    (https://github.com/javapathfinder/jpf-core/).
    """
    # Files/directories deployed alongside the executable.
    # NOTE(review): ../jpf.properties is not listed here — confirm whether
    # the properties file must also ship with the tool.
    REQUIRED_PATHS = [
        "../bin",
        "../build"
    ]
    def executable(self):
        # Wrapper script used for SV-COMP runs.
        return util.find_executable('bin/jpf-core-sv-comp')
    def version(self, executable):
        # Version is the text after the last colon on the banner's first line.
        jpf = os.path.join(os.path.dirname(executable), "jpf")
        output = self._version_from_tool(jpf, arg="-version")
        first_line = output.splitlines()[0]
        return first_line.split(":")[-1].strip()
    def name(self):
        return 'JPF'
    def cmdline(self, executable, options, tasks, propertyfile, rlimits):
        # The property file is passed via --propertyfile ahead of the tasks.
        options = options + ['--propertyfile', propertyfile]
        return [executable] + options + tasks
    def determine_result(self, returncode, returnsignal, output, isTimeout):
        # parse output
        # Last matching line wins; 'UNSAFE' is tested first since it
        # contains the substring 'SAFE'.
        status = result.RESULT_UNKNOWN
        for line in output:
            if 'UNSAFE' in line:
                status = result.RESULT_FALSE_PROP
            elif 'SAFE' in line:
                status = result.RESULT_TRUE_PROP
        return status
| apache-2.0 | Python |
199761c550e80f8e8c2882a09694f2bde08c8d75 | Remove get_repo(). | s3rvac/git-branch-viewer,s3rvac/git-branch-viewer | viewer/web/views.py | viewer/web/views.py | """
viewer.web.views
~~~~~~~~~~~~~~~~
Views for the web.
:copyright: © 2014 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: BSD, see LICENSE for more details
"""
from flask import render_template
from flask import g
from viewer import git
from . import app
@app.before_request
def before_request():
    """Expose the configured git repository as flask.g.repo for this request."""
    repo_path = app.config['GIT_REPO_PATH']
    g.repo = git.Repo(repo_path)
@app.route('/')
def index():
    """Render the branch listing for the configured remote."""
    remote = app.config['GIT_REMOTE']
    branches = g.repo.get_branches_on_remote(remote)
    return render_template('index.html', repo_name=g.repo.name, branches=branches)
| """
viewer.web.views
~~~~~~~~~~~~~~~~
Views for the web.
:copyright: © 2014 by Petr Zemek <s3rvac@gmail.com> and contributors
:license: BSD, see LICENSE for more details
"""
from flask import render_template
from flask import g
from viewer import git
from . import app
def get_repo():
    # Build a Repo for the configured repository path (called once per request).
    return git.Repo(app.config['GIT_REPO_PATH'])
@app.before_request
def before_request():
    # Attach a fresh Repo to flask.g before each request.
    g.repo = get_repo()
@app.route('/')
def index():
    # Branch listing for the configured remote.
    branches = g.repo.get_branches_on_remote(app.config['GIT_REMOTE'])
    return render_template('index.html', repo_name=g.repo.name, branches=branches)
| bsd-3-clause | Python |
805ba9d93e04ed3991d8d00132e439df02eaff79 | Add import_from_dir() function | bennoleslie/pyutil | util.py | util.py | """Snipppets of potentially reusable code that don't deserve their
own library."""
import bisect
class Location:
    """A (name, line, column) position within a named text source."""

    def __init__(self, name, line, col):
        self.name = name
        self.line = line
        self.col = col

    def __str__(self):
        # Rendered as "name:line.col", e.g. "<string>:3.7".
        return "{0}:{1}.{2}".format(self.name, self.line, self.col)


class Locator:
    """Translate absolute character offsets in a string into Location objects."""

    def __init__(self, data, name='<string>'):
        self.name = name
        # Cumulative start offset of each (1-based) line.
        offsets = [0]
        for chunk in data.splitlines(True):
            offsets.append(offsets[-1] + len(chunk))
        self.line_offsets = offsets

    def locate(self, offset):
        """Return the Location (1-based line, 0-based column) for *offset*."""
        line_no = bisect.bisect_right(self.line_offsets, offset)
        column = offset - self.line_offsets[line_no - 1]
        return Location(self.name, line_no, column)
def import_from_dir(module_name, dir_name):
    """Import a module from a specific directory.

    Sometimes you might want to load a package from a specific
    directory, for example when loading a plugin.  The import search
    path is restricted to `dir_name` for the duration of the import, so
    only modules from that directory can be loaded and no same-named
    module from elsewhere can shadow it.

    After loading, the module is removed from sys.modules (and any
    previously imported module of the same name is restored) to avoid
    other namespace clashes.
    """
    import sys  # BUG FIX: `sys` was never imported anywhere, so this raised NameError.
    saved_sys_path = sys.path
    saved_module = sys.modules.get(module_name)
    # BUG FIX: dir_name was previously unused, so the "specific directory"
    # restriction promised by the docstring never actually happened.
    sys.path = [dir_name]
    try:
        return __import__(module_name)
    finally:
        sys.path = saved_sys_path
        if saved_module is not None:
            sys.modules[module_name] = saved_module
        else:
            # pop() tolerates an import that failed before registration
            # (the original `del` could raise KeyError here).
            sys.modules.pop(module_name, None)
| """Snipppets of potentially reusable code that don't deserve their
own library."""
import bisect
class Location:
    # A (name, line, column) position within a named text source.
    def __init__(self, name, line, col):
        self.name = name
        self.line = line
        self.col = col
    def __str__(self):
        # Rendered as "name:line.col", e.g. "<string>:3.7".
        return "{}:{}.{}".format(self.name, self.line, self.col)
class Locator:
    """Locator provides a way to convert an absolute offset in a
    string into a Location object.
    """
    def __init__(self, data, name='<string>'):
        self.name = name
        # Cumulative start offset of each line; line N (1-based) starts at
        # line_offsets[N - 1].
        self.line_offsets = [0]
        for line_len in map(len, data.splitlines(True)):
            self.line_offsets.append(self.line_offsets[-1] + line_len)
    def locate(self, offset):
        """Return a Location() object for the given offset."""
        # bisect_right yields the 1-based line whose span contains offset.
        line = bisect.bisect_right(self.line_offsets, offset)
        col = offset - self.line_offsets[line - 1]
        return Location(self.name, line, col)
| mit | Python |
88f57982d8f93bb9e4d6915081dabaa143708419 | Fix method of getting server version, add docstring | igboyes/virtool,virtool/virtool,virtool/virtool,igboyes/virtool | virtool/db/mongo.py | virtool/db/mongo.py | import logging
import sys
from typing import Any, Awaitable, Callable, Dict, List
import pymongo.errors
import semver
from motor.motor_asyncio import AsyncIOMotorClient
import virtool.db.core
import virtool.db.utils
MINIMUM_MONGO_VERSION = "3.6.0"
logger = logging.getLogger("mongo")
async def connect(
        config: Dict[str, Any],
        enqueue_change: Callable[[str, str, List[str]], Awaitable[None]]
):
    """
    Connect to a MongoDB server and return an application database object.

    Exits the process (status 1) when the server cannot be reached within
    the selection timeout or is older than the supported minimum version.

    :param config: the application's configuration dictionary
    :param enqueue_change: a function that can to report change to the database
    """
    db_client = AsyncIOMotorClient(
        config["db_connection_string"],
        serverSelectionTimeoutMS=6000
    )
    # Any round-trip forces server selection, proving the server is reachable.
    try:
        await db_client.list_database_names()
    except pymongo.errors.ServerSelectionTimeoutError:
        logger.critical("Could not connect to MongoDB server")
        sys.exit(1)
    await check_mongo_version(db_client)
    db = db_client[config["db_name"]]
    return virtool.db.core.DB(
        db,
        enqueue_change
    )
async def check_mongo_version(db: AsyncIOMotorClient):
    """
    Check the MongoDB version. Log a critical error and exit if it is too old.

    :param db: the application database object
    """
    server_version = await get_server_version(db)
    # semver.compare returns -1 when its first argument is the older version.
    if semver.compare(server_version, MINIMUM_MONGO_VERSION) == -1:
        logger.critical(
            f"Virtool requires MongoDB {MINIMUM_MONGO_VERSION}. Found {server_version}."
        )
        sys.exit(1)
    logger.info(f"Found MongoDB {server_version}")
async def get_server_version(db: AsyncIOMotorClient) -> str:
    """
    Gets a server version string from the running MongoDB client.

    :param db: the application database object
    :return: MongoDB server version in string format
    """
    # NOTE(review): this reaches through db.motor_client.client, implying the
    # argument is a wrapper exposing the Motor client, while connect() above
    # passes the AsyncIOMotorClient itself — confirm which object callers pass.
    return (await db.motor_client.client.server_info())["version"]
| import logging
import sys
from typing import Any, Awaitable, Callable, Dict, List
import pymongo.errors
import semver
from motor.motor_asyncio import AsyncIOMotorClient
import virtool.db.core
import virtool.db.utils
MINIMUM_MONGO_VERSION = "3.6.0"
logger = logging.getLogger("mongo")
async def connect(
        config: Dict[str, Any],
        enqueue_change: Callable[[str, str, List[str]], Awaitable[None]]
):
    """
    Connect to a MongoDB server and return an application database object.

    Exits the process (status 1) when the server cannot be reached within
    the selection timeout or is older than the supported minimum version.

    :param config: the application's configuration dictionary
    :param enqueue_change: a function that can to report change to the database
    """
    db_client = AsyncIOMotorClient(
        config["db_connection_string"],
        serverSelectionTimeoutMS=6000
    )
    # Any round-trip forces server selection, proving the server is reachable.
    try:
        await db_client.list_database_names()
    except pymongo.errors.ServerSelectionTimeoutError:
        logger.critical("Could not connect to MongoDB server")
        sys.exit(1)
    await check_mongo_version(db_client)
    db = db_client[config["db_name"]]
    return virtool.db.core.DB(
        db,
        enqueue_change
    )
async def check_mongo_version(db: AsyncIOMotorClient):
    """
    Check the MongoDB version. Log a critical error and exit if it is too old.

    :param db: the application database object
    """
    server_version = await get_server_version(db)
    # semver.compare returns -1 when its first argument is the older version.
    if semver.compare(server_version, MINIMUM_MONGO_VERSION) == -1:
        logger.critical(
            f"Virtool requires MongoDB {MINIMUM_MONGO_VERSION}. Found {server_version}."
        )
        sys.exit(1)
    logger.info(f"Found MongoDB {server_version}")
async def get_server_version(db: AsyncIOMotorClient) -> str:
    # Version string (e.g. "4.0.2") taken from the server's buildInfo document.
    return (await db.server_info())["version"]
| mit | Python |
566e7b17d220eec143487f6d9ea7db647c557b10 | Update variantPipeline.py | lauringlab/variant_pipeline,lauringlab/variant_pipeline,lauringlab/variant_pipeline,lauringlab/variant_pipeline | bin/variantPipeline.py | bin/variantPipeline.py | import os
import os.path
import argparse
import shutil
import subprocess as s
import sys
# Command-line interface: input fastq dir, output dir, bowtie reference,
# plasmid-control sample name, and a dry-run ("test mode") switch.
parser = argparse.ArgumentParser(description='This is a wrapper to set up and run the bpipe command')
parser.add_argument('-i',action='store',dest='input_dir',help='Directory containing the input fastqs')
parser.add_argument('-o',action='store',dest='output_dir',help='The final directory that will hold the output. If it does\'t exsit it will be made')
parser.add_argument('-r',action='store',dest='ref',help='The name of the reference files used for bowtie alignment')
parser.add_argument('-p',action='store',dest='control',help='The sample name of the plasmid control used for varinat calling')
parser.add_argument('-t',action='store_true',dest='test',default=False,help='Boolean switch to run program in test mode. Everything will be set up but bpipe will run in test mode')
args=parser.parse_args()
# Resolve everything to absolute paths up front, since we chdir() below.
input_dir=os.path.abspath(args.input_dir)
output_dir=os.path.abspath(args.output_dir)
ref=os.path.abspath(args.ref)
control=args.control
# The pipeline scripts and the bundled bpipe live relative to this file.
bin_dir=os.path.dirname(os.path.realpath(__file__))
script_dir=os.path.abspath(bin_dir+'/..'+'/scripts/')
lib_dir=os.path.abspath(bin_dir+'/..'+'/lib/')
bpipe_command=lib_dir+'/bpipe-0.9.8.7/bin/bpipe'
test=args.test
print "Processing fastqs from " + input_dir
print "Results will be saved to " + output_dir
print "Using " + ref +" for a reference and \n" + control + " as the control sample"
# Stage the output directory and copy the bpipe pipeline definitions into it.
if not os.path.exists(output_dir):
    os.makedirs(output_dir)
os.chdir(output_dir)
shutil.copy(script_dir+'/variantPipeline.bpipe.stages.groovy',output_dir)
shutil.copy(script_dir+'/variantPipeline.bpipe.groovy',output_dir)
# add variables to config reference to config file
with open(output_dir+'/variantPipeline.bpipe.config.groovy','w') as config:
    config.write('REFERENCE='+'\"'+ ref+ '\"'+'\n')
    config.write('REFERENCE_FA='+ '\"'+ref+ '.fa' '\"'+'\n')
    config.write('SCRIPTS='+ '\"'+script_dir+ '\"'+'\n')
    config.write('LIBRARY_LOCATION='+ '\"'+lib_dir+'\"'+ '\n')
    config.write('CONTROL='+ '\"'+control+ '\"'+'\n')
#throttled to 8 processors to be a good neighbor.
#note that running unthrottled can result in errors when bpipe overallocates threads/memory
if test==False:
    command= bpipe_command + " run -n 8 " + output_dir + "/variantPipeline.bpipe.groovy " + input_dir + "/*.fastq"
else:
    command=bpipe_command + " test -n 8 " + output_dir + "/variantPipeline.bpipe.groovy " + input_dir +"/*.fastq"
print "submitting command: \n"+command
#p = s.Popen("time", stdin=s.PIPE, stdout=s.PIPE, stderr=s.STDOUT)
#p.stdin.write(command)
#out, err = p.communicate()
# NOTE(review): shell=True is needed for the *.fastq glob, but it means the
# user-supplied paths are interpreted by the shell — confirm inputs are trusted.
s.call(command,shell=True)#, stdout=subprocess.PIPE)
#output = process.communicate()[0]
sys.exit(0)
| import os
import os.path
import argparse
import shutil
import subprocess as s
import sys
# Command-line interface: input fastq dir, output dir, bowtie reference,
# plasmid-control sample name, and a dry-run ("test mode") switch.
parser = argparse.ArgumentParser(description='This is a wrapper to set up and run the bpipe command')
parser.add_argument('-i',action='store',dest='input_dir',help='Directory containing the input fastqs')
parser.add_argument('-o',action='store',dest='output_dir',help='The final directory that will hold the output. If it does\'t exsit it will be made')
parser.add_argument('-r',action='store',dest='ref',help='The name of the reference files used for bowtie alignment')
parser.add_argument('-p',action='store',dest='control',help='The sample name of the plasmid control used for varinat calling')
parser.add_argument('-t',action='store_true',dest='test',default=False,help='Boolean switch to run program in test mode. Everything will be set up but bpipe will run in test mode')
args=parser.parse_args()
# Resolve everything to absolute paths up front, since we chdir() below.
input_dir=os.path.abspath(args.input_dir)
output_dir=os.path.abspath(args.output_dir)
ref=os.path.abspath(args.ref)
control=args.control
# The pipeline scripts and the bundled bpipe live relative to this file.
bin_dir=os.path.dirname(os.path.realpath(__file__))
script_dir=os.path.abspath(bin_dir+'/..'+'/scripts/')
lib_dir=os.path.abspath(bin_dir+'/..'+'/lib/')
bpipe_command=lib_dir+'/bpipe-0.9.8.7/bin/bpipe'
test=args.test
print "Processing fastqs from " + input_dir
print "Results will be saved to " + output_dir
print "Using " + ref +" for a reference and \n" + control + " as the control sample"
# Stage the output directory and copy the bpipe pipeline definitions into it.
if not os.path.exists(output_dir):
    os.makedirs(output_dir)
os.chdir(output_dir)
shutil.copy(script_dir+'/variantPipeline.bpipe.stages.groovy',output_dir)
shutil.copy(script_dir+'/variantPipeline.bpipe.groovy',output_dir)
# add variables to config reference to config file
with open(output_dir+'/variantPipeline.bpipe.config.groovy','w') as config:
    config.write('REFERENCE='+'\"'+ ref+ '\"'+'\n')
    config.write('REFERENCE_FA='+ '\"'+ref+ '.fa' '\"'+'\n')
    config.write('SCRIPTS='+ '\"'+script_dir+ '\"'+'\n')
    config.write('LIBRARY_LOCATION='+ '\"'+lib_dir+'\"'+ '\n')
    config.write('CONTROL='+ '\"'+control+ '\"'+'\n')
#throttled to 8 processors to be a good neighbor.
#note that running unthrottled can result in errors when bpipe overallocates threads/memory
if test==False:
    # -r additionally produces an HTML run report.
    command= bpipe_command + " run -n 8 -r " + output_dir + "/variantPipeline.bpipe.groovy " + input_dir + "/*.fastq"
else:
    command=bpipe_command + " test -n 8 " + output_dir + "/variantPipeline.bpipe.groovy " + input_dir +"/*.fastq"
print "submitting command: \n"+command
#p = s.Popen("time", stdin=s.PIPE, stdout=s.PIPE, stderr=s.STDOUT)
#p.stdin.write(command)
#out, err = p.communicate()
# NOTE(review): shell=True is needed for the *.fastq glob, but it means the
# user-supplied paths are interpreted by the shell — confirm inputs are trusted.
s.call(command,shell=True)#, stdout=subprocess.PIPE)
#output = process.communicate()[0]
sys.exit(0)
| apache-2.0 | Python |
85eb6a24aa65c6967e30a9c3f9cbbd84e124140c | Remove comma from docstring | gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com | bin/verify-identity.py | bin/verify-identity.py | #!/usr/bin/env python
"""verify-identity.py <participant_id> <country_code>
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
from gratipay import wireup
from gratipay.models.participant import Participant
from gratipay.models.country import Country
# Wire the model classes to the database using the current environment.
wireup.db(wireup.env())
# Usage: verify-identity.py <participant_id> <country_code>
participant = Participant.from_id(int(sys.argv[1]))
country = Country.from_code(sys.argv[2])
# Mark the participant's identity as verified for that country.
participant.set_identity_verification(country.id, True)
| #!/usr/bin/env python
"""verify-identity.py <participant_id>, <country_code>
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
from gratipay import wireup
from gratipay.models.participant import Participant
from gratipay.models.country import Country
wireup.db(wireup.env())
participant = Participant.from_id(int(sys.argv[1]))
country = Country.from_code(sys.argv[2])
participant.set_identity_verification(country.id, True)
| mit | Python |
db6ee1cdcc31c8c1c2899d100cb0d1a9baf0e71e | Remove old initializers | admiyo/keystone,kwss/keystone,openstack/keystone,maestro-hybrid-cloud/keystone,cbrucks/keystone_ldap,pvo/keystone,sileht/deb-openstack-keystone,citrix-openstack-build/keystone,ajayaa/keystone,nuxeh/keystone,jonnary/keystone,roopali8/keystone,cbrucks/Federated_Keystone,jamielennox/keystone,himanshu-setia/keystone,roopali8/keystone,MaheshIBM/keystone,savi-dev/keystone,vivekdhayaal/keystone,promptworks/keystone,reeshupatel/demo,jumpstarter-io/keystone,rushiagr/keystone,ntt-sic/keystone,dims/keystone,klmitch/keystone,klmitch/keystone,UTSA-ICS/keystone-kerberos,rajalokan/keystone,cloudbau/keystone,cbrucks/Federated_Keystone,dstanek/keystone,savi-dev/keystone,cloudbau/keystone,ilay09/keystone,rodrigods/keystone,ntt-pf-lab/backup_keystone,ntt-sic/keystone,UTSA-ICS/keystone-kerberos,reeshupatel/demo,kwss/keystone,dims/keystone,dsiddharth/access-keys,pvo/keystone,nuxeh/keystone,rickerc/keystone_audit,ging/keystone,cbrucks/keystone_ldap,derekchiang/keystone,rushiagr/keystone,idjaw/keystone,citrix-openstack-build/keystone,takeshineshiro/keystone,sileht/deb-openstack-keystone,blueboxgroup/keystone,ntt-sic/keystone,ajayaa/keystone,rickerc/keystone_audit,vivekdhayaal/keystone,JioCloud/keystone,rushiagr/keystone,idjaw/keystone,openstack/keystone,derekchiang/keystone,JioCloud/keystone,blueboxgroup/keystone,ilay09/keystone,townbull/keystone-dtrust,jumpstarter-io/keystone,savi-dev/keystone,cernops/keystone,sileht/deb-openstack-keystone,derekchiang/keystone,jamielennox/keystone,dsiddharth/access-keys,maestro-hybrid-cloud/keystone,townbull/keystone-dtrust,cbrucks/Federated_Keystone,rajalokan/keystone,nuxeh/keystone,vivekdhayaal/keystone,himanshu-setia/keystone,mahak/keystone,dstanek/keystone,reeshupatel/demo,jumpstarter-io/keystone,promptworks/keystone,pvo/keystone,dstanek/keystone,townbull/keystone-dtrust,ntt-pf-lab/backup_keystone,rajalokan/keystone,mahak/keystone,openstack/keystone,admiyo/keystone,cernops/keystone,
ging/keystone,ilay09/keystone,admiyo/keystone,citrix-openstack-build/keystone,mahak/keystone,kwss/keystone,cloudbau/keystone,promptworks/keystone,ntt-pf-lab/backup_keystone,cbrucks/keystone_ldap,MaheshIBM/keystone,jonnary/keystone,rodrigods/keystone,dsiddharth/access-keys,takeshineshiro/keystone,rickerc/keystone_audit | keystone/__init__.py | keystone/__init__.py | # Copyright (C) 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Main module for Keystone service. This installs the gettext function
for all sub modules and packages.'''
import gettext
__version__ = '0.9'
# This installs the _(...) function as a built-in so all other modules
# don't need to.
gettext.install('keystone')
def version():
    """Return the Keystone package version string (module-level ``__version__``)."""
    return __version__
| # Copyright (C) 2011 OpenStack LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Main module for Keystone service. This installs the gettext function
for all sub modules and packages.'''
import gettext
__version__ = '0.9'
# This installs the _(...) function as a built-in so all other modules
# don't need to.
gettext.install('keystone')
def version():
return __version__
'''
TODO(Ziad): Commenting out so we don't load this always - remove eventually
#TOKEN AUTH
from auth_protocols.auth_token \
import filter_factory as tokenauth_factory
#BASIC AUTH
from auth_protocols.auth_basic \
import filter_factory as basicauth_factory
#OPENID AUTH
from auth_protocols.auth_openid \
import filter_factory as openidauth_factory
#Remote Auth handler
from middleware.remoteauth \
import filter_factory as remoteauth_factory
'''
| apache-2.0 | Python |
7b9f84d365ef0eeb43238fc6779c6d0adfe29ac7 | clean up imports | pteichman/kibot-modules | kibot/modules/woo.py | kibot/modules/woo.py | import kibot.BaseModule
import random
import re
from kibot.irclib import nm_to_n
class woo(kibot.BaseModule.BaseModule):
    """Reply enthusiastically when somebody says "woo" in a channel."""

    def __init__(self, bot):
        random.seed()
        kibot.BaseModule.BaseModule.__init__(self, bot)

    def _on_pubmsg(self, conn, event):
        """Scan a public message for a "woo" and sometimes echo it back."""
        raw = event.args[0]
        # Strip any <nick>-style prefixes before parsing.
        raw = re.sub("<\S+>", "", raw)
        addressed = re.match("\s*(\S+)\s*[,:]\s*(.*?)\s*$", raw)
        if addressed:
            to = addressed.group(1)
            text = addressed.group(2)
        else:
            to = None
            text = raw

        speaker = nm_to_n(event.source)

        hit = re.search("\\bw(oo+)s?\\b([!.?,])?", text.lower())
        # forecast is 70% chance of woo
        if hit and random.random() < 0.7:
            trailing = {"!" : "!",
                        "." : ".",
                        "?" : "!"}
            reply = "w%s%s" % (hit.group(1), trailing.get(hit.group(2), ""))
            if random.random() < 0.1:
                # be extra vocal sometimes
                reply = "w%s%s" % (hit.group(1) * 2, trailing.get(hit.group(2), ""))
            conn.privmsg(event.target, reply)
| import kibot.BaseModule
import random
from kibot.irclib import nm_to_n
class woo(kibot.BaseModule.BaseModule):
def __init__(self, bot):
random.seed()
kibot.BaseModule.BaseModule.__init__(self, bot)
def _on_pubmsg(self, conn, event):
import string, re
message = event.args[0]
message = re.sub("<\S+>", "", message)
match = re.match("\s*(\S+)\s*[,:]\s*(.*?)\s*$", message)
if match:
to = match.group(1)
text = match.group(2)
else:
to = None
text = message
speaker = nm_to_n(event.source)
m = re.search("\\bw(oo+)s?\\b([!.?,])?", text.lower())
# forecast is 70% chance of woo
chance = 0.7
if m and random.random() < chance:
punc = {"!" : "!",
"." : ".",
"?" : "!"}
woo = "w%s%s" % (m.group(1), punc.get(m.group(2), ""))
if random.random() < 0.1:
# be extra vocal sometimes
woo = "w%s%s" % (m.group(1)*2, punc.get(m.group(2), ""))
conn.privmsg(event.target, woo)
| mit | Python |
3df89cb9705b3475132940e50d15e422ec3ea7ab | fix itertools import for mapproxy-util scales --repeat | drnextgis/mapproxy,camptocamp/mapproxy,mapproxy/mapproxy,olt/mapproxy,vrsource/mapproxy,mapproxy/mapproxy,vrsource/mapproxy,camptocamp/mapproxy,olt/mapproxy,drnextgis/mapproxy | mapproxy/compat/itertools.py | mapproxy/compat/itertools.py | from __future__ import absolute_import
import sys
PY2 = sys.version_info[0] == 2
PY3 = not PY2
if PY2:
from itertools import (
izip,
izip_longest,
imap,
islice,
chain,
groupby,
cycle,
)
else:
izip = zip
imap = map
from itertools import (
zip_longest as izip_longest,
islice,
chain,
groupby,
cycle,
)
| from __future__ import absolute_import
import sys
PY2 = sys.version_info[0] == 2
PY3 = not PY2
if PY2:
from itertools import (
izip,
izip_longest,
imap,
islice,
chain,
groupby,
)
else:
izip = zip
imap = map
from itertools import (
zip_longest as izip_longest,
islice,
chain,
groupby,
)
| apache-2.0 | Python |
551b590115da4d6eac356b7f94ec6f2197da3ce0 | bump version | benzid-wael/djangorestframework-utils | django_rest_utils/__init__.py | django_rest_utils/__init__.py | __version__ = '0.0.1'
| __version__ = '0.0.0'
| isc | Python |
11e64afa2192a8e71cba9357f332f4e8edd66bba | bump to 2.1.2 | BlokeOne/premailer-1,lavr/premailer,kengruven/premailer,BlokeOne/premailer-1,peterbe/premailer,kengruven/premailer,graingert/premailer,industrydive/premailer,graingert/premailer,industrydive/premailer,ionelmc/premailer,ionelmc/premailer,peterbe/premailer,peterbe/premailer,lavr/premailer | premailer/__init__.py | premailer/__init__.py | from premailer import Premailer, transform
__version__ = '2.1.2'
| from premailer import Premailer, transform
__version__ = '2.1.1'
| bsd-3-clause | Python |
2ef86fa9ecc0c6f8ec8480a545123e66fdb90af5 | Improve unicode parsing. From https://github.com/bmander/gtfs/commit/e0390740ef951bebc2e1a5448abc68e51891833d | gpichot/pygtfs,jarondl/pygtfs,howeik/pygtfs | gtfs/feed.py | gtfs/feed.py | from codecs import iterdecode
from zipfile import ZipFile
import os
import csv
class CSV(object):
"""A CSV file."""
def __init__(self, header, rows):
self.header = header
self.rows = rows
def __repr__(self):
return '<CSV %s>' % self.header
def __iter__(self):
return self
def next(self):
return dict(zip(self.header, self.rows.next()))
class Feed(object):
    """A collection of CSV files with headers, either zipped into an archive
    or loose in a folder."""
    def __init__(self, filename):
        # filename may name a directory of loose CSVs or a zip archive.
        self.filename = filename
        self.zf = None
        if not os.path.isdir(filename):
            # Not a directory, so assume it is a zip archive.
            self.zf = ZipFile(filename)
    def __repr__(self):
        return '<Feed %s>' % self.filename
    def unicode_csv_reader(self, file_handle, encoding='utf-8'):
        # Python 2's csv.reader cannot consume unicode directly: decode the
        # raw input, re-encode it for the parser, then decode each cell back
        # so callers always receive unicode values.
        reader = csv.reader([x.encode(encoding) for x in iterdecode(file_handle, encoding)])
        for row in reader:
            yield [unicode(x, encoding) for x in row]
        return
    def reader(self, filename, encoding='utf-8'):
        # Return an iterator of decoded rows for one member file, whether it
        # lives in the zip archive or in the folder.
        if self.zf:
            try:
                file_handle = self.zf.read(filename).split('\n')
            except IOError:
                raise IOError('%s is not present in feed' % filename)
        else:
            file_handle = open(os.path.join(self.filename, filename))
        return self.unicode_csv_reader(file_handle, encoding)
    def read_table(self, filename):
        # The first row is the header; the rest of the iterator feeds CSV lazily.
        rows = self.reader(filename)
        return CSV(header=rows.next(), rows=rows)
| from codecs import iterdecode
from zipfile import ZipFile
import os
import csv
class CSV(object):
"""A CSV file."""
def __init__(self, header, rows):
self.header = header
self.rows = rows
def __repr__(self):
return '<CSV %s>' % self.header
def __iter__(self):
return self
def next(self):
return dict(zip(self.header, self.rows.next()))
class Feed(object):
"""A collection of CSV files with headers, either zipped into an archive
or loose in a folder."""
def __init__(self, filename):
self.filename = filename
self.zf = None
if not os.path.isdir(filename):
self.zf = ZipFile(filename)
def __repr__(self):
return '<Feed %s>' % self.filename
def reader(self, filename, encoding='utf-8'):
if self.zf:
try:
file_handle = self.zf.read(filename).split('\n')
except IOError:
raise IOError('%s is not present in feed' % filename)
else:
file_handle = open(os.path.join(self.filename, filename))
return csv.reader(iterdecode(file_handle, encoding))
def read_table(self, filename):
rows = self.reader(filename)
return CSV(header=rows.next(), rows=rows)
| mit | Python |
59b09f9251c5a41f5362e2d6e1438f2c33274f89 | Bump version | marteinn/The-Big-Username-Blacklist-Python | the_big_username_blacklist/__init__.py | the_big_username_blacklist/__init__.py | # -*- coding: utf-8 -*-
"""
the-big-username-blacklist
--------------------------
A opinionated username blacklist
"""
__title__ = "the_big_username_blacklist"
__version__ = "1.5.2"
__build__ = 152
__author__ = "Martin Sandström"
__license__ = "MIT"
__copyright__ = "Copyright 2015-2018 Martin Sandström"
from .validator import validate, get_blacklist # NOQA
| # -*- coding: utf-8 -*-
"""
the-big-username-blacklist
--------------------------
A opinionated username blacklist
"""
__title__ = "the_big_username_blacklist"
__version__ = "1.5.0"
__build__ = 150
__author__ = "Martin Sandström"
__license__ = "MIT"
__copyright__ = "Copyright 2015-2017 Martin Sandström"
from .validator import validate, get_blacklist # NOQA
| mit | Python |
7a3f8adf66ce2de398ad7168a11ecaa526c26db5 | Check content of public IP response against a list of valid characters. | jaymed/python-blessclient | blessclient/user_ip.py | blessclient/user_ip.py | import contextlib
import logging
import string
import time
from urllib2 import urlopen
VALID_IP_CHARACTERS = string.hexdigits + '.:'
class UserIP(object):
    """Determine the client's current public IP, with persistent caching."""

    def __init__(self, bless_cache, maxcachetime, ip_urls, fixed_ip=False):
        self.fresh = False
        self.currentIP = None
        self.cache = bless_cache
        self.maxcachetime = maxcachetime
        self.ip_urls = ip_urls
        if fixed_ip:
            # A caller-supplied fixed IP never needs refreshing.
            self.currentIP = fixed_ip
            self.fresh = True

    def getIP(self):
        """Return the public IP: in-memory value, then cache, then refresh."""
        if self.fresh and self.currentIP:
            return self.currentIP
        cached_ip = self.cache.get('lastip')
        cached_at = self.cache.get('lastipchecktime')
        if cached_at and cached_at + self.maxcachetime > time.time():
            return cached_ip
        self._refreshIP()
        return self.currentIP

    def _refreshIP(self):
        """Fetch a fresh public IP from the configured URLs and cache it."""
        logging.debug("Getting current public IP")
        ip = None
        for url in self.ip_urls:
            ip = self._fetchIP(url)
            if ip:
                break
        if not ip:
            raise Exception('Could not refresh public IP')
        self.currentIP = ip
        self.fresh = True
        self.cache.set('lastip', self.currentIP)
        self.cache.set('lastipchecktime', time.time())
        self.cache.save()

    def _fetchIP(self, url):
        """Fetch the public IP from one URL; return None on any failure."""
        try:
            with contextlib.closing(urlopen(url, timeout=2)) as response:
                if response.getcode() == 200:
                    content = response.read().strip()
                    # Sanity-check the response so arbitrary HTML/error pages
                    # are never accepted as an IP address.
                    for c in content[:40]:
                        if c not in VALID_IP_CHARACTERS:
                            raise ValueError("Public IP response included invalid character '{}'.".format(c))
                    return content
        except:
            logging.debug('Could not refresh public IP from {}'.format(url), exc_info=True)
        return None
| import contextlib
import logging
import time
from urllib2 import urlopen
class UserIP(object):
def __init__(self, bless_cache, maxcachetime, ip_urls, fixed_ip=False):
self.fresh = False
self.currentIP = None
self.cache = bless_cache
self.maxcachetime = maxcachetime
self.ip_urls = ip_urls
if fixed_ip:
self.currentIP = fixed_ip
self.fresh = True
def getIP(self):
if self.fresh and self.currentIP:
return self.currentIP
lastip = self.cache.get('lastip')
lastiptime = self.cache.get('lastipchecktime')
if lastiptime and lastiptime + self.maxcachetime > time.time():
return lastip
self._refreshIP()
return self.currentIP
def _refreshIP(self):
logging.debug("Getting current public IP")
ip = None
for url in self.ip_urls:
if ip:
break
else:
ip = self._fetchIP(url)
if not ip:
raise Exception('Could not refresh public IP')
self.currentIP = ip
self.fresh = True
self.cache.set('lastip', self.currentIP)
self.cache.set('lastipchecktime', time.time())
self.cache.save()
def _fetchIP(self, url):
try:
with contextlib.closing(urlopen(url, timeout=2)) as f:
if f.getcode() == 200:
return f.read().strip()
except:
logging.debug('Could not refresh public IP from {}'.format(url), exc_info=True)
return None
| apache-2.0 | Python |
7c816832aa57065cd37100ebd088ad49782ecfa6 | Fix names displayed in admin | jesuejunior/golingo,jesuejunior/golingo,jesuejunior/golingo | quiz/admin.py | quiz/admin.py | __author__ = 'jesuejunior'
from quiz.models import Question, Lesson, Answer, Unity, Media, Result
from django.contrib import admin
class UnityAdmin(admin.ModelAdmin):
    # Changelist columns, sidebar filters and searchable fields for Unity.
    list_display = ('id', 'number', 'name', 'description', )
    list_filter = ('number', 'name', 'description', )
    search_fields = ('id', 'number', 'name', 'description', )
admin.site.register(Unity, UnityAdmin)
class AnswerAdmin(admin.ModelAdmin):
    list_display = ('id', 'name', 'extra')
    list_filter = ('name', 'extra',)
admin.site.register(Answer, AnswerAdmin)
class LessonAdmin(admin.ModelAdmin):
    list_display = ('id', 'name',)
    list_filter = ('name', )
admin.site.register(Lesson, LessonAdmin)
class MediaAdmin(admin.ModelAdmin):
    list_display = ('id', 'name', 'url', )
    list_filter = ('name', )
admin.site.register(Media, MediaAdmin)
class QuestionAdmin(admin.ModelAdmin):
    list_display = ('id', 'name', 'answer_correct', 'lesson', 'audio', 'image')
    list_filter = ('name', 'lesson')
    ordering = ['id']
    search_fields = ('id', 'name', 'answer_correct', 'lesson', )
    # Many-to-many answers get the two-pane selector widget.
    filter_horizontal = ('answers', )
    def lookup_allowed(self, lookup, value):
        # Accept any lookup parameter in the changelist URL.
        return True
admin.site.register(Question, QuestionAdmin)
class ResultAdmin(admin.ModelAdmin):
    list_display = ('id', 'user', 'lesson', 'correct', 'wrong', 'total')
    list_filter = ('user', 'lesson', 'correct', 'wrong', 'total', )
admin.site.register(Result, ResultAdmin) | __author__ = 'jesuejunior'
from quiz.models import Question, Lesson, Answer, Unity, Media, Result
from django.contrib import admin
class UnityAdmin(admin.ModelAdmin):
list_display = ('id', 'number', 'name', 'description', )
list_filter = ('number', 'name', 'description', )
search_fields = ('id', 'number', 'name', 'description', )
admin.site.register(Unity, UnityAdmin)
class AnswerAdmin(admin.ModelAdmin):
list_display = ('id', 'name', )
list_filter = ('name',)
admin.site.register(Answer, AnswerAdmin)
class LessonAdmin(admin.ModelAdmin):
list_display = ('id', 'name',)
list_filter = ('name', )
admin.site.register(Lesson, LessonAdmin)
class MediaAdmin(admin.ModelAdmin):
list_display = ('id', 'name', 'url', )
list_filter = ('name', )
admin.site.register(Media, MediaAdmin)
class QuestionAdmin(admin.ModelAdmin):
list_display = ('id', 'name', 'answer_correct', 'lesson', 'audio', 'image')
list_filter = ('name', 'lesson')
ordering = ['id']
search_fields = ('id', 'name', 'answer_correct', 'lesson', )
filter_horizontal = ('answers', )
def lookup_allowed(self, lookup, value):
return True
admin.site.register(Question, QuestionAdmin)
class ResultAdmin(admin.ModelAdmin):
list_display = ('id', 'user', 'lesson', 'correct', 'wrong', 'total')
list_filter = ('user', 'lesson', 'correct', 'wrong', 'total', )
admin.site.register(Result, ResultAdmin) | bsd-3-clause | Python |
73ff56f4b8859e82b0d69a6505c982e26de27859 | Add randcolor function to uitl | joseph346/cellular | util.py | util.py | import colorsys
import random
def randcolor():
    """Return a random RGB tuple with high saturation and brightness."""
    h = random.random()
    s = random.randint(700, 1000) / 1000
    v = random.randint(700, 1000) / 1000
    r, g, b = colorsys.hsv_to_rgb(h, s, v)
    return tuple(int(channel * 255) for channel in (r, g, b))
def product(nums):
    """Return the product of all values in *nums* (1 for an empty iterable)."""
    result = 1
    for value in nums:
        result = result * value
    return result
def choose(n, k):
    """Return the binomial coefficient C(n, k); 0 when k is out of range."""
    if k < 0 or k > n:
        return 0
    numerator = 1
    denominator = 1
    # Multiply over the smaller of k and n-k factors for fewer iterations.
    for step in range(1, min(k, n - k) + 1):
        numerator *= n
        denominator *= step
        n -= 1
    return numerator // denominator
def format_floats(floats):
    """Format a sequence of floats as space-separated 10.08f fields."""
    return ' '.join('{:10.08f}'.format(value) for value in floats)
|
def product(nums):
r = 1
for n in nums:
r *= n
return r
def choose(n, k):
if 0 <= k <= n:
ntok = 1
ktok = 1
for t in range(1, min(k, n - k) + 1):
ntok *= n
ktok *= t
n -= 1
return ntok // ktok
else:
return 0
def format_floats(floats):
fstr = ' '.join('{:10.08f}' for _ in floats)
return fstr.format(*floats)
| unlicense | Python |
e463cff89aa244e2241d6b71d81fd04f6d16a53a | Fix variable names | hkaju/Ising2D,hkaju/Ising2D,hkaju/Ising2D | util.py | util.py | import glob
import subprocess
def generate_report():
    '''Generate a PDF report containing magnetization data and lattice snapshots.'''
    # One levelplot page is emitted per lattice snapshot found on disk.
    n_lattices = len(glob.glob("data/lattice*.csv"))
    #R code template
    with open("templates/report.template.r", 'r') as template_file:
        report_template = template_file.read()
    #R code template for a lattice file
    latticeplot_template = """data{0} <- read.csv('data/lattice{0}.csv', header=T)
levelplot(spin~x*y, data{0}, main='Sweeps: {0}', colorkey=FALSE, xlab='', ylab='', at=c(-1, 0, 1))\n"""
    #Generate R code for each lattice file
    lattice_plots = ""
    for n in range(n_lattices):
        lattice_plots += latticeplot_template.format(n)
    #Write R file to disk
    with open('report.r', 'w') as report_file:
        report_file.write(report_template % lattice_plots)
    #Run R and compile report
    # '-f' and the file name must be separate argv entries; the previous
    # single string '-f report.r' was handed to R as one malformed argument.
    subprocess.call(['R', '-f', 'report.r'])
    print("Report generated!")
def write_lattice(lattice, filename):
    '''Write the lattice configuration to a CSV file.

    lattice: object exposing `length` (side length) and `lattice`, a
    row-major 2D grid of spins indexed as lattice.lattice[y][x].
    '''
    # `with` guarantees the file is closed even if a write fails.
    with open(filename, 'w') as f:
        f.write("x,y,spin\n")
        for x in range(lattice.length):
            for y in range(lattice.length):
                f.write("%i,%i,%i\n" % (x, y, lattice.lattice[y][x]))
| import glob
import subprocess
def generate_report():
'''Generate a PDF report containing magnetization data and lattice snapshots.'''
n_lattices = len(glob.glob("data/lattice*.csv"))
#R code template
report_template = open("templates/report.template.r", 'r').read()
#R code template for a lattice file
latticeplot_template = """data{0} <- read.csv('data/lattice{0}.csv', header=T)
levelplot(spin~x*y, data{0}, main='Sweeps: {0}', colorkey=FALSE, xlab='', ylab='', at=c(-1, 0, 1))\n"""
#Generate R code for each lattice file
lattice_plots = ""
for n in range(n_lattices):
lattice_plots += lattemplate.format(i)
#Write R file to disk
open('report.r', 'w').write(report_template % lattice_plots)
#Run R and compile report
subprocess.call(['R', '-f report.r'])
print("Report generated!")
def write_lattice(lattice, filename):
'''Write the lattice configuration to a CSV file.'''
f = open(filename, 'w')
f.write("x,y,spin\n")
for x in range(lattice.length):
for y in range(lattice.length):
f.write("%i,%i,%i\n" % (x, y, lattice.lattice[y][x]))
f.close()
| mit | Python |
fc30af765493f8ba1be5113992ff5d8beb043554 | Update test script for new website (#25) | joyzoursky/docker-python-chromedriver,joyzoursky/docker-python-chromedriver | test_script.py | test_script.py | """
A simple selenium test example written by python
"""
import unittest
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
class TestTemplate(unittest.TestCase):
    """Include test cases on a given url"""
    def setUp(self):
        """Start web driver"""
        # Headless + no-sandbox + no-GPU flags let Chrome run without a
        # display (e.g. inside a container); the explicit window size keeps
        # responsive layouts predictable.
        chrome_options = webdriver.ChromeOptions()
        chrome_options.add_argument('--no-sandbox')
        chrome_options.add_argument('--headless')
        chrome_options.add_argument('--disable-gpu')
        chrome_options.add_argument("--window-size=1920,1080")
        self.driver = webdriver.Chrome(options=chrome_options)
        # Wait up to 10 seconds for elements to appear before failing.
        self.driver.implicitly_wait(10)
    def tearDown(self):
        """Stop web driver"""
        self.driver.quit()
    def test_case_1(self):
        """Find and click top-left logo button"""
        try:
            self.driver.get('https://www.oursky.com/')
            el = self.driver.find_element_by_class_name('header__logo')
            el.click()
        except NoSuchElementException as ex:
            # A missing element is reported as a test failure, not an error.
            self.fail(ex.msg)
    def test_case_2(self):
        """Find and click top-right Start your project button"""
        try:
            self.driver.get('https://www.oursky.com/')
            el = self.driver.find_element_by_class_name("header__cta")
            el.click()
        except NoSuchElementException as ex:
            self.fail(ex.msg)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestTemplate)
unittest.TextTestRunner(verbosity=2).run(suite)
| """
A simple selenium test example written by python
"""
import unittest
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
class TestTemplate(unittest.TestCase):
"""Include test cases on a given url"""
def setUp(self):
"""Start web driver"""
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-gpu')
self.driver = webdriver.Chrome(chrome_options=chrome_options)
self.driver.implicitly_wait(10)
def tearDown(self):
"""Stop web driver"""
self.driver.quit()
def test_case_1(self):
"""Find and click top-right button"""
try:
self.driver.get('https://www.oursky.com/')
el = self.driver.find_element_by_class_name('btn-header')
el.click()
except NoSuchElementException as ex:
self.fail(ex.msg)
def test_case_2(self):
"""Find and click Learn more button"""
try:
self.driver.get('https://www.oursky.com/')
el = self.driver.find_element_by_xpath(".//*[@id='tag-line-wrap']/span/a")
el.click()
except NoSuchElementException as ex:
self.fail(ex.msg)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestTemplate)
unittest.TextTestRunner(verbosity=2).run(suite)
| mit | Python |
b5d0694fe9d5e22ea82be9a78bbe4f755bec250d | Remove unneeded funfactory defaults (Jinja and LocaleURLMiddleware) | akatsoulas/remo,mozilla/remo,mozilla/remo,tsmrachel/remo,Mte90/remo,flamingspaz/remo,akatsoulas/remo,Mte90/remo,tsmrachel/remo,abdullah2891/remo,chirilo/remo,flamingspaz/remo,Mte90/remo,Mte90/remo,tsmrachel/remo,flamingspaz/remo,chirilo/remo,akatsoulas/remo,abdullah2891/remo,flamingspaz/remo,johngian/remo,johngian/remo,abdullah2891/remo,chirilo/remo,johngian/remo,mozilla/remo,akatsoulas/remo,chirilo/remo,abdullah2891/remo,johngian/remo,mozilla/remo,tsmrachel/remo | remo/settings/base.py | remo/settings/base.py | # This is your project's main settings file that can be committed to your
# repo. If you need to override a setting locally, use settings_local.py
from funfactory.settings_base import *
# Bundles is a dictionary of two dictionaries, css and js, which list css files
# and js files that can be bundled together by the minify app.
MINIFY_BUNDLES = {
'css': {
'example_css': (
'css/examples/main.css',
),
'example_mobile_css': (
'css/examples/mobile.css',
),
},
'js': {
'example_js': (
'js/examples/libs/jquery-1.4.4.min.js',
'js/examples/libs/jquery.cookie.js',
'js/examples/init.js',
),
}
}
# Defines the views served for root URLs.
ROOT_URLCONF = 'remo.urls'
INSTALLED_APPS = list(INSTALLED_APPS) + [
# Application base, containing global templates.
'remo.base',
'remo.landing',
'remo.profiles',
'django_browserid',
]
# Because Jinja2 is the default template loader, add any non-Jinja templated
# apps here:
JINGO_EXCLUDE_APPS = [
'admin',
]
# Tells the extract script what files to look for L10n in and what function
# handles the extraction. The Tower library expects this.
# # Use this if you have localizable HTML files:
# DOMAIN_METHODS['lhtml'] = [
# ('**/templates/**.lhtml',
# 'tower.management.commands.extract.extract_tower_template'),
# ]
# # Use this if you have localizable HTML files:
# DOMAIN_METHODS['javascript'] = [
# # Make sure that this won't pull in strings from external libraries you
# # may use.
# ('media/js/**.js', 'javascript'),
# ]
LOGGING = dict(loggers=dict(playdoh = {'level': logging.DEBUG}))
# Set profile module
AUTH_PROFILE_MODULE = 'profiles.UserProfile'
# Add BrowserID as authentication backend
AUTHENTICATION_BACKENDS = ('django_browserid.auth.BrowserIDBackend',
'django.contrib.auth.backends.ModelBackend',
)
# Remove jinja template engine. To be revisited
TEMPLATE_LOADERS = filter(lambda x: x != 'jingo.Loader', TEMPLATE_LOADERS)
# Remove LocaleURLMiddleware since we are not localing our website
MIDDLEWARE_CLASSES = filter(
lambda x: x!='funfactory.middleware.LocaleURLMiddleware',
MIDDLEWARE_CLASSES)
| # This is your project's main settings file that can be committed to your
# repo. If you need to override a setting locally, use settings_local.py
from funfactory.settings_base import *
# Bundles is a dictionary of two dictionaries, css and js, which list css files
# and js files that can be bundled together by the minify app.
MINIFY_BUNDLES = {
'css': {
'example_css': (
'css/examples/main.css',
),
'example_mobile_css': (
'css/examples/mobile.css',
),
},
'js': {
'example_js': (
'js/examples/libs/jquery-1.4.4.min.js',
'js/examples/libs/jquery.cookie.js',
'js/examples/init.js',
),
}
}
# Defines the views served for root URLs.
ROOT_URLCONF = 'remo.urls'
INSTALLED_APPS = list(INSTALLED_APPS) + [
# Application base, containing global templates.
'remo.base',
'remo.landing',
'remo.profiles',
'django_browserid',
]
# Because Jinja2 is the default template loader, add any non-Jinja templated
# apps here:
JINGO_EXCLUDE_APPS = [
'admin',
]
# Tells the extract script what files to look for L10n in and what function
# handles the extraction. The Tower library expects this.
# # Use this if you have localizable HTML files:
# DOMAIN_METHODS['lhtml'] = [
# ('**/templates/**.lhtml',
# 'tower.management.commands.extract.extract_tower_template'),
# ]
# # Use this if you have localizable HTML files:
# DOMAIN_METHODS['javascript'] = [
# # Make sure that this won't pull in strings from external libraries you
# # may use.
# ('media/js/**.js', 'javascript'),
# ]
LOGGING = dict(loggers=dict(playdoh = {'level': logging.DEBUG}))
# Set profile module
AUTH_PROFILE_MODULE = 'profiles.UserProfile'
# Add BrowserID as authentication backend
AUTHENTICATION_BACKENDS = ('django_browserid.auth.BrowserIDBackend',
'django.contrib.auth.backends.ModelBackend',
)
| bsd-3-clause | Python |
7801c5d7430233eb78ab8b2a91f5960bd808b2c7 | Move admin authentication into before_request handler | Encrylize/flask-blogger,Encrylize/flask-blogger,Encrylize/flask-blogger | app/admin/views.py | app/admin/views.py | from flask import Blueprint, render_template, redirect, url_for
from flask_security import current_user
admin = Blueprint('admin', __name__)
@admin.route('/')
@admin.route('/index')
def index():
    """Render the admin landing page."""
    return render_template('admin/index.html', title='Admin')
@admin.before_request
def require_login():
    """Redirect anonymous users to login before any admin view runs.

    Registered as a blueprint-wide before_request hook, so every route in
    this blueprint is protected without per-view decorators.
    """
    if not current_user.is_authenticated:
        return redirect(url_for('security.login', next='admin'))
| from flask import Blueprint, render_template
from flask_security import login_required
admin = Blueprint('admin', __name__)
@admin.route('/')
@admin.route('/index')
@login_required
def index():
return render_template('admin/index.html', title='Admin')
| mit | Python |
e87adc4e1d2c191f7d72ba80c2fe048d9c392eeb | Test SCEs | jonfoster/pyxb1,jonfoster/pyxb2,jonfoster/pyxb2,CantemoInternal/pyxb,jonfoster/pyxb2,jonfoster/pyxb-upstream-mirror,pabigot/pyxb,jonfoster/pyxb1,jonfoster/pyxb-upstream-mirror,balanced/PyXB,pabigot/pyxb,balanced/PyXB,CantemoInternal/pyxb,balanced/PyXB,jonfoster/pyxb-upstream-mirror,CantemoInternal/pyxb | pyxb/utils/xmlre.py | pyxb/utils/xmlre.py | # Copyright 2009, Peter A. Bigot
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# http://www.xmlschemareference.com/examples/Ch14/regexpDemo.xsd
# http://www.xmlschemareference.com/examples/Ch14/regexpDemo.xml
import unicode
class RegularExpressionError (ValueError):
    """Error raised while parsing an XML Schema regular expression.

    position: index into the expression text where the problem was found.
    """
    def __init__ (self, position, description):
        self.position = position
        # Pass self explicitly: the original call omitted it, so ValueError's
        # initializer received the description string as the instance and
        # every construction attempt raised TypeError.
        ValueError.__init__(self, description)
def MatchCharacterClass (text, position):
    """Match a character-class construct starting at *position* in *text*.

    Returns a (character-set, next-position) pair, or None when no
    character class begins at the given position.  Raises
    RegularExpressionError on a malformed construct.
    """
    if position >= len(text):
        return None
    c = text[position]
    np = position + 1
    if '.' == c:
        # The wildcard escape.
        return (unicode.WildcardEsc, np)
    if '[' == c:
        # Bracketed character group, e.g. [a-z].
        cg = _MatchCharGroup(text, np)
        if cg is not None:
            (result, np) = cg
            if (np < len(text)) and (']' == text[np]):
                return (result, np+1)
            raise RegularExpressionError(np, "Character group missing closing ']'")
        raise RegularExpressionError(position, "Unable to identify character group after '['")
    if '\\' == c:
        if np >= len(text):
            raise RegularExpressionError(np, "Missing escape identifier after '\\'")
        nc = text[np]
        cs = unicode.SingleCharEsc.get(nc)
        if cs is None:
            cs = unicode.MultiCharEsc.get(nc)
        if cs is not None:
            return (cs, np+1)
        if 'p' == nc:
            pass
        elif 'P' == nc:
            pass
        else:
            # Interpolate the offending escape character nc; the original
            # interpolated cs, which is always None on this branch.
            raise RegularExpressionError(np, "Unrecognized escape identifier '\\%s'" % (nc,))
    return None
import unittest
class TestXMLRE (unittest.TestCase):
    def testRangeErrors (self):
        # A start position at or past the end of the text matches nothing.
        self.assertTrue(MatchCharacterClass('', 1) is None)
    def testWildcardEscape (self):
        (charset, position) = MatchCharacterClass('.', 0)
        self.assertEqual(charset, unicode.WildcardEsc)
        self.assertEqual(position, 1)
    def testSingleCharEscapes (self):
        # 17 chars recognized as escapes
        self.assertEqual(len(unicode.SingleCharEsc), 17)
        (charset, position) = MatchCharacterClass(r'\t', 0)
        # Horizontal tab is code point 9.
        self.assertEqual(charset.asTuples(), [ (9, 9) ])
        self.assertEqual(2, position)
        (charset, position) = MatchCharacterClass(r'\?', 0)
        self.assertEqual(charset.asTuples(), [ (ord('?'), ord('?')) ])
        self.assertEqual(2, position)
        (charset, position) = MatchCharacterClass(r'\\', 0)
        self.assertEqual(charset.asTuples(), [ (ord('\\'), ord('\\')) ])
        self.assertEqual(2, position)
if __name__ == '__main__':
unittest.main()
| # Copyright 2009, Peter A. Bigot
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# http://www.xmlschemareference.com/examples/Ch14/regexpDemo.xsd
# http://www.xmlschemareference.com/examples/Ch14/regexpDemo.xml
import unicode
class RegularExpressionError (ValueError):
pass
def MatchCharacterClass (text, position):
if position >= len(text):
return None
c = text[position]
if '.' == c:
return (unicode.WildcardEsc, position+1)
if '[' == c:
cg = _MatchCharGroup(text, position+1)
if cg is not None:
(result, new_position) = cg
if (new_position < len(text)) and (']' == text[new_position]):
return (result, new_position+1)
raise RegularExpressionError(new_position, "Character group missing closing ']'")
raise RegularExpressionError(position, "Unable to identify character group after '['")
if '\\' == c:
pass
return None
import unittest
class TestXMLRE (unittest.TestCase):
def testRangeErrors (self):
self.assertTrue(MatchCharacterClass('', 1) is None)
def testWildcardEscape (self):
(charset, position) = MatchCharacterClass('.', 0)
self.assertEqual(charset, unicode.WildcardEsc)
self.assertEqual(position, 1)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
5b757b29b4ea69dcc4e432e7c54b92b241d8e3a0 | Send proper HTTP headers for files | Surye/relaygram | relaygram/http_server.py | relaygram/http_server.py | import http.server
from threading import Thread
import os.path
import mimetypes
class HTTPHandler:
def __init__(self, config):
self.config = config
handler = HTTPHandler.make_http_handler(self.config['media_dir'])
self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler)
self.thread = Thread(target=self.main_loop)
def run(self):
self.thread.start()
return self
def main_loop(self):
self.httpd.serve_forever()
@staticmethod
def make_http_handler(root_path):
class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler):
def __init__(self, *args, **kwargs):
super(RelayGramHTTPHandler, self).__init__(*args, **kwargs)
def do_GET(self):
file_path = os.path.abspath(root_path + self.path)
if os.path.commonprefix([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt
self.send_error(501, "Nice try")
else:
if not os.path.exists(file_path) or not os.path.isfile(file_path):
self.send_error(404, 'File Not Found')
else:
mimetype = mimetypes.guess_type(file_path)
self.send_response(200)
if mimetype[0]:
self.send_header('Content-Type', mimetype[0])
self.send_header('Content-Length', os.path.getsize(file_path))
self.end_headers()
self.wfile.write(open(file_path, mode='rb').read())
return RelayGramHTTPHandler
| import http.server
from threading import Thread
import os.path
class HTTPHandler:
def __init__(self, config):
self.config = config
handler = HTTPHandler.make_http_handler(self.config['media_dir'])
self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler)
self.thread = Thread(target=self.main_loop)
def run(self):
self.thread.start()
return self
def main_loop(self):
self.httpd.serve_forever()
@staticmethod
def make_http_handler(root_path):
class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler):
def __init__(self, *args, **kwargs):
super(RelayGramHTTPHandler, self).__init__(*args, **kwargs)
def do_GET(self):
file_path = os.path.abspath(root_path + self.path)
if os.path.commonprefix([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt
self.send_error(501, "Nice try")
else:
if not os.path.exists(file_path) or not os.path.isfile(file_path):
self.send_error(404, 'File Not Found')
else:
self.send_response(200)
self.wfile.write(open(file_path, mode='rb').read())
return RelayGramHTTPHandler
| mit | Python |
c4e0c99ab14ba9ad3a5f5084798be0297a28865b | add decision_function to example | adykstra/mne-python,teonlamont/mne-python,teonlamont/mne-python,larsoner/mne-python,rkmaddox/mne-python,Odingod/mne-python,Teekuningas/mne-python,trachelr/mne-python,yousrabk/mne-python,pravsripad/mne-python,mne-tools/mne-python,larsoner/mne-python,dimkal/mne-python,olafhauk/mne-python,jaeilepp/mne-python,Teekuningas/mne-python,cjayb/mne-python,kambysese/mne-python,jmontoyam/mne-python,yousrabk/mne-python,wronk/mne-python,jniediek/mne-python,alexandrebarachant/mne-python,mne-tools/mne-python,Eric89GXL/mne-python,dimkal/mne-python,matthew-tucker/mne-python,wronk/mne-python,lorenzo-desantis/mne-python,leggitta/mne-python,jaeilepp/mne-python,wmvanvliet/mne-python,Odingod/mne-python,nicproulx/mne-python,pravsripad/mne-python,aestrivex/mne-python,pravsripad/mne-python,rkmaddox/mne-python,mne-tools/mne-python,olafhauk/mne-python,bloyl/mne-python,andyh616/mne-python,kingjr/mne-python,ARudiuk/mne-python,adykstra/mne-python,cjayb/mne-python,lorenzo-desantis/mne-python,wmvanvliet/mne-python,kingjr/mne-python,leggitta/mne-python,aestrivex/mne-python,trachelr/mne-python,antiface/mne-python,cmoutard/mne-python,Teekuningas/mne-python,alexandrebarachant/mne-python,bloyl/mne-python,wmvanvliet/mne-python,kambysese/mne-python,olafhauk/mne-python,dgwakeman/mne-python,larsoner/mne-python,jniediek/mne-python,matthew-tucker/mne-python,Eric89GXL/mne-python,dgwakeman/mne-python,nicproulx/mne-python,cmoutard/mne-python,ARudiuk/mne-python,jmontoyam/mne-python,drammock/mne-python,antiface/mne-python,kingjr/mne-python,andyh616/mne-python,drammock/mne-python,drammock/mne-python | examples/decoding/plot_decoding_time_generalization.py | examples/decoding/plot_decoding_time_generalization.py | """
==========================================================
Decoding sensor space data with Generalization Across Time
==========================================================
This example runs the analysis computed in:
Jean-Remi King, Alexandre Gramfort, Aaron Schurger, Lionel Naccache
and Stanislas Dehaene, "Two distinct dynamic modes subtend the detection of
unexpected sounds", PLOS ONE, 2013,
http://www.ncbi.nlm.nih.gov/pubmed/24475052
The idea is to learn at one time instant and assess if the decoder
can predict accurately over time.
"""
# Authors: Jean-Remi King <jeanremi.king@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import mne
from mne.datasets import spm_face
from mne.decoding import GeneralizationAcrossTime
print(__doc__)
# Preprocess data
data_path = spm_face.data_path()
# Load and filter data, set up epochs
raw_fname = data_path + '/MEG/spm/SPM_CTF_MEG_example_faces%d_3D_raw.fif'
events_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
raw = mne.io.Raw(raw_fname % 1, preload=True) # Take first run
raw.append(mne.io.Raw(raw_fname % 2, preload=True)) # Take second run too
picks = mne.pick_types(raw.info, meg=True, exclude='bads')
raw.filter(1, 45, method='iir')
events = mne.find_events(raw, stim_channel='UPPT001')
event_id = {"faces": 1, "scrambled": 2}
tmin, tmax = -0.1, 0.5
decim = 4 # decimate to make the example faster to run
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True,
picks=picks, baseline=None, preload=True,
reject=dict(mag=1.5e-12), decim=decim, verbose=False)
# Define decoder. The decision_function is employed to use AUC for scoring
gat = GeneralizationAcrossTime(predict_mode='cross-validation',
predict_type='decision_function', n_jobs=1)
# fit and score
gat.fit(epochs)
gat.score(epochs)
gat.plot(vmin=0.1, vmax=0.9,
title="Generalization Across Time (faces vs. scrambled)")
gat.plot_diagonal() # plot decoding across time (correspond to GAT diagonal)
| """
==========================================================
Decoding sensor space data with Generalization Across Time
==========================================================
This example runs the analysis computed in:
Jean-Remi King, Alexandre Gramfort, Aaron Schurger, Lionel Naccache
and Stanislas Dehaene, "Two distinct dynamic modes subtend the detection of
unexpected sounds", PLOS ONE, 2013,
http://www.ncbi.nlm.nih.gov/pubmed/24475052
The idea is to learn at one time instant and assess if the decoder
can predict accurately over time.
"""
# Authors: Jean-Remi King <jeanremi.king@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import mne
from mne.datasets import spm_face
from mne.decoding import GeneralizationAcrossTime
print(__doc__)
# Preprocess data
data_path = spm_face.data_path()
# Load and filter data, set up epochs
raw_fname = data_path + '/MEG/spm/SPM_CTF_MEG_example_faces%d_3D_raw.fif'
events_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
raw = mne.io.Raw(raw_fname % 1, preload=True) # Take first run
raw.append(mne.io.Raw(raw_fname % 2, preload=True)) # Take second run too
picks = mne.pick_types(raw.info, meg=True, exclude='bads')
raw.filter(1, 45, method='iir')
events = mne.find_events(raw, stim_channel='UPPT001')
event_id = {"faces": 1, "scrambled": 2}
tmin, tmax = -0.1, 0.5
decim = 4 # decimate to make the example faster to run
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, proj=True,
picks=picks, baseline=None, preload=True,
reject=dict(mag=1.5e-12), decim=decim, verbose=False)
# Define decoder. The decision_function is employed to use AUC for scoring
gat = GeneralizationAcrossTime(predict_mode='cross-validation', n_jobs=1)
# fit and score
gat.fit(epochs)
gat.score(epochs)
gat.plot(vmin=0.1, vmax=0.9,
title="Generalization Across Time (faces vs. scrambled)")
gat.plot_diagonal() # plot decoding across time (correspond to GAT diagonal)
| bsd-3-clause | Python |
196cf804fddb1f2943ddee60d0219b6c8dc4e439 | Add email field to User model. We will use this field as the unique user identifier. | thomasbhatia/kigata | app/models/user.py | app/models/user.py | from app.models import Model
from app.factory import mongodb
import datetime
from marshmallow import Schema, fields, ValidationError
from flask import current_app, json
from bson import ObjectId
from app.helpers import generate_sid
now = datetime.datetime.now()
class User(Model):
collection = mongodb.db.users
def __unicode__(self):
return self.person_id
@classmethod
def set_active(cls, *args):
if args:
data = dict(is_actice=args[0])
cls.set(data)
@classmethod
def is_admin(cls):
if hasattr(cls, 'is_admin'):
if cls.is_admin is True:
return True
@classmethod
def create_user(cls, *args):
"""Creates and saves a User with the given username, e-mail and password."""
current_app.logger.warn(args)
data = {}
if args:
data = args[0]
constants = dict(is_active=False,
last_accessed=now,
date_joined=now,
shared_secret_renewal_interval=21600, # 6 hours
time_to_renew_shared_secret=0,
get_settings=False,
secret=generate_sid()
)
data.update(constants)
current_app.logger.warn(data)
user = cls(data)
user.save()
current_app.logger.info(user.id)
new_user = cls.get({'_id': ObjectId(user.id)})
return new_user
# User Schema
class UserSchema(Schema):
_id = fields.Str(dump_only=True, required=True)
first_name = fields.Str(required=True)
last_name = fields.Str(required=True)
is_active = fields.Boolean()
last_accessed = fields.DateTime()
date_joined = fields.DateTime(dump_only=True)
secret = fields.Str()
email = fields.Email()
| from app.models import Model
from app.factory import mongodb
import datetime
from marshmallow import Schema, fields, ValidationError
from flask import current_app, json
from bson import ObjectId
from app.helpers import generate_sid
now = datetime.datetime.now()
class User(Model):
collection = mongodb.db.users
def __unicode__(self):
return self.person_id
@classmethod
def set_active(cls, *args):
if args:
data = dict(is_actice=args[0])
cls.set(data)
@classmethod
def is_admin(cls):
if hasattr(cls, 'is_admin'):
if cls.is_admin is True:
return True
@classmethod
def create_user(cls, *args):
"""Creates and saves a User with the given username, e-mail and password."""
current_app.logger.warn(args)
data = {}
if args:
data = args[0]
constants = dict(is_active=False,
last_accessed=now,
date_joined=now,
shared_secret_renewal_interval=21600, # 6 hours
time_to_renew_shared_secret=0,
get_settings=False,
secret=generate_sid()
)
data.update(constants)
current_app.logger.warn(data)
user = cls(data)
user.save()
current_app.logger.info(user.id)
new_user = cls.get({'_id': ObjectId(user.id)})
return new_user
# User Schema
class UserSchema(Schema):
_id = fields.Str(dump_only=True, required=True)
first_name = fields.Str(required=True)
last_name = fields.Str(required=True)
is_active = fields.Boolean()
last_accessed = fields.DateTime()
date_joined = fields.DateTime(dump_only=True)
secret = fields.Str()
| bsd-3-clause | Python |
bdc800f7dc00933cd8fec0fa6ea13bd0dfc1050d | fix wsgi | tarvitz/djtp,tarvitz/djtp,tarvitz/djtp,tarvitz/djtp | wsgi.py | wsgi.py | """
WSGI config for mong project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| """
WSGI config for mong project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mong.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| bsd-3-clause | Python |
74fd7a24bdde74222dac0b4725947e27abdfb469 | remove video before retrying | benjaoming/ka-lite-zim,benjaoming/ka-lite-zim,benjaoming/ka-lite-zim | kalite_zim/utils.py | kalite_zim/utils.py | from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
import logging
import os
import urllib
from colorlog import ColoredFormatter
from django.conf import settings
from fle_utils.videos import get_outside_video_urls
from . import __name__ as base_path
base_path = os.path.abspath(base_path)
LOG_LEVEL = logging.DEBUG
LOGFORMAT = " %(log_color)s%(levelname)-8s%(reset)s | %(log_color)s%(message)s%(reset)s"
logging.root.setLevel(LOG_LEVEL)
formatter = ColoredFormatter(LOGFORMAT)
stream = logging.StreamHandler()
stream.setLevel(LOG_LEVEL)
stream.setFormatter(formatter)
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)
logger.addHandler(stream)
logger.propagate = False
def download_video(youtube_id, video_format, dest_dir):
"""
Fetch a video from the default
"""
download_url = ("http://%s/download/videos/" % (settings.CENTRAL_SERVER_HOST)) + "%s/%s"
url, thumb_url = get_outside_video_urls(youtube_id, download_url=download_url, format=video_format)
video_filename = os.path.join(dest_dir, "{}.{}".format(youtube_id, video_format))
thumbnail_filename = os.path.join(dest_dir, "{}.png".format(youtube_id))
def delete_download_garbage():
if os.path.isfile(video_filename):
os.unlink(video_filename)
if os.path.isfile(thumbnail_filename):
os.unlink(thumbnail_filename)
try:
retries = 0
while retries < 5:
try:
__, response = urllib.urlretrieve(url, video_filename)
except:
delete_download_garbage()
retries += 1
logger.warning("Retrying {}".format(retries))
pass
if not response.type.startswith("video"):
raise Exception("Video download failed: {}".format(url))
__, response = urllib.urlretrieve(thumb_url, thumbnail_filename)
if not response.type.startswith("image"):
logger.warning("Thumbnail missing, tried: {}".format(thumb_url))
except (Exception, KeyboardInterrupt):
delete_download_garbage()
raise
| from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
import logging
import os
import urllib
from colorlog import ColoredFormatter
from django.conf import settings
from fle_utils.videos import get_outside_video_urls
from . import __name__ as base_path
base_path = os.path.abspath(base_path)
LOG_LEVEL = logging.DEBUG
LOGFORMAT = " %(log_color)s%(levelname)-8s%(reset)s | %(log_color)s%(message)s%(reset)s"
logging.root.setLevel(LOG_LEVEL)
formatter = ColoredFormatter(LOGFORMAT)
stream = logging.StreamHandler()
stream.setLevel(LOG_LEVEL)
stream.setFormatter(formatter)
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)
logger.addHandler(stream)
logger.propagate = False
def download_video(youtube_id, video_format, dest_dir):
"""
Fetch a video from the default
"""
download_url = ("http://%s/download/videos/" % (settings.CENTRAL_SERVER_HOST)) + "%s/%s"
url, thumb_url = get_outside_video_urls(youtube_id, download_url=download_url, format=video_format)
video_filename = os.path.join(dest_dir, "{}.{}".format(youtube_id, video_format))
thumbnail_filename = os.path.join(dest_dir, "{}.png".format(youtube_id))
def delete_download_garbage():
if os.path.isfile(video_filename):
os.unlink(video_filename)
if os.path.isfile(thumbnail_filename):
os.unlink(thumbnail_filename)
try:
retries = 0
while retries < 5:
try:
__, response = urllib.urlretrieve(url, video_filename)
except:
retries += 1
logger.warning("Retrying {}".format(retries))
pass
if not response.type.startswith("video"):
raise Exception("Video download failed: {}".format(url))
__, response = urllib.urlretrieve(thumb_url, thumbnail_filename)
if not response.type.startswith("image"):
logger.warning("Thumbnail missing, tried: {}".format(thumb_url))
except (Exception, KeyboardInterrupt):
delete_download_garbage()
raise
| mit | Python |
fa57bd304f5924586b6c46076c83f1c0f1dc11ee | remove py3k imcompatible string raise from data example (this deprecated syntax is not need for the tests) | PyCQA/astroid | test/data/module2.py | test/data/module2.py | # copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
# copyright 2003-2010 Sylvain Thenault, all rights reserved.
# contact mailto:thenault@gmail.com
#
# This file is part of logilab-astng.
#
# logilab-astng is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# logilab-astng is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
from __future__ import generators
from data.module import YO, YOUPI
import data
class Specialization(YOUPI, YO): pass
class Metaclass(type): pass
class Interface: pass
class MyIFace(Interface): pass
class AnotherIFace(Interface): pass
class MyException(Exception): pass
class MyError(MyException): pass
class AbstractClass(object):
def to_override(self, whatever):
raise NotImplementedError()
def return_something(self, param):
if param:
return 'toto'
return
class Concrete0:
__implements__ = MyIFace
class Concrete1:
__implements__ = MyIFace, AnotherIFace
class Concrete2:
__implements__ = (MyIFace,
AnotherIFace)
class Concrete23(Concrete1): pass
del YO.member
del YO
[SYN1, SYN2] = Concrete0, Concrete1
assert `1`
b = 1 | 2 & 3 ^ 8
bb = 1 | two | 6
ccc = one & two & three
dddd = x ^ o ^ r
exec 'c = 3'
exec 'c = 3' in {}, {}
def raise_string(a=2, *args, **kwargs):
raise Exception, 'yo'
yield 'coucou'
a = b + 2
c = b * 2
c = b / 2
c = b // 2
c = b - 2
c = b % 2
c = b ** 2
c = b << 2
c = b >> 2
c = ~b
c = not b
d = [c]
e = d[:]
e = d[a:b:c]
raise_string(*args, **kwargs)
print >> stream, 'bonjour'
print >> stream, 'salut',
def make_class(any, base=data.module.YO, *args, **kwargs):
"""check base is correctly resolved to Concrete0"""
class Aaaa(base):
"""dynamic class"""
return Aaaa
from os.path import abspath
import os as myos
class A:
pass
class A(A):
pass
| # copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
# copyright 2003-2010 Sylvain Thenault, all rights reserved.
# contact mailto:thenault@gmail.com
#
# This file is part of logilab-astng.
#
# logilab-astng is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# logilab-astng is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
from __future__ import generators
from data.module import YO, YOUPI
import data
class Specialization(YOUPI, YO): pass
class Metaclass(type): pass
class Interface: pass
class MyIFace(Interface): pass
class AnotherIFace(Interface): pass
class MyException(Exception): pass
class MyError(MyException): pass
class AbstractClass(object):
def to_override(self, whatever):
raise NotImplementedError()
def return_something(self, param):
if param:
return 'toto'
return
class Concrete0:
__implements__ = MyIFace
class Concrete1:
__implements__ = MyIFace, AnotherIFace
class Concrete2:
__implements__ = (MyIFace,
AnotherIFace)
class Concrete23(Concrete1): pass
del YO.member
del YO
[SYN1, SYN2] = Concrete0, Concrete1
assert `1`
b = 1 | 2 & 3 ^ 8
bb = 1 | two | 6
ccc = one & two & three
dddd = x ^ o ^ r
exec 'c = 3'
exec 'c = 3' in {}, {}
def raise_string(a=2, *args, **kwargs):
raise 'pas glop'
raise Exception, 'yo'
yield 'coucou'
a = b + 2
c = b * 2
c = b / 2
c = b // 2
c = b - 2
c = b % 2
c = b ** 2
c = b << 2
c = b >> 2
c = ~b
c = not b
d = [c]
e = d[:]
e = d[a:b:c]
raise_string(*args, **kwargs)
print >> stream, 'bonjour'
print >> stream, 'salut',
def make_class(any, base=data.module.YO, *args, **kwargs):
"""check base is correctly resolved to Concrete0"""
class Aaaa(base):
"""dynamic class"""
return Aaaa
from os.path import abspath
import os as myos
class A:
pass
class A(A):
pass
| lgpl-2.1 | Python |
9759a806407cd9259e7ef4eb0303aa9f3b6ae336 | remove left-in print statements. | kuansim/timemap,ukris/timemapper,ukris/timemapper,ukris/timemapper,kuansim/timemap,kuansim/timemap,okfn/timemapper,okfn/timemapper,okfn/timemapper | hypernotes/web.py | hypernotes/web.py | import os
from flask import Flask, jsonify, render_template, json, request, redirect
from core import app
import logic
@app.route("/")
def home():
return 'Nothing to see here - go to api'
@app.route('/api/v1/note/<id>', methods=['GET', 'POST'])
def api_note(id):
if request.method == 'GET':
out = logic.note_get(id)
return jsonify(out)
else:
pass
@app.route('/api/v1/note', methods=['GET', 'POST', 'PUT'])
def api_note_index():
if request.method == 'GET':
# TODO: query
pass
else:
data = json.loads(request.data)
logic.note_upsert(data['id'], data)
out = {
'status': 'ok'
}
return jsonify(out)
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True)
| import os
from flask import Flask, jsonify, render_template, json, request, redirect
from core import app
import logic
@app.route("/")
def home():
return 'Nothing to see here - go to api'
@app.route('/api/v1/note/<id>', methods=['GET', 'POST'])
def api_note(id):
if request.method == 'GET':
out = logic.note_get(id)
return jsonify(out)
else:
pass
@app.route('/api/v1/note', methods=['GET', 'POST', 'PUT'])
def api_note_index():
if request.method == 'GET':
# TODO: query
pass
else:
print 'XXXX', request.data
data = json.loads(request.data)
print 'XXXXX', data
logic.note_upsert(data['id'], data)
out = {
'status': 'ok'
}
return jsonify(out)
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True)
| mit | Python |
6ce14f21cec2c37939f68aaf40d5227c80636e53 | Add docstring to explain the code | laboiteproject/laboite-backend,laboiteproject/laboite-backend,bgaultier/laboitepro,bgaultier/laboitepro,bgaultier/laboitepro,laboiteproject/laboite-backend | app_bikes/forms.py | app_bikes/forms.py | from dal import autocomplete
from django import forms
class BikeModelForm(forms.ModelForm):
def __init__(self, *args, **kw):
super(BikeModelForm, self).__init__(*args, **kw)
if self.instance is not None:
# Saved instance is loaded, setup choices to display the selected value
self.fields['id_station'].widget.choices = ((self.instance.id_station, self.instance.station),)
def validate_unique(self):
"""If the form had an error and a station was chosen, we need to setup the widget choices to the previously selected value for the autocomplete to display it properly"""
super(BikeModelForm, self).validate_unique()
if self.errors and 'id_station' in self.data:
self.fields['id_station'].widget.choices = ((self.cleaned_data['id_station'], self.data['station']),)
class Meta:
widgets = {
'id_station': autocomplete.ListSelect2(url='station-autocomplete',
forward=('provider',),
attrs={'data-allow-clear': 'false'})
}
class Media:
js = ('js/admin_form.js',)
| from dal import autocomplete
from django import forms
class BikeModelForm(forms.ModelForm):
def __init__(self, *args, **kw):
super(BikeModelForm, self).__init__(*args, **kw)
if self.instance is not None:
# Saved instance is loaded, setup choices to display the selected value
self.fields['id_station'].widget.choices = ((self.instance.id_station, self.instance.station),)
def validate_unique(self):
super(BikeModelForm, self).validate_unique()
if self.errors and 'id_station' in self.data:
# A station was chosen, reinit choices with it
self.fields['id_station'].widget.choices = ((self.cleaned_data['id_station'], self.data['station']),)
class Meta:
widgets = {
'id_station': autocomplete.ListSelect2(url='station-autocomplete',
forward=('provider',),
attrs={'data-allow-clear': 'false'})
}
class Media:
js = ('js/admin_form.js',)
| agpl-3.0 | Python |
c18be2bbf78bc84b4642a749d536ba1ff3a50a0e | Set version number to 0.9. | live-clones/pybtex | pybtex/__version__.py | pybtex/__version__.py | version = '0.9'
| version = '20090402'
| mit | Python |
b22c5cdeb75f77c15eba5ef9ad2f8fca24d1a9db | add json mixin for core views | tarvitz/djtp,tarvitz/djtp,tarvitz/djtp,tarvitz/djtp | apps/core/views.py | apps/core/views.py | # Create your views here.
# coding: utf-8
from apps.core.helpers import render_to
from django.http import HttpResponse
try:
import simplejson as json
except ImportError:
import json
from django.views.generic.edit import (
FormMixin, TemplateResponseMixin, DeletionMixin
)
@render_to('index.html')
def index(request):
return {}
def write_redirect(request, pk):
response = HttpResponse()
response.write('redirected with: %s' % pk)
return response
@render_to('index.html')
def test_redirect(request, pk):
return {
'redirect': 'core:write-redirect',
'redirect-args': (pk, )
}
class JSONViewMixin(TemplateResponseMixin):
#helpers
def convert_context_to_json(self, context):
return json.dumps(context, default=model_json_encoder)
def get_context_data(self, **kwargs):
context = super(TemplateResponseMixin, self).get_context_data(**kwargs)
context.update(self.kwargs)
return context
def delete(self, request, *args, **kwargs):
"""
Calls the delete() method on the fetched object and then
redirects to the success URL.
"""
self.object = self.get_object()
self.object.delete()
if self.request.is_ajax():
return self.render_to_response({'success': True})
return HttpResponseRedirect(self.get_success_url())
def form_valid(self, form):
if self.request.is_ajax():
return self.render_to_response(form)
return super(JSONViewMixin, self).form_valid(form)
def render_to_response(self, context, **response_kwargs):
if self.request.is_ajax():
context = self.convert_context_to_json(context)
response_kwargs.update({"content_type": 'application/json'})
response = HttpResponse(context, **response_kwargs)
return response
return super(JSONViewMixin, self).render_to_response(
context, **response_kwargs
)
| # Create your views here.
# coding: utf-8
from apps.core.helpers import render_to
from django.http import HttpResponse
@render_to('index.html')
def index(request):
return {}
def write_redirect(request, pk):
response = HttpResponse()
response.write('redirected with: %s' % pk)
return response
@render_to('index.html')
def test_redirect(request, pk):
return {
'redirect': 'core:write-redirect',
'redirect-args': (pk, )
}
| bsd-3-clause | Python |
8b81699181f6b01784427d77e627bf722f78a68a | remove print() , use logger instead. | haandol/hongmoa,haandol/honey | apps/decorators.py | apps/decorators.py | import re
import traceback
from functools import wraps
from loggers import logger
TOKENIZE_PATTERN = re.compile(r'["“](.+?)["”]|(\S+)', re.U | re.S)
def _extract_tokens(message):
'''Parse the given message, extract command and split'em into tokens
Args:
message (str): user gave message
Returns:
(list): tokens
'''
return list(filter(lambda x: x and x.strip(), TOKENIZE_PATTERN.split(message)))
def on_command(commands):
def decorator(func):
func.commands = commands
@wraps(func)
async def _decorator(robot, channel, user, message):
if commands:
tokens = _extract_tokens(message)
try:
channel, message = await func(robot, channel, user, tokens)
logger.debug('[Debug] message: {}'.format(message))
if channel:
if dict == type(message) and 'text' in message:
robot.client.api_call(
'chat.postMessage', channel=channel, **message
)
else:
robot.client.rtm_send_message(channel, str(message))
return message
else:
logger.warning('Can not send to empty channel')
except:
logger.error('Can not deliver the message because...')
traceback.print_exc()
return None
return _decorator
return decorator
| import re
import traceback
from functools import wraps
TOKENIZE_PATTERN = re.compile(r'["“](.+?)["”]|(\S+)', re.U | re.S)
def _extract_tokens(message):
'''Parse the given message, extract command and split'em into tokens
Args:
message (str): user gave message
Returns:
(list): tokens
'''
return list(filter(lambda x: x and x.strip(), TOKENIZE_PATTERN.split(message)))
def on_command(commands):
def decorator(func):
func.commands = commands
@wraps(func)
async def _decorator(robot, channel, user, message):
if commands:
tokens = _extract_tokens(message)
try:
channel, message = await func(robot, channel, user, tokens)
if channel:
if dict == type(message) and 'text' in message:
robot.client.api_call(
'chat.postMessage', channel=channel, **message
)
else:
robot.client.rtm_send_message(channel, str(message))
return message
else:
print("[Warn] Can not send to empty channel")
except:
print("[Error] Can not deliver the message because...")
traceback.print_exc()
print()
return None
return _decorator
return decorator
| mit | Python |
8b56d561bb120f1f3dba8962721c2156967e0d83 | Make rotation matrices | adrianliaw/PyCuber | pycuber/cube/cubie.py | pycuber/cube/cubie.py | import numpy as np
class Cubie(np.ndarray):
def __new__(subtype, side_colour_map, **kwargs):
if isinstance(side_colour_map, Cubie):
return side_colour_map
side_colour_map = np.array(side_colour_map)
if side_colour_map.shape == (6,):
return side_colour_map
ret = np.ndarray.__new__(subtype, (6, ), "int8", **kwargs)
ret.fill(-1)
ret[side_colour_map[:, 0]] = side_colour_map[:, 1]
return ret
U, L, F, R, B, D = range(6)
ROT = np.zeros((3, 6, 6), int)
X, Y, Z = range(3)
rotation_patterns = np.array([
[[F, U], [U, B], [B, D], [D, F], [L, L], [R, R]],
[[L, B], [B, R], [R, F], [F, L], [U, U], [D, D]],
[[L, U], [U, R], [R, D], [D, L], [F, F], [B, B]],
])
for i, pattern in enumerate(rotation_patterns):
ROT[i][pattern[:, 0], pattern[:, 1]] = 1
print(ROT)
if __name__ == "__main__":
mapping = np.array([[0, 0], [1, 1], [2, 2]])
cubie = Cubie(mapping)
print(cubie)
print(Cubie(cubie))
print(Cubie(np.array([0, 1, 2, -1, -1, -1])))
print(Cubie([0, 1, 2, -1, -1, -1]))
| import numpy as np
class Cubie(np.ndarray):
def __new__(subtype, side_colour_map, **kwargs):
if isinstance(side_colour_map, Cubie):
return side_colour_map
if not isinstance(side_colour_map, np.ndarray):
side_colour_map = np.array(side_colour_map)
if side_colour_map.shape == (6,):
return side_colour_map
ret = np.ndarray.__new__(subtype, (6, ), "int8", **kwargs)
ret.fill(-1)
ret[side_colour_map[:, 0]] = side_colour_map[:, 1]
return ret
if __name__ == "__main__":
mapping = np.array([[0, 0], [1, 1], [2, 2]])
cubie = Cubie(mapping)
print(cubie)
print(Cubie(cubie))
print(Cubie(np.array([0, 1, 2, -1, -1, -1])))
print(Cubie([0, 1, 2, -1, -1, -1]))
| mit | Python |
e7b105105ae1dd826f195eb5d6c6716a9940b9c3 | Document get_exe_path() | innogames/igcommit | igcommit/utils.py | igcommit/utils.py | """igcommit - Utility functions
Copyright (c) 2021 InnoGames GmbH
Portions Copyright (c) 2021 Emre Hasegeli
"""
from os import X_OK, access, environ
def get_exe_path(exe):
"""Traverse the PATH to find where the executable is
This should behave similar to the shell built-in "which".
"""
for dir_path in environ['PATH'].split(':'):
path = dir_path.strip('"') + '/' + exe
if access(path, X_OK):
return path
def iter_buffer(iterable, amount):
assert amount > 1
memo = []
for elem in iterable:
if elem is not None:
memo.append(elem)
if len(memo) < amount:
continue
yield memo.pop(0)
for elem in memo:
yield elem
| """igcommit - Utility functions
Copyright (c) 2021 InnoGames GmbH
Portions Copyright (c) 2021 Emre Hasegeli
"""
from os import X_OK, access, environ
def get_exe_path(exe):
for dir_path in environ['PATH'].split(':'):
path = dir_path.strip('"') + '/' + exe
if access(path, X_OK):
return path
def iter_buffer(iterable, amount):
assert amount > 1
memo = []
for elem in iterable:
if elem is not None:
memo.append(elem)
if len(memo) < amount:
continue
yield memo.pop(0)
for elem in memo:
yield elem
| mit | Python |
0b1f38b8354a0ad6a021f247a7bc1336ae5d50fb | Change some of the relative imports, which fail in doctests, to absolute imports. | mikemhenry/arcade,mikemhenry/arcade | arcade/__init__.py | arcade/__init__.py | """
The Arcade Library
A Python simple, easy to use module for creating 2D games.
"""
import arcade.key
import arcade.color
from arcade.version import *
from arcade.window_commands import *
from arcade.draw_commands import *
from arcade.sprite import *
from arcade.physics_engines import *
from arcade.physics_engine_2d import *
from arcade.application import *
from arcade.sound import *
from arcade.shape_objects import *
| """
The Arcade Library
A Python simple, easy to use module for creating 2D games.
"""
import arcade.key
import arcade.color
from .version import *
from .window_commands import *
from .draw_commands import *
from .sprite import *
from .physics_engines import *
from .physics_engine_2d import *
from .application import *
from .sound import *
from .shape_objects import *
| mit | Python |
7150674eec92120c6107fe728798bdcefba1efd7 | Use os.path.sep in util.py for regex DEFAULT_SKIP_FILES | certik/pyjamas,andreyvit/pyjamas,andreyvit/pyjamas,andreyvit/pyjamas,certik/pyjamas,certik/pyjamas,certik/pyjamas,andreyvit/pyjamas | pyjs/src/pyjs/util.py | pyjs/src/pyjs/util.py | import os
import shutil
import re
import logging
DEFAULT_SKIP_FILES=re.compile(
r"^(.*\%(sep)s)?("
r"(\..*)|"
r"(#.*#)|"
r"(.*~)|"
r"(.*\.py[co])|"
r"(.*\/RCS\/?.*)|"
r"(.*\/CVS\/?.*)|"
r"(.*\.egg-info.*)|"
r")$" % {'sep': os.path.sep})
def copytree_exists(src, dst, symlinks=False,
skip_files=DEFAULT_SKIP_FILES):
if not os.path.exists(src):
return
names = os.listdir(src)
if not os.path.exists(dst):
os.mkdir(dst)
errors = []
for name in names:
srcname = os.path.join(src, name)
if skip_files.match(srcname):
logging.debug('Ignoring file \'%s\': File matches ignore regex.',
srcname)
continue
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree_exists(srcname, dstname, symlinks, skip_files=skip_files)
else:
shutil.copy2(srcname, dstname)
except (IOError, os.error), why:
errors.append((srcname, dstname, why))
if errors:
print errors
def copy_exists(srcname, dstname, symlinks=False):
if not os.path.exists(srcname):
return
errors = []
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
else:
shutil.copyfile(srcname, dstname)
shutil.copystat(srcname, dstname)
except (IOError, os.error), why:
errors.append((srcname, dstname, why))
if errors:
print errors
| import os
import shutil
import re
import logging
DEFAULT_SKIP_FILES=re.compile(
r"^(.*%(sep)s)?("
r"(\..*)|"
r"(#.*#)|"
r"(.*~)|"
r"(.*\.py[co])|"
r"(.*\/RCS\/?.*)|"
r"(.*\/CVS\/?.*)|"
r"(.*\.egg-info.*)|"
r")$" % {'sep': os.path.sep})
def copytree_exists(src, dst, symlinks=False,
skip_files=DEFAULT_SKIP_FILES):
if not os.path.exists(src):
return
names = os.listdir(src)
if not os.path.exists(dst):
os.mkdir(dst)
errors = []
for name in names:
srcname = os.path.join(src, name)
if skip_files.match(srcname):
logging.debug('Ignoring file \'%s\': File matches ignore regex.',
srcname)
continue
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copytree_exists(srcname, dstname, symlinks, skip_files=skip_files)
else:
shutil.copy2(srcname, dstname)
except (IOError, os.error), why:
errors.append((srcname, dstname, why))
if errors:
print errors
def copy_exists(srcname, dstname, symlinks=False):
if not os.path.exists(srcname):
return
errors = []
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
else:
shutil.copyfile(srcname, dstname)
shutil.copystat(srcname, dstname)
except (IOError, os.error), why:
errors.append((srcname, dstname, why))
if errors:
print errors
| apache-2.0 | Python |
6d55e296e99bfff28f6bdf694206871b8d04de62 | Fix client metadata extractor test | msmolens/girder,data-exp-lab/girder,jcfr/girder,Xarthisius/girder,sutartmelson/girder,Kitware/girder,opadron/girder,kotfic/girder,adsorensen/girder,msmolens/girder,kotfic/girder,jcfr/girder,sutartmelson/girder,Kitware/girder,salamb/girder,salamb/girder,chrismattmann/girder,data-exp-lab/girder,girder/girder,jcfr/girder,RafaelPalomar/girder,msmolens/girder,RafaelPalomar/girder,data-exp-lab/girder,salamb/girder,RafaelPalomar/girder,kotfic/girder,sutartmelson/girder,chrismattmann/girder,adsorensen/girder,msmolens/girder,jbeezley/girder,essamjoubori/girder,Kitware/girder,Xarthisius/girder,jbeezley/girder,sutartmelson/girder,manthey/girder,data-exp-lab/girder,Xarthisius/girder,salamb/girder,kotfic/girder,chrismattmann/girder,chrismattmann/girder,essamjoubori/girder,girder/girder,opadron/girder,adsorensen/girder,opadron/girder,sutartmelson/girder,jcfr/girder,Xarthisius/girder,salamb/girder,kotfic/girder,opadron/girder,essamjoubori/girder,Kitware/girder,chrismattmann/girder,adsorensen/girder,girder/girder,manthey/girder,opadron/girder,Xarthisius/girder,girder/girder,manthey/girder,essamjoubori/girder,data-exp-lab/girder,manthey/girder,adsorensen/girder,jbeezley/girder,jbeezley/girder,jcfr/girder,RafaelPalomar/girder,msmolens/girder,RafaelPalomar/girder,essamjoubori/girder | plugins/metadata_extractor/plugin_tests/client_metadata_extractor_test.py | plugins/metadata_extractor/plugin_tests/client_metadata_extractor_test.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import os.path
import sys
import time
from girder.constants import ROOT_DIR
from server.metadata_extractor import ClientMetadataExtractor
from tests import base
from . metadata_extractor_test import MetadataExtractorTestCase
def setUpModule():
os.environ['PORT'] = '50001'
base.startServer(False)
def tearDownModule():
base.stopServer()
class ClientMetadataExtractorTestCase(MetadataExtractorTestCase):
def testClientMetadataExtractor(self):
item = self.model('item').load(self.item['_id'], user=self.user)
self.assertEqual(item['name'], self.name)
self.assertNotHasKeys(item, ['meta'])
clientPath = os.path.join(ROOT_DIR, 'clients', 'python')
sys.path.insert(0, clientPath)
from GirderClient import GirderClient
client = GirderClient('localhost', 50001)
client.authenticate(self.user['login'], self.password)
extractor = ClientMetadataExtractor(client, self.path, self.item['_id'])
extractor.extractMetadata()
sys.path.remove(clientPath)
item = self.model('item').load(self.item['_id'], user=self.user)
self.assertEqual(item['name'], self.name)
self.assertHasKeys(item, ['meta'])
self.assertEqual(item['meta']['MIME type'], self.mimeType)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import os.path
import sys
import time
from girder.constants import ROOT_DIR
from server.metadata_extractor import ClientMetadataExtractor
from tests import base
from . metadata_extractor_test import MetadataExtractorTestCase
def setUpModule():
os.environ['PORT'] = '50001'
base.startServer(False)
def tearDownModule():
base.stopServer()
class ClientMetadataExtractorTestCase(MetadataExtractorTestCase):
def testClientMetadataExtractor(self):
time.sleep(0.2)
item = self.model('item').load(self.item['_id'], user=self.user)
self.assertEqual(item['name'], self.name)
self.assertNotHasKeys(item, ['meta'])
clientPath = os.path.join(ROOT_DIR, 'clients', 'python')
sys.path.insert(0, clientPath)
from GirderClient import GirderClient
client = GirderClient('localhost', 50001)
client.authenticate(self.user['login'], self.password)
extractor = ClientMetadataExtractor(client, self.path, self.item['_id'])
extractor.extractMetadata()
sys.path.remove(clientPath)
start = time.time()
while True:
if time.time() - start > 15:
break
item = self.model('item').load(self.item['_id'], user=self.user)
if 'meta' in item and item['meta']['MIME type'] == self.mimeType:
break
time.sleep(0.2)
self.assertEqual(item['name'], self.name)
self.assertHasKeys(item, ['meta'])
self.assertEqual(item['meta']['MIME type'], self.mimeType)
| apache-2.0 | Python |
6627f00d592597b41bd3b288cd4ad3dccbecdecc | add prefix config | google-code-export/beets,google-code-export/beets,google-code-export/beets | beetsplug/fuzzy.py | beetsplug/fuzzy.py | # This file is part of beets.
# Copyright 2013, Philippe Mongeau.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Like beet list, but with fuzzy matching
"""
from beets.plugins import BeetsPlugin
from beets.library import PluginQuery
from beets.ui import Subcommand, decargs, print_obj
from beets import util
import beets
import difflib
class FuzzyQuery(PluginQuery):
def __init__(self, field, pattern):
super(FuzzyQuery, self).__init__(field, pattern)
# self.field = field
self.name = 'PLUGIN'
self.prefix = beets.config['fuzzy']['prefix'].get() or '~'
self.threshold = beets.config['fuzzy']['threshold'].as_number() or 0.7
def match(self, pattern, val):
if pattern is None:
return False
val = util.as_string(val)
queryMatcher = difflib.SequenceMatcher(None, pattern, val)
return queryMatcher.quick_ratio() > self.threshold
class FuzzyPlugin(BeetsPlugin):
def __init__(self):
super(FuzzyPlugin, self).__init__(self)
self.config.add({
'threshold': 0.7,
'prefix': '~',
})
def queries(self):
return [FuzzyQuery]
| # This file is part of beets.
# Copyright 2013, Philippe Mongeau.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Like beet list, but with fuzzy matching
"""
from beets.plugins import BeetsPlugin
from beets.library import PluginQuery
from beets.ui import Subcommand, decargs, print_obj
from beets import config
from beets import util
import difflib
class FuzzyQuery(PluginQuery):
def __init__(self, field, pattern):
super(FuzzyQuery, self).__init__(field, pattern)
# self.field = field
self.name = 'PLUGIN'
self.prefix = "~"
def match(self, pattern, val):
if pattern is None:
return False
val = util.as_string(val)
queryMatcher = difflib.SequenceMatcher(None, pattern, val)
return queryMatcher.quick_ratio() > config['fuzzy']['threshold'].as_number()
class FuzzyPlugin(BeetsPlugin):
def __init__(self):
super(FuzzyPlugin, self).__init__(self)
self.config.add({
'threshold': 0.7,
})
def queries(self):
return [FuzzyQuery]
| mit | Python |
44574107a22a2cb20304151ee1b3907df4d1fcd4 | print warning instead of crash for overlapping input intervals | glennhickey/teHmm,glennhickey/teHmm | bin/fillTermini.py | bin/fillTermini.py | #!/usr/bin/env python
#Copyright (C) 2014 by Glenn Hickey
#
#Released under the MIT license, see LICENSE.txt
import sys
import os
import argparse
import copy
from pybedtools import BedTool, Interval
"""
Stick a bed interval between pairs of lastz termini. Script written to be used
in conjunction with tsdFinder.py:
lastz termini -> fill termini -> bed input (which gets merged up automatically)
for tsdFinder.py. Example:
scaffold_1 141 225 1+ 43 +
scaffold_1 4479 4563 1+ 43 +
becomes
scaffold_1 141 225 1+ 43 +
scaffold_1 225 4479 1+ 43 +
scaffold_1 4479 4563 1+ 43 +
Note: also works on output of cleanTermini.
"""
def main(argv=None):
if argv is None:
argv = sys.argv
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description="Add interval between pairs of candidate termini. Input "
"bed must have pairs of termini (left first) in contiguous rows (or be "
"like output of cleanTermini.py")
parser.add_argument("inBed", help="bed with ltr results to process")
parser.add_argument("outBed", help="bed to write output to.")
args = parser.parse_args()
assert os.path.exists(args.inBed)
outFile = open(args.outBed, "w")
prevInterval = None
for interval in BedTool(args.inBed):
# Right termini
if prevInterval is not None:
if interval.name != prevInterval.name and (
interval.name != "R_Term" or prevInterval.name != "L_Term"):
raise RuntimeError("Consecutive intervals dont have same id"
"\n%s%s" % (prevInterval, interval))
# make the new interval, dont bother giving a new name for now
fillInterval = copy.deepcopy(prevInterval)
fillInterval.start = min(prevInterval.end, interval.end)
fillInterval.end = max(prevInterval.start, interval.start)
outFile.write(str(prevInterval))
if fillInterval.start >= fillInterval.end:
print "No fill written for overlapping intervals\n%s%s" % (
prevInterval, interval)
else:
outFile.write(str(fillInterval))
outFile.write(str(interval))
prevInterval = None
# Left termini
else:
prevInterval = interval
outFile.close()
if __name__ == "__main__":
sys.exit(main())
| #!/usr/bin/env python
#Copyright (C) 2014 by Glenn Hickey
#
#Released under the MIT license, see LICENSE.txt
import sys
import os
import argparse
import copy
from pybedtools import BedTool, Interval
"""
Stick a bed interval between pairs of lastz termini. Script written to be used
in conjunction with tsdFinder.py:
lastz termini -> fill termini -> bed input (which gets merged up automatically)
for tsdFinder.py. Example:
scaffold_1 141 225 1+ 43 +
scaffold_1 4479 4563 1+ 43 +
becomes
scaffold_1 141 225 1+ 43 +
scaffold_1 225 4479 1+ 43 +
scaffold_1 4479 4563 1+ 43 +
Note: also works on output of cleanTermini.
"""
def main(argv=None):
if argv is None:
argv = sys.argv
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description="Add interval between pairs of candidate termini. Input "
"bed must have pairs of termini (left first) in contiguous rows (or be "
"like output of cleanTermini.py")
parser.add_argument("inBed", help="bed with ltr results to process")
parser.add_argument("outBed", help="bed to write output to.")
args = parser.parse_args()
assert os.path.exists(args.inBed)
outFile = open(args.outBed, "w")
prevInterval = None
for interval in BedTool(args.inBed):
# Right termini
if prevInterval is not None:
if interval.name != prevInterval.name and (
interval.name != "R_Term" or prevInterval.name != "L_Term"):
raise RuntimeError("Consecutive intervals dont have same id"
"\n%s%s" % (prevInterval, interval))
# make the new interval, dont bother giving a new name for now
fillInterval = copy.deepcopy(prevInterval)
fillInterval.start = prevInterval.end
fillInterval.end = interval.start
outFile.write(str(prevInterval))
outFile.write(str(fillInterval))
outFile.write(str(interval))
prevInterval = None
# Left termini
else:
prevInterval = interval
outFile.close()
if __name__ == "__main__":
sys.exit(main())
| mit | Python |
1ba050509865832ce72b8ee79084b56c9836c9ff | update the test file to reflect the rename of the blockfinder.use_sql_database_call method | ioerror/blockfinder,d1b/blockfinder,Starefossen/docker-blockfinder,Starefossen/docker-blockfinder,ioerror/blockfinder,d1b/blockfinder | blockfindertest.py | blockfindertest.py | #!/usr/bin/python
import blockfinder
import unittest
import os
class CheckReverseLookup(unittest.TestCase):
ipValues = ( (3229318011, '192.123.123.123'),
(3463778365, '206.117.16.61'),
(4278190202, '255.0.0.122'),
(3654084623, '217.204.232.15'),
(134217728, '8.0.0.0'))
rirValues = ( ('217.204.232.15', 'GB'),
('188.72.225.100', 'DE'),
('8.8.8.1', 'US'))
cache_dir = str(os.path.expanduser('~')) + "/.blockfinder/"
def test_rir_lookup(self):
for ip, cc in self.rirValues:
result = blockfinder.rir_lookup(ip, self.cache_dir)
self.assertEqual(result[0], cc)
def test_ip_address_to_dec(self):
for dec, ip in self.ipValues:
result = blockfinder.ip_address_to_dec(ip)
self.assertEqual(result, dec)
class CheckBlockFinder(unittest.TestCase):
cache_dir = str(os.path.expanduser('~')) + "/.blockfinder/"
# You can add known blocks to the tuple as a list
# they will be looked up and checked
knownResults = ( ('mm', ['203.81.64.0/19',
'203.81.160.0/20']),
('kp', ['175.45.176.0/22']))
def test_ipv4_bf(self):
blockfinder.verbose = 0
for cc, values in self.knownResults:
self.result = blockfinder.use_sql_database("ipv4", cc.upper(), self.cache_dir)
self.assertEqual(self.result, values)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/python
import blockfinder
import unittest
import os
class CheckReverseLookup(unittest.TestCase):
ipValues = ( (3229318011, '192.123.123.123'),
(3463778365, '206.117.16.61'),
(4278190202, '255.0.0.122'),
(3654084623, '217.204.232.15'),
(134217728, '8.0.0.0'))
rirValues = ( ('217.204.232.15', 'GB'),
('188.72.225.100', 'DE'),
('8.8.8.1', 'US'))
cache_dir = str(os.path.expanduser('~')) + "/.blockfinder/"
def test_rir_lookup(self):
for ip, cc in self.rirValues:
result = blockfinder.rir_lookup(ip, self.cache_dir)
self.assertEqual(result[0], cc)
def test_ip_address_to_dec(self):
for dec, ip in self.ipValues:
result = blockfinder.ip_address_to_dec(ip)
self.assertEqual(result, dec)
class CheckBlockFinder(unittest.TestCase):
cache_dir = str(os.path.expanduser('~')) + "/.blockfinder/"
# You can add known blocks to the tuple as a list
# they will be looked up and checked
knownResults = ( ('mm', ['203.81.64.0/19',
'203.81.160.0/20']),
('kp', ['175.45.176.0/22']))
def test_ipv4_bf(self):
blockfinder.verbose = 0
for cc, values in self.knownResults:
self.result = blockfinder.use_sql_database_call("ipv4", cc.upper(), self.cache_dir)
self.assertEqual(self.result, values)
if __name__ == '__main__':
unittest.main()
| bsd-2-clause | Python |
e70a45baa04a8f79bc0fd47d52fac24ca9a5985e | Clean '--' values | dpxxdp/berniemetrics,Rumel/berniemetrics,dpxxdp/berniemetrics,fpagnoux/berniemetrics,dpxxdp/berniemetrics,Rumel/berniemetrics,Rumel/berniemetrics,dpxxdp/berniemetrics,fpagnoux/berniemetrics,fpagnoux/berniemetrics,Rumel/berniemetrics,fpagnoux/berniemetrics | private/realclearpolitics-scraper/realclearpolitics/spiders/spider.py | private/realclearpolitics-scraper/realclearpolitics/spiders/spider.py | import scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
columns = ['Poll','Date', 'Sample', 'Spread']
def __init__(self, url, extra_fields = {}):
self.url = url
self.extra_fields = extra_fields
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
item['field'] = {}
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
if fieldNames[i] in RcpSpider.columns:
item[fieldNames[i]] = values[i]
elif values[i] != '--':
item['field'][fieldNames[i]] = values[i]
for fieldName, value in self.extra_fields.iteritems():
item[fieldName] = value
items.append(item)
return items
| import scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
columns = ['Poll','Date', 'Sample', 'Spread']
def __init__(self, url, extra_fields = {}):
self.url = url
self.extra_fields = extra_fields
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
item['field'] = {}
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
if fieldNames[i] in RcpSpider.columns:
item[fieldNames[i]] = values[i]
else:
item['field'][fieldNames[i]] = values[i]
for fieldName, value in self.extra_fields.iteritems():
item[fieldName] = value
items.append(item)
return items
| mit | Python |
ed4f786de54dde50cb26cfe4859507579806a14b | Adjust to avoid bugs with other values in context | ingadhoc/sale,ingadhoc/sale,ingadhoc/sale,ingadhoc/sale | portal_sale_distributor/models/ir_action_act_window.py | portal_sale_distributor/models/ir_action_act_window.py | ##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, api
from odoo.tools.safe_eval import safe_eval
class ActWindowView(models.Model):
_inherit = 'ir.actions.act_window'
def read(self, fields=None, load='_classic_read'):
result = super().read(fields, load=load)
for value in result:
if value.get('context') and 'portal_products' in value.get('context'):
eval_ctx = dict(self.env.context)
try:
ctx = safe_eval(value.get('context', '{}'), eval_ctx)
except:
ctx = {}
pricelist = self.env.user.partner_id.property_product_pricelist
ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id.id})
value.update({'context': str(ctx)})
return result
| ##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from odoo import models, api
from odoo.tools.safe_eval import safe_eval
class ActWindowView(models.Model):
_inherit = 'ir.actions.act_window'
def read(self, fields=None, load='_classic_read'):
result = super().read(fields, load=load)
if result and result[0].get('context'):
ctx = safe_eval(result[0].get('context', '{}'))
if ctx.get('portal_products'):
pricelist = self.env.user.partner_id.property_product_pricelist
ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id})
result[0].update({'context': ctx})
return result
| agpl-3.0 | Python |
e6cb753e625d53281a0cc6146911d267aa147643 | fix argparse | samcheck/PyMedia,samcheck/PyMedia,samcheck/PyMedia | whats_on_tv.py | whats_on_tv.py | #!venv/bin/python3
# media_namer.py - Renames passed media files in a folder (and subfolders) using
# OMDB for movies and theTVDB for TV shows
import sys
import os
import logging
import argparse
import random
import subprocess
import shlex
import videoLister
def main():
# set up logging
logger = logging.getLogger(__name__)
logging.basicConfig(filename='whats_on_tv.log', filemode='w', level=logging.WARNING,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# set up argparse
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', help='Input directory to search.')
args = parser.parse_args()
if args.input:
in_path = args.input
else:
parser.print_help()
sys.exit(1)
# One time loop to generate a list of available media files in path
mList = []
for item in videoLister.videoDir(in_path):
logger.info("Found: {}".format(item))
mList.append(item)
# Randomly select a video to play
choice = random.choice(mList)
logger.info("Playing: {}".format(os.path.basename(choice)))
# Launch selected video with MPV in full screen
play_command = 'mpv "{}" --really-quiet --fs &'.format(choice)
proc = subprocess.Popen(shlex.split(play_command))
# use proc.terminate() to kill
if __name__ == '__main__':
main()
| #!venv/bin/python3
# media_namer.py - Renames passed media files in a folder (and subfolders) using
# OMDB for movies and theTVDB for TV shows
import sys
import os
import logging
import argparse
import random
import subprocess
import shlex
import videoLister
def main():
# set up logging
logger = logging.getLogger(__name__)
logging.basicConfig(filename='whats_on_tv.log', filemode='w', level=logging.WARNING,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# set up argparse
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', help='Input directory to clone.')
args = parser.parse_args()
if args.input:
in_path = args.input
else:
parser.print_help()
sys.exit(1)
# One time loop to generate a list of available media files in path
mList = []
for item in videoLister.videoDir(in_path):
logger.info("Found: {}".format(item))
mList.append(item)
# Randomly select a video to play
choice = random.choice(mList)
logger.info("Playing: {}".format(os.path.basename(choice)))
# Launch selected video with MPV in full screen
play_command = 'mpv "{}" --really-quiet --fs &'.format(choice)
proc = subprocess.Popen(shlex.split(play_command))
# use proc.terminate() to kill
if __name__ == '__main__':
main()
| mit | Python |
8f6abdc12292d3b8cc4ebd6a35563da05501aecd | add config parameter to dbImport | igemsoftware/Shenzhen_BGIC_0101_2013,erasche/jbrowse,SuLab/jbrowse,igemsoftware/Shenzhen_BGIC_0101_2013,igemsoftware/Shenzhen_BGIC_0101_2013,igemsoftware/Shenzhen_BGIC_0101_2013,limeng12/jbrowse,GreggHelt2/apollo-test,GMOD/jbrowse,SuLab/jbrowse,Arabidopsis-Information-Portal/jbrowse,erasche/jbrowse,erasche/jbrowse,nathandunn/jbrowse,GMOD/jbrowse,Arabidopsis-Information-Portal/jbrowse,erasche/jbrowse,GreggHelt2/apollo-test,GreggHelt2/apollo-test,limeng12/jbrowse,limeng12/jbrowse,limeng12/jbrowse,GMOD/jbrowse,SuLab/jbrowse,Arabidopsis-Information-Portal/jbrowse,nathandunn/jbrowse,igemsoftware/Shenzhen_BGIC_0101_2013,Arabidopsis-Information-Portal/jbrowse,limeng12/jbrowse,Arabidopsis-Information-Portal/jbrowse,erasche/jbrowse,limeng12/jbrowse,erasche/jbrowse,GreggHelt2/apollo-test,GMOD/jbrowse,igemsoftware/Shenzhen_BGIC_0101_2013,GreggHelt2/apollo-test,Arabidopsis-Information-Portal/jbrowse,nathandunn/jbrowse,nathandunn/jbrowse,erasche/jbrowse,GMOD/jbrowse,SuLab/jbrowse,limeng12/jbrowse,SuLab/jbrowse,Arabidopsis-Information-Portal/jbrowse,limeng12/jbrowse,GreggHelt2/apollo-test,SuLab/jbrowse,igemsoftware/Shenzhen_BGIC_0101_2013,GreggHelt2/apollo-test,SuLab/jbrowse,igemsoftware/Shenzhen_BGIC_0101_2013,erasche/jbrowse,nathandunn/jbrowse | python/db_importer.py | python/db_importer.py | import os
from json_generator import JsonGenerator
# example call:
# snpQuery = 'select chromStart as Start, chromEnd as End, name as Name, transcript, frame, alleleCount, funcCodes, alleles, codons, peptides'
# db_importer.dbImport(conn, "snp132CodingDbSnp", snpQuery, "chromEnd", "chrom", "../data/", "snp132CodingDbSnp")
def dbImport(conn, table, query, endCol, chromCol, dataDir,
trackLabel, key = None, chunkBytes = 200000,
compress = True,
config = {'style': {'className': 'feature2'}}):
query += " from %s where %s=? order by Start asc, End desc" \
% (table, chromCol)
cur = conn.execute("""
select %(chrom)s, max(%(end)s), count(*) from %(table)s group by %(chrom)s
""" % {'chrom': chromCol, 'end': endCol, 'table': table})
chromList = cur.fetchall()
for (chrom, refEnd, count) in chromList:
cur = conn.execute(query, (chrom,))
classes = [{
'attributes': [f[0] for f in cur.description],
'proto': {'Chrom': chrom}
}]
jsongen = JsonGenerator(dataDir, trackLabel, chrom, chunkBytes,
compress, classes, refEnd = refEnd,
writeHists = True, featureCount = count)
for row in cur:
jsongen.addSorted([0] + list(row))
jsongen.generateTrack()
jsongen.writeTrackEntry('FeatureTrack', config)
| import os
from json_generator import JsonGenerator
# example call:
# snpQuery = 'select chromStart as Start, chromEnd as End, name as Name, transcript, frame, alleleCount, funcCodes, alleles, codons, peptides'
# db_importer.dbImport(conn, "snp132CodingDbSnp", snpQuery, "chromEnd", "chrom", "../data/", "snp132CodingDbSnp")
def dbImport(conn, table, query, endCol, chromCol, dataDir,
trackLabel, key = None, chunkBytes = 200000, compress = True):
query += " from %s where %s=? order by Start asc, End desc" \
% (table, chromCol)
cur = conn.execute("""
select %(chrom)s, max(%(end)s), count(*) from %(table)s group by %(chrom)s
""" % {'chrom': chromCol, 'end': endCol, 'table': table})
chromList = cur.fetchall()
for (chrom, refEnd, count) in chromList:
cur = conn.execute(query, (chrom,))
classes = [{
'attributes': [f[0] for f in cur.description],
'proto': {'Chrom': chrom}
}]
jsongen = JsonGenerator(dataDir, trackLabel, chrom, chunkBytes,
compress, classes, refEnd = refEnd,
writeHists = True, featureCount = count)
for row in cur:
jsongen.addSorted([0] + list(row))
jsongen.generateTrack()
jsongen.writeTrackEntry('FeatureTrack', {
'style': {
'className': 'feature2'
}
})
| lgpl-2.1 | Python |
d79c82e7406284eb0f3c696bd7cc5ecc08b66106 | Adjust settings path for wsgi | prattl/wepickheroes,prattl/wepickheroes,prattl/wepickheroes,prattl/wepickheroes | api/wph/wsgi.py | api/wph/wsgi.py | """
WSGI config for api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.wph.settings")
application = get_wsgi_application()
| """
WSGI config for api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wph.settings")
application = get_wsgi_application()
| mit | Python |
bc95cf222a2f59e77962fb4ae5613d4695853944 | Add imports in halfedge_meshes/__init__.py | carlosrojas/halfedge_mesh | halfedge_mesh/__init__.py | halfedge_mesh/__init__.py | from halfedge_mesh import HalfedgeMesh
from halfedge_mesh import Vertex
from halfedge_mesh import Halfedge
from halfedge_mesh import Facet
| from halfedge_mesh import HalfedgeMesh | mit | Python |
bd369471abb00478f7b5e03a22f94e25133d7d78 | add __version__ | josesho/bootstrap_contrast | bootstrap_contrast/__init__.py | bootstrap_contrast/__init__.py | from .bootstrap_contrast import *
__version__=0.327
| from .bootstrap_contrast import * | mit | Python |
e9d6884e43869f5d22e882b0aec2a534fb2a735b | 部署2.1.1 | HeathKang/flasky,HeathKang/flasky,HeathKang/flasky | app/__init__.py | app/__init__.py | from flask import Flask,render_template
from flask.ext.bootstrap import Bootstrap
from flask.ext.mail import Mail
from flask.ext.moment import Moment
from flask.ext.sqlalchemy import SQLAlchemy
from config import config
from flask.ext.login import LoginManager
from flask.ext.pagedown import PageDown
bootstrap = Bootstrap()
mail = Mail()
moment = Moment()
db = SQLAlchemy()
pagedown = PageDown()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
mail.init_app(app)
moment.init_app(app)
db.init_app(app)
login_manager.init_app(app)
pagedown.init_app(app)
if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
from flask.ext.sslify import sslify
sslify = SSLify(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint,url_prefix='/auth')
from .api_1_0 import api as api_1_0_blueprint
app.register_blueprint(api_1_0_blueprint,url_prefix='/api/v1.0')
return app
| from flask import Flask,render_template
from flask.ext.bootstrap import Bootstrap
from flask.ext.mail import Mail
from flask.ext.moment import Moment
from flask.ext.sqlalchemy import SQLAlchemy
from config import config
from flask.ext.login import LoginManager
from flask.ext.pagedown import PageDown
bootstrap = Bootstrap()
mail = Mail()
moment = Moment()
db = SQLAlchemy()
pagedown = PageDown() #富文本编辑器
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
mail.init_app(app)
moment.init_app(app)
db.init_app(app)
login_manager.init_app(app)
pagedown.init_app(app)
if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
from flask.ext.sslify import sslify
sslify = SSLify(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint,url_prefix='/auth')
from .api_1_0 import api as api_1_0_blueprint
app.register_blueprint(api_1_0_blueprint,url_prefix='/api/v1.0')
return app
| mit | Python |
5974af661e1c8d11aa92c5fb36ee41e10f616202 | Update test file | github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql | python/ql/test/experimental/dataflow/ApiGraphs/test.py | python/ql/test/experimental/dataflow/ApiGraphs/test.py | import a1 #$ use=moduleImport("a1")
x = a1.blah1 #$ use=moduleImport("a1").getMember("blah1")
import a2 as m2 #$ use=moduleImport("a2")
x2 = m2.blah2 #$ use=moduleImport("a2").getMember("blah2")
import a3.b3 as m3 #$ use=moduleImport("a3").getMember("b3")
x3 = m3.blah3 #$ use=moduleImport("a3").getMember("b3").getMember("blah3")
from a4.b4 import c4 as m4 #$ use=moduleImport("a4").getMember("b4").getMember("c4")
x4 = m4.blah4 #$ use=moduleImport("a4").getMember("b4").getMember("c4").getMember("blah4")
import a.b.c.d #$ use=moduleImport("a")
ab = a.b #$ use=moduleImport("a").getMember("b")
abc = ab.c #$ use=moduleImport("a").getMember("b").getMember("c")
abcd = abc.d #$ use=moduleImport("a").getMember("b").getMember("c").getMember("d")
x5 = abcd.method() #$ use=moduleImport("a").getMember("b").getMember("c").getMember("d").getMember("method").getReturn()
from a6 import m6 #$ use=moduleImport("a6").getMember("m6")
x6 = m6().foo().bar() #$ use=moduleImport("a6").getMember("m6").getReturn().getMember("foo").getReturn().getMember("bar").getReturn()
# Relative imports. These are ignored
from .foo import bar
from ..foobar import baz
# Use of imports across scopes
def use_m4():
x = m4.blah4 #$ use=moduleImport("a4").getMember("b4").getMember("c4").getMember("blah4")
def local_import_use():
from foo import bar #$ use=moduleImport("foo").getMember("bar")
x = bar() #$ use=moduleImport("foo").getMember("bar").getReturn()
from eggs import ham as spam #$ use=moduleImport("eggs").getMember("ham")
def bbb():
f = spam #$ use=moduleImport("eggs").getMember("ham")
from danger import SOURCE #$ use=moduleImport("danger").getMember("SOURCE")
foo = SOURCE #$ use=moduleImport("danger").getMember("SOURCE")
def change_foo():
global foo
foo = SOURCE #$ use=moduleImport("danger").getMember("SOURCE")
def f():
global foo
sink(foo) #$ use=moduleImport("danger").getMember("SOURCE")
foo = NONSOURCE
change_foo()
sink(foo) #$ MISSING: use=moduleImport("danger").getMember("SOURCE")
| import a1 #$ use=moduleImport("a1")
x = a1.blah1 #$ use=moduleImport("a1").getMember("blah1")
import a2 as m2 #$ use=moduleImport("a2")
x2 = m2.blah2 #$ use=moduleImport("a2").getMember("blah2")
import a3.b3 as m3 #$ use=moduleImport("a3").getMember("b3")
x3 = m3.blah3 #$ use=moduleImport("a3").getMember("b3").getMember("blah3")
from a4.b4 import c4 as m4 #$ use=moduleImport("a4").getMember("b4").getMember("c4")
x4 = m4.blah4 #$ use=moduleImport("a4").getMember("b4").getMember("c4").getMember("blah4")
import a.b.c.d #$ use=moduleImport("a")
ab = a.b #$ use=moduleImport("a").getMember("b")
abc = ab.c #$ use=moduleImport("a").getMember("b").getMember("c")
abcd = abc.d #$ use=moduleImport("a").getMember("b").getMember("c").getMember("d")
x5 = abcd() #$ use=moduleImport("a").getMember("b").getMember("c").getMember("d").getReturn()
y5 = x5.method() #$ use=moduleImport("a").getMember("b").getMember("c").getMember("d").getReturn().getMember("method").getReturn()
# Relative imports. These are ignored
from .foo import bar
from ..foobar import baz | mit | Python |
a11c4b7c4f546156d5d70c9799d148b99e932e2c | Update __init__.py | SpaceHotDog/Flask_API | app/__init__.py | app/__init__.py | # app/__init__.py
from flask_api import FlaskAPI
from flask_sqlalchemy import SQLAlchemy
# local import
from instance.config import app_config
# initialize sql-alchemy
db = SQLAlchemy()
def create_app(config_name):
app = FlaskAPI(__name__, instance_relative_config=True)
app.config.from_object(app_config[config_name])
app.config.from_pyfile('config.py')
# We've also disabled track modifications for SQLAlchemy because
# it'll be deprecated in future due to it's significant performance overhead.
# For debugging enthusiasts, you can set it to True for now.
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.init_app(app)
return app
| # app/__init__.py
from flask_api import FlaskAPI
from flask_sqlalchemy import SQLAlchemy
# local import
from instance.config import app_config
# initialize sql-alchemy
db = SQLAlchemy()
def create_app(config_name):
app = FlaskAPI(__name__, instance_relative_config=True)
app.config.from_object(app_config[config_name])
app.config.from_pyfile('config.py')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.init_app(app)
return app
| unlicense | Python |
672d88d69c28d5304dbc2b1ed055289671a9444f | fix views in init for import | dschmaryl/golf-flask,dschmaryl/golf-flask,dschmaryl/golf-flask | app/__init__.py | app/__init__.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from flask import Flask
from flask_admin import Admin
from flask_admin.contrib.sqla import ModelView
from flask_login import LoginManager, current_user
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
app = Flask(__name__)
app.config.from_object('config')
app.static_folder = 'static'
app.static_url_path = ''
admin = Admin(app, name='golf-stats', template_mode='bootstrap3')
bcrypt = Bcrypt(app)
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
from app import views, models
class AdminView(ModelView):
def is_accessible(self):
if not current_user.is_authenticated:
return False
return current_user.username == 'daryl'
admin.add_view(AdminView(models.User, db.session))
admin.add_view(AdminView(models.Round, db.session))
admin.add_view(AdminView(models.Hole, db.session))
admin.add_view(AdminView(models.Course, db.session))
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from flask import Flask
from flask_admin import Admin
from flask_admin.contrib.sqla import ModelView
from flask_login import LoginManager, current_user
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
app = Flask(__name__)
app.config.from_object('config')
app.static_folder = 'static'
app.static_url_path = ''
admin = Admin(app, name='golf-stats', template_mode='bootstrap3')
bcrypt = Bcrypt(app)
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
from app import models
class AdminView(ModelView):
def is_accessible(self):
if not current_user.is_authenticated:
return False
return current_user.username == 'daryl'
admin.add_view(AdminView(models.User, db.session))
admin.add_view(AdminView(models.Round, db.session))
admin.add_view(AdminView(models.Hole, db.session))
admin.add_view(AdminView(models.Course, db.session))
| mit | Python |
9107d03f3c19cbc1f488478dc428dfcfaac93ded | update test | signalsciences/SigSciApiPy | test_SigSci.py | test_SigSci.py | from __future__ import print_function
from builtins import str
import unittest
import mock
from SigSciApiPy.SigSci import SigSciAPI
def mocked_requests_get(*args, **kwargs):
class MockResponse(object):
def __init__(self, json_data, status_code):
self.json_data = json_data
self.status_code = status_code
# TODO: add an actual "next" URI to make sure looping works etc.
self.text = '{"next": {"uri": ""}, "data": {"id": "testid", "serverHostname": "testhost"}}'
def json(self):
return self.json_data
return MockResponse({"key1": "value1"}, 200)
def mocked_requests_post(*args, **kwargs):
class MockResponse(object):
def __init__(self, json_data, status_code):
self.status_code = status_code
self.json_data = json_data
self.cookies = {}
def json(self):
return self.json_data
return MockResponse({"token": "testtoken"}, 200)
class TestSigSciAPI(unittest.TestCase):
@mock.patch("requests.get", side_effect=mocked_requests_get)
@mock.patch("requests.post", side_effect=mocked_requests_post)
def test_fetch(self, mock_get, mock_post):
# Assert requests.get calls
sigsci = SigSciAPI()
sigsci.email = "testemail"
sigsci.password = "testpass"
sigsci.corp = "testcorp"
sigsci.site = "testsite"
sigsci.authenticate()
sigsci.get_feed_requests()
sigsci.get_list_events()
def test_build_search_query(self):
sigsci = SigSciAPI()
sigsci.tags = ['SQLI', 'XSS']
sigsci.ip = '127.0.0.1'
sigsci.build_search_query()
self.assertEqual(str(sigsci.query).rstrip(), 'from:-1h ip:127.0.0.1 sort:time-asc tag:SQLI tag:XSS')
if __name__ == "__main__":
unittest.main()
| from __future__ import print_function
from builtins import str
import unittest
import mock
from SigSciApiPy.SigSci import SigSciAPI
def mocked_requests_get(*args, **kwargs):
class MockResponse(object):
def __init__(self, json_data, status_code):
self.json_data = json_data
self.status_code = status_code
# TODO: add an actual "next" URI to make sure looping works etc.
self.text = '{"next": {"uri": ""}, "data": {"id": "testid", "serverHostname": "testhost"}}'
def json(self):
return self.json_data
return MockResponse({"key1": "value1"}, 200)
def mocked_requests_post(*args, **kwargs):
class MockResponse(object):
def __init__(self, json_data, status_code):
self.status_code = status_code
self.json_data = json_data
self.cookies = {}
def json(self):
return self.json_data
return MockResponse({"token": "testtoken"}, 200)
class TestSigSciAPI(unittest.TestCase):
@mock.patch("requests.get", side_effect=mocked_requests_get)
@mock.patch("requests.post", side_effect=mocked_requests_post)
def test_fetch(self, mock_get, mock_post):
# Assert requests.get calls
sigsci = SigSciAPI()
sigsci.email = "testemail"
sigsci.password = "testpass"
sigsci.corp = "testcorp"
sigsci.site = "testsite"
sigsci.authenticate()
sigsci.get_feed_requests()
sigsci.get_list_events()
def test_build_search_query(self):
sigsci = SigSciAPI()
sigsci.tags = ['SQLI', 'XSS']
sigsci.ip = '127.0.0.1'
sigsci.build_search_query()
self.assertEqual(str(sigsci.query).rstrip(), 'from:-1h ip:127.0.0.1 sort:time-desc tag:SQLI tag:XSS')
if __name__ == "__main__":
unittest.main()
| mit | Python |
e4b9633ddd4c5926efd6dec56d70b9c1ac9a3b31 | support search by attribute name | cartologic/cartoview,cartologic/cartoview,cartologic/cartoview,cartologic/cartoview | cartoview/app_manager/rest.py | cartoview/app_manager/rest.py |
from cartoview.app_manager.models import AppInstance
from geonode.api.resourcebase_api import *
from .resources import FileUploadResource
from tastypie.resources import ModelResource
from tastypie import fields
from geonode.maps.models import Map as GeonodeMap, MapLayer as GeonodeMapLayer
from geonode.layers.models import Layer, Attribute
from tastypie.constants import ALL_WITH_RELATIONS, ALL
class GeonodeMapLayerResource(ModelResource):
class Meta:
queryset = GeonodeMapLayer.objects.distinct()
class GeonodeMapResource(ModelResource):
map_layers = fields.ToManyField(GeonodeMapLayerResource, 'layer_set', null=True, full=True)
class Meta:
queryset = GeonodeMap.objects.distinct().order_by('-date')
class GeonodeLayerResource(ModelResource):
class Meta:
queryset = Layer.objects.all()
excludes = ['csw_anytext', 'metadata_xml']
filtering = {"typename": ALL}
class GeonodeLayerAttributeResource(ModelResource):
layer = fields.ForeignKey(GeonodeLayerResource,'layer')
class Meta:
queryset = Attribute.objects.all()
filtering = {
"layer": ALL_WITH_RELATIONS,
"attribute": ALL_WITH_RELATIONS
}
class AppResource(FileUploadResource):
class Meta(FileUploadResource.Meta):
from models import App
queryset = App.objects.all()
filtering = {"name": ALL ,"title":ALL}
can_edit = True
class AppInstanceResource(CommonModelApi):
app = fields.ToOneField(AppResource, 'app', full=True)
map = fields.ForeignKey(GeonodeMapResource, 'map', full=True)
class Meta(CommonMetaApi):
filtering = CommonMetaApi.filtering
filtering.update({'app': ALL_WITH_RELATIONS})
queryset = AppInstance.objects.distinct().order_by('-date')
if settings.RESOURCE_PUBLISHING:
queryset = queryset.filter(is_published=True)
resource_name = 'appinstances'
|
from cartoview.app_manager.models import AppInstance
from geonode.api.resourcebase_api import *
from .resources import FileUploadResource
from tastypie.resources import ModelResource
from tastypie import fields
from geonode.maps.models import Map as GeonodeMap, MapLayer as GeonodeMapLayer
from geonode.layers.models import Layer, Attribute
from tastypie.constants import ALL_WITH_RELATIONS, ALL
class GeonodeMapLayerResource(ModelResource):
class Meta:
queryset = GeonodeMapLayer.objects.distinct()
class GeonodeMapResource(ModelResource):
map_layers = fields.ToManyField(GeonodeMapLayerResource, 'layer_set', null=True, full=True)
class Meta:
queryset = GeonodeMap.objects.distinct().order_by('-date')
class GeonodeLayerResource(ModelResource):
class Meta:
queryset = Layer.objects.all()
excludes = ['csw_anytext', 'metadata_xml']
filtering = {"typename": ALL}
class GeonodeLayerAttributeResource(ModelResource):
layer = fields.ForeignKey(GeonodeLayerResource,'layer')
class Meta:
queryset = Attribute.objects.all().order_by('display_order')
filtering = {"layer": ALL_WITH_RELATIONS}
class AppResource(FileUploadResource):
class Meta(FileUploadResource.Meta):
from models import App
queryset = App.objects.all()
filtering = {"name": ALL ,"title":ALL}
can_edit = True
class AppInstanceResource(CommonModelApi):
app = fields.ToOneField(AppResource, 'app', full=True)
map = fields.ForeignKey(GeonodeMapResource, 'map', full=True)
class Meta(CommonMetaApi):
filtering = CommonMetaApi.filtering
filtering.update({'app': ALL_WITH_RELATIONS})
queryset = AppInstance.objects.distinct().order_by('-date')
if settings.RESOURCE_PUBLISHING:
queryset = queryset.filter(is_published=True)
resource_name = 'appinstances'
| bsd-2-clause | Python |
ca9ca7c4a4ca951a0584f16716057629f8560021 | Remove unused imports | alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend | app/__init__.py | app/__init__.py | import re
from datetime import timedelta
from flask import Flask, request, redirect, session, Markup
from flask_login import LoginManager
from flask_wtf.csrf import CsrfProtect
from dmutils import apiclient, init_app, flask_featureflags
from dmutils.user import User
from config import configs
from markdown import markdown
data_api_client = apiclient.DataAPIClient()
login_manager = LoginManager()
feature_flags = flask_featureflags.FeatureFlag()
csrf = CsrfProtect()
def create_app(config_name):
application = Flask(__name__,
static_folder='static/',
static_url_path=configs[config_name].STATIC_URL_PATH)
init_app(
application,
configs[config_name],
data_api_client=data_api_client,
feature_flags=feature_flags,
login_manager=login_manager,
)
application.permanent_session_lifetime = timedelta(hours=1)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint,
url_prefix='/suppliers')
application.register_blueprint(main_blueprint,
url_prefix='/suppliers')
login_manager.login_view = 'main.render_login'
login_manager.login_message_category = "must_login"
main_blueprint.config = application.config.copy()
csrf.init_app(application)
@application.before_request
def remove_trailing_slash():
if request.path.endswith('/'):
return redirect(request.path[:-1], code=301)
@application.before_request
def refresh_session():
session.permanent = True
session.modified = True
@application.template_filter('markdown')
def markdown_filter(data):
return Markup(markdown(data))
return application
@login_manager.user_loader
def load_user(user_id):
return User.load_user(data_api_client, user_id)
def config_attrs(config):
"""Returns config attributes from a Config object"""
p = re.compile('^[A-Z_]+$')
return filter(lambda attr: bool(p.match(attr)), dir(config))
| import re
from datetime import timedelta
from flask import Flask, request, redirect, session
from flask_login import LoginManager
from flask_wtf.csrf import CsrfProtect
from dmutils import apiclient, init_app, flask_featureflags
from dmutils.user import User
from config import configs
from jinja2 import Markup, escape
from flask import Markup
from markdown import markdown
data_api_client = apiclient.DataAPIClient()
login_manager = LoginManager()
feature_flags = flask_featureflags.FeatureFlag()
csrf = CsrfProtect()
def create_app(config_name):
application = Flask(__name__,
static_folder='static/',
static_url_path=configs[config_name].STATIC_URL_PATH)
init_app(
application,
configs[config_name],
data_api_client=data_api_client,
feature_flags=feature_flags,
login_manager=login_manager,
)
application.permanent_session_lifetime = timedelta(hours=1)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint,
url_prefix='/suppliers')
application.register_blueprint(main_blueprint,
url_prefix='/suppliers')
login_manager.login_view = 'main.render_login'
login_manager.login_message_category = "must_login"
main_blueprint.config = application.config.copy()
csrf.init_app(application)
@application.before_request
def remove_trailing_slash():
if request.path.endswith('/'):
return redirect(request.path[:-1], code=301)
@application.before_request
def refresh_session():
session.permanent = True
session.modified = True
@application.template_filter('markdown')
def markdown_filter(data):
return Markup(markdown(data))
return application
@login_manager.user_loader
def load_user(user_id):
return User.load_user(data_api_client, user_id)
def config_attrs(config):
"""Returns config attributes from a Config object"""
p = re.compile('^[A-Z_]+$')
return filter(lambda attr: bool(p.match(attr)), dir(config))
| mit | Python |
31ea2191bb83101d0b9123f8e6a427aeefa9dee3 | test getsize | Volumental/fakefs | test_fakefs.py | test_fakefs.py | import unittest
import fakefs
from nose.tools import assert_equal, assert_true, assert_false, raises
import os
class FakeTestCase(unittest.TestCase):
def run(self, result=None):
self.fs = fakefs.FakeFilesystem()
with self.fs.monkey.patch():
super(FakeTestCase, self).run(result)
def test_open_write(self):
with open('/a.txt', 'w') as f:
f.write('abc')
assert_equal(b'abc', self.fs.content_for('/a.txt'))
def test_open_read(self):
self.fs.add_file('/x.txt', "xyz")
with open('/x.txt') as f:
data = f.read()
assert_equal("xyz", data)
def test_exists_missing(self):
assert_false(os.path.exists('/nope'))
def test_exists(self):
self.fs.add_file('/yup', '')
assert_true(os.path.exists('/yup'))
@raises(FileNotFoundError)
def test_getsize_missing(self):
os.path.getsize('/a')
def test_getsiz(self):
self.fs.add_file('/a', '123')
assert_equal(os.path.getsize('/a'), 3)
| import unittest
import fakefs
from nose.tools import assert_equal, assert_true, assert_false
import os
class FakeTestCase(unittest.TestCase):
def run(self, result=None):
self.fs = fakefs.FakeFilesystem()
with self.fs.monkey.patch():
super(FakeTestCase, self).run(result)
def test_open_write(self):
with open('/a.txt', 'w') as f:
f.write('abc')
assert_equal(b'abc', self.fs.content_for('/a.txt'))
def test_open_read(self):
self.fs.add_file('/x.txt', "xyz")
with open('/x.txt') as f:
data = f.read()
assert_equal("xyz", data)
def test_exists_missing(self):
assert_false(os.path.exists('/nope'))
def test_exists(self):
self.fs.add_file('/yup', '')
assert_true(os.path.exists('/yup'))
| mit | Python |
0ec6f767d15bf59d6ba9f5088c88923136f18e2a | support mengxue | chinese-poetry/chinese-poetry,chinese-poetry/chinese-poetry | test_poetry.py | test_poetry.py | # -*- coding: utf-8 -*-
import os
import json
import sys
import traceback
import functools
def check_json(f, _dir):
if not f.endswith('.json'):
return True
filepath = os.path.join(_dir, f)
with open(filepath) as file:
try:
_ = json.loads(file.read())
sys.stdout.write(f"{filepath} 校验成功")
return True
except:
sys.stderr.write(traceback.format_exc())
assert False, f"{filepath} 校验失败"
def __check_path__(path):
"""校验 指定目录 中的 json 文件"""
[ check_json(f, path) for f in os.listdir(path) ]
test_shi = functools.partial(__check_path__, './json')
test_ci = functools.partial(__check_path__, './ci')
test_shijing = functools.partial(__check_path__, './shijing')
test_lunyu = functools.partial(__check_path__, './lunyu')
test_huajianji = functools.partial(__check_path__, u'./wudai/huajianji/')
test_nantang2 = functools.partial(__check_path__, u'./wudai/nantang/')
test_youmengying = functools.partial(__check_path__, u'./youmengying/')
test_sishuwujing = functools.partial(__check_path__, u'./sishuwujing/')
test_yuanqu = functools.partial(__check_path__, u'./yuanqu/')
test_mengxue = functools.partial(__check_path__, u'./mengxue')
| #! -*- coding: utf-8 -*-
# import sqlite3
import os
import json
import sys
import traceback
import functools
def check_json(f, _dir):
if not f.endswith('.json'):
return True
filepath = os.path.join(_dir, f)
with open(filepath) as file:
try:
_ = json.loads(file.read())
print(u"%s 校验成功" % _dir)
return True
except:
sys.stderr.write(traceback.format_exc())
assert False, u"校验(%s)失败" % f
def __check_path__(path):
"""校验 指定目录 中的 json 文件"""
[ check_json(f, path) for f in os.listdir(path) ]
test_shi = functools.partial(__check_path__, './json')
test_ci = functools.partial(__check_path__, './ci')
test_shijing = functools.partial(__check_path__, './shijing')
test_lunyu = functools.partial(__check_path__, './lunyu')
test_huajianji = functools.partial(__check_path__, u'./wudai/huajianji/')
test_nantang2 = functools.partial(__check_path__, u'./wudai/nantang/')
test_youmengying = functools.partial(__check_path__, u'./youmengying/')
test_sishuwujing = functools.partial(__check_path__, u'./sishuwujing/')
test_yuanqu = functools.partial(__check_path__, u'./yuanqu/')
| mit | Python |
ffb2b5a21018a59e99f5a2a959c30d95456244e0 | Update rtconfig.py | geniusgogo/rt-thread,weety/rt-thread,RT-Thread/rt-thread,RT-Thread/rt-thread,hezlog/rt-thread,RT-Thread/rt-thread,armink/rt-thread,nongxiaoming/rt-thread,armink/rt-thread,hezlog/rt-thread,geniusgogo/rt-thread,RT-Thread/rt-thread,nongxiaoming/rt-thread,nongxiaoming/rt-thread,RT-Thread/rt-thread,weety/rt-thread,hezlog/rt-thread,nongxiaoming/rt-thread,weety/rt-thread,nongxiaoming/rt-thread,weety/rt-thread,nongxiaoming/rt-thread,nongxiaoming/rt-thread,weety/rt-thread,geniusgogo/rt-thread,RT-Thread/rt-thread,hezlog/rt-thread,hezlog/rt-thread,armink/rt-thread,armink/rt-thread,ArdaFu/rt-thread,geniusgogo/rt-thread,ArdaFu/rt-thread,ArdaFu/rt-thread,armink/rt-thread,weety/rt-thread,geniusgogo/rt-thread,RT-Thread/rt-thread,ArdaFu/rt-thread,armink/rt-thread,hezlog/rt-thread,armink/rt-thread,hezlog/rt-thread,ArdaFu/rt-thread,weety/rt-thread,geniusgogo/rt-thread,ArdaFu/rt-thread,geniusgogo/rt-thread,ArdaFu/rt-thread | bsp/allwinner_tina/rtconfig.py | bsp/allwinner_tina/rtconfig.py | import os
# toolchains options
ARCH ='arm'
CPU ='arm9'
CROSS_TOOL ='gcc'
if os.getenv('RTT_ROOT'):
RTT_ROOT = os.getenv('RTT_ROOT')
else:
RTT_ROOT = '../..'
if os.getenv('RTT_CC'):
CROSS_TOOL = os.getenv('RTT_CC')
if CROSS_TOOL == 'gcc':
PLATFORM = 'gcc'
EXEC_PATH = r'E:\work\env\tools\gnu_gcc\arm_gcc\mingw\bin'
else:
print('Please make sure your toolchains is GNU GCC!')
exit(0)
if os.getenv('RTT_EXEC_PATH'):
EXEC_PATH = os.getenv('RTT_EXEC_PATH')
BUILD = 'release'
# BUILD = 'debug'
if PLATFORM == 'gcc':
# toolchains
PREFIX = 'arm-none-eabi-'
CC = PREFIX + 'gcc'
CXX = PREFIX + 'g++'
AS = PREFIX + 'gcc'
AR = PREFIX + 'ar'
LINK = PREFIX + 'g++'
TARGET_EXT = 'elf'
SIZE = PREFIX + 'size'
OBJDUMP = PREFIX + 'objdump'
OBJCPY = PREFIX + 'objcopy'
DEVICE = ' -mcpu=arm926ej-s -ffunction-sections -fdata-sections'
CFLAGS = DEVICE + ' -Wall'
AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp'
LFLAGS = DEVICE + ' -nostartfiles -Wl,--gc-sections,-Map=rtthread.map,-cref,-u,system_vectors -T link.lds'
CPATH = ''
LPATH = ''
if BUILD == 'debug':
CFLAGS += ' -O0 -gdwarf-2'
AFLAGS += ' -gdwarf-2'
else:
CFLAGS += ' -O2'
CXXFLAGS = CFLAGS
DUMP_ACTION = OBJDUMP + ' -D -S $TARGET > rtt.asm\n'
POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'
| import os
# toolchains options
ARCH ='arm'
CPU ='arm9'
CROSS_TOOL ='gcc'
if os.getenv('RTT_ROOT'):
RTT_ROOT = os.getenv('RTT_ROOT')
else:
RTT_ROOT = '../..'
if os.getenv('RTT_CC'):
CROSS_TOOL = os.getenv('RTT_CC')
if CROSS_TOOL == 'gcc':
PLATFORM = 'gcc'
EXEC_PATH = r'E:\work\env\tools\gnu_gcc\arm_gcc\mingw\bin'
else:
print 'Please make sure your toolchains is GNU GCC!'
exit(0)
if os.getenv('RTT_EXEC_PATH'):
EXEC_PATH = os.getenv('RTT_EXEC_PATH')
BUILD = 'release'
# BUILD = 'debug'
if PLATFORM == 'gcc':
# toolchains
PREFIX = 'arm-none-eabi-'
CC = PREFIX + 'gcc'
CXX = PREFIX + 'g++'
AS = PREFIX + 'gcc'
AR = PREFIX + 'ar'
LINK = PREFIX + 'g++'
TARGET_EXT = 'elf'
SIZE = PREFIX + 'size'
OBJDUMP = PREFIX + 'objdump'
OBJCPY = PREFIX + 'objcopy'
DEVICE = ' -mcpu=arm926ej-s -ffunction-sections -fdata-sections'
CFLAGS = DEVICE + ' -Wall'
AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp'
LFLAGS = DEVICE + ' -nostartfiles -Wl,--gc-sections,-Map=rtthread.map,-cref,-u,system_vectors -T link.lds'
CPATH = ''
LPATH = ''
if BUILD == 'debug':
CFLAGS += ' -O0 -gdwarf-2'
AFLAGS += ' -gdwarf-2'
else:
CFLAGS += ' -O2'
CXXFLAGS = CFLAGS
DUMP_ACTION = OBJDUMP + ' -D -S $TARGET > rtt.asm\n'
POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'
| apache-2.0 | Python |
b1ad5c8a4d2e9a116a6bb385dbdbeb777f6852b0 | Fix ImportError in tests for `import_module` in django < 1.9 and >= 1.9 | hellysmile/django-mongo-sessions | tests/tests.py | tests/tests.py | # tests stolen from https://github.com/martinrusev/django-redis-sessions
try:
# For Django versions < 1.9
from django.utils.importlib import import_module
except ImportError:
# For Django versions >= 1.9
from django.utils.module_loading import import_module
from django.conf import settings
import time
from nose.tools import eq_
session_engine = import_module(settings.SESSION_ENGINE).SessionStore()
def test_modify_and_keys():
eq_(session_engine.modified, False)
session_engine['test'] = 'test_me'
eq_(session_engine.modified, True)
eq_(session_engine['test'], 'test_me')
def test_save_and_delete():
session_engine['key'] = 'value'
session_engine.save()
eq_(session_engine.exists(session_engine.session_key), True)
session_engine.delete(session_engine.session_key)
eq_(session_engine.exists(session_engine.session_key), False)
def test_flush():
session_engine['key'] = 'another_value'
session_engine.save()
key = session_engine.session_key
session_engine.flush()
eq_(session_engine.exists(key), False)
def test_items():
session_engine['item1'], session_engine['item2'] = 1, 2
session_engine.save()
# Python 3.*
eq_(sorted(list(session_engine.items())), [('item1', 1), ('item2', 2)])
def test_expiry():
    """A saved session disappears after the configured cookie age.

    NOTE(review): the 11s sleep assumes SESSION_COOKIE_AGE is ~10s in the
    test settings -- TODO confirm.
    """
    # Test if the expiry age is set correctly
    eq_(session_engine.get_expiry_age(), settings.SESSION_COOKIE_AGE)
    session_engine['key'] = 'expiring_value'
    session_engine.save()
    key = session_engine.session_key
    eq_(session_engine.exists(key), True)
    time.sleep(11)
    eq_(session_engine.exists(key), False)
def test_save_and_load():
    """load() returns the data that was saved in the backend."""
    session_engine.setdefault('item_test', 8)
    session_engine.save()
    session_data = session_engine.load()
    eq_(session_data.get('item_test'), 8)
| # tests stolen from https://github.com/martinrusev/django-redis-sessions
try:
    # For Django versions < 1.9
    from django.utils.importlib import import_module
except ImportError:
    # BUG FIX: django.utils.importlib was removed in Django 1.9; fall back
    # to the supported location so importing this module does not crash.
    from django.utils.module_loading import import_module
from django.conf import settings
import time
from nose.tools import eq_
# Single store instance shared (and mutated in sequence) by all tests below.
session_engine = import_module(settings.SESSION_ENGINE).SessionStore()
def test_modify_and_keys():
    """Setting a key marks the session modified and stores the value."""
    eq_(session_engine.modified, False)
    session_engine['test'] = 'test_me'
    eq_(session_engine.modified, True)
    eq_(session_engine['test'], 'test_me')
def test_save_and_delete():
    """A saved session exists in the backend; deleting it removes it."""
    session_engine['key'] = 'value'
    session_engine.save()
    eq_(session_engine.exists(session_engine.session_key), True)
    session_engine.delete(session_engine.session_key)
    eq_(session_engine.exists(session_engine.session_key), False)
def test_flush():
    """flush() discards the stored session under its previous key."""
    session_engine['key'] = 'another_value'
    session_engine.save()
    key = session_engine.session_key
    session_engine.flush()
    eq_(session_engine.exists(key), False)
def test_items():
    """items() round-trips the stored key/value pairs."""
    session_engine['item1'], session_engine['item2'] = 1, 2
    session_engine.save()
    # Python 3.*
    eq_(sorted(list(session_engine.items())), [('item1', 1), ('item2', 2)])
def test_expiry():
    """A saved session disappears after the configured cookie age.

    NOTE(review): the 11s sleep assumes SESSION_COOKIE_AGE is ~10s in the
    test settings -- TODO confirm.
    """
    # Test if the expiry age is set correctly
    eq_(session_engine.get_expiry_age(), settings.SESSION_COOKIE_AGE)
    session_engine['key'] = 'expiring_value'
    session_engine.save()
    key = session_engine.session_key
    eq_(session_engine.exists(key), True)
    time.sleep(11)
    eq_(session_engine.exists(key), False)
def test_save_and_load():
    """load() returns the data that was saved in the backend."""
    session_engine.setdefault('item_test', 8)
    session_engine.save()
    session_data = session_engine.load()
    eq_(session_data.get('item_test'), 8)
| apache-2.0 | Python |
1172c5e538b09dc3b65c44c1deff8ba2a9ad669c | check back | tqchen/tinyflow,ZihengJiang/tinyflow,tqchen/tinyflow,tqchen/tinyflow,ZihengJiang/tinyflow,ZihengJiang/tinyflow | example/mnist_softmax.py | example/mnist_softmax.py | """Tinyflow example code.
This code is adapted from Tensorflow's MNIST Tutorial with minimum code changes.
"""
import tinyflow as tf
from tinyflow.datasets import get_mnist
# Create the model
# Softmax regression: a single 784x10 weight matrix, no bias term.
x = tf.placeholder(tf.float32, [None, 784])
W = tf.Variable(tf.zeros([784, 10]))
y = tf.nn.softmax(tf.matmul(x, W))
# Define loss and optimizer
y_ = tf.placeholder(tf.float32, [None, 10])
# Cross-entropy between predicted distribution y and one-hot labels y_.
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
sess = tf.Session()
sess.run(tf.initialize_all_variables())
# get the mnist dataset
mnist = get_mnist(flatten=True, onehot=True)
# 1000 SGD steps with mini-batches of 100 examples.
for i in range(1000):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    sess.run(train_step, feed_dict={x: batch_xs, y_:batch_ys})
# Report accuracy on the test split.
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
accuracy = tf.reduce_mean(correct_prediction)
print(sess.run(accuracy, feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
| """Tinyflow example code.
This code is adapted from Tensorflow's MNIST Tutorial with minimum code changes.
"""
import tinyflow as tf
from tinyflow.datasets import get_mnist
stdev = 0.01
# Create the model
x = tf.placeholder(tf.float32, [None, 784])
conv1_filter = tf.Variable(tf.normal([20, 3, 5, 5], stdev=stdev))
conv1_bias = tf.Variable(tf.normal([20], stdev=stdev))
# NOTE(review): conv2d is called without the input tensor x -- presumably
# it should convolve x with conv1_filter; verify against tinyflow's API.
conv1 = tf.nn.conv2d(conv1_filter, bias=conv1_bias)
flatten = tf.nn.flatten(conv1)
# NOTE(review): W is never defined anywhere in this file, so this line
# raises NameError at runtime; a weight variable for the flattened
# features is missing.
y = tf.nn.softmax(tf.matmul(flatten, W))
# Define loss and optimizer
y_ = tf.placeholder(tf.float32, [None, 10])
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
sess = tf.Session(device='gpu')
sess.run(tf.initialize_all_variables())
# get the mnist dataset
mnist = get_mnist(flatten=True, onehot=True)
for i in range(1000):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    sess.run(train_step, feed_dict={x: batch_xs, y_:batch_ys})
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
accuracy = tf.reduce_mean(correct_prediction)
print(sess.run(accuracy, feed_dict={x: mnist.test.images, y_: mnist.test.labels}))
| apache-2.0 | Python |
348205f38fbc09f04a6e8d795b001f08cce96c30 | fix `__init__` | scott-maddox/openbandparams | src/openbandparams/__init__.py | src/openbandparams/__init__.py | #
# Copyright (c) 2013-2014, Scott J Maddox
#
# This file is part of openbandparams.
#
# openbandparams is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# openbandparams is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with openbandparams. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
from . import version
from .version import __version__
__all__ = ['__version__']
from . import iii_v_zinc_blende_binaries
__all__ += iii_v_zinc_blende_binaries.__all__
from .iii_v_zinc_blende_binaries import *
from . import iii_v_zinc_blende_ternaries
__all__ += iii_v_zinc_blende_ternaries.__all__
from .iii_v_zinc_blende_ternaries import *
| #
# Copyright (c) 2013-2014, Scott J Maddox
#
# This file is part of openbandparams.
#
# openbandparams is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# openbandparams is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with openbandparams. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
from . import version
from .version import __version__
from . import iii_v
from .iii_v import *
__all__ = ['__version__']
__all__ += iii_v.__all__
| agpl-3.0 | Python |
ee2c15f068607fc0d919b407f90d43056bd53ce9 | drop extraneous newline | falconindy/auracle,falconindy/auracle,falconindy/auracle | tests/clone.py | tests/clone.py | #!/usr/bin/env python
import auracle_test
import os
class TestClone(auracle_test.TestCase):
    """Integration tests for `auracle clone` (runs against the test harness)."""
    def testCloneSingle(self):
        """Cloning one package checks it out and issues one info RPC."""
        p = self.Auracle(['clone', 'auracle-git'])
        self.assertEqual(p.returncode, 0)
        self.assertPkgbuildExists('auracle-git', git=True)
        self.assertCountEqual(self.request_uris, [
            '/rpc?v=5&type=info&arg[]=auracle-git'
        ])
    def testCloneMultiple(self):
        """Several packages are resolved with a single batched info RPC."""
        p = self.Auracle(['clone', 'auracle-git', 'pkgfile-git'])
        self.assertEqual(p.returncode, 0)
        self.assertPkgbuildExists('auracle-git', git=True)
        self.assertPkgbuildExists('pkgfile-git', git=True)
        self.assertCountEqual(self.request_uris, [
            '/rpc?v=5&type=info&arg[]=auracle-git&arg[]=pkgfile-git'
        ])
    def testCloneRecursive(self):
        """-r also clones dependencies and issues more than one RPC."""
        p = self.Auracle(['clone', '-r', 'auracle-git'])
        self.assertEqual(p.returncode, 0)
        self.assertPkgbuildExists('auracle-git', git=True)
        self.assertPkgbuildExists('nlohmann-json', git=True)
        self.assertGreater(len(self.request_uris), 1)
        self.assertIn('/rpc?v=5&type=info&arg[]=auracle-git',
                      self.request_uris)
    def testCloneUpdatesExistingCheckouts(self):
        """A second clone of the same package turns into a pull/update."""
        # Package doesn't initially exist, expect a clone.
        p = self.Auracle(['clone', 'auracle-git'])
        self.assertTrue(p.stdout.decode().startswith('clone'))
        self.assertTrue(os.path.exists(
            os.path.join(self.tempdir, 'auracle-git', 'clone')))
        # Package now exists, expect a pull
        p = self.Auracle(['clone', 'auracle-git'])
        self.assertTrue(p.stdout.decode().startswith('update'))
        self.assertTrue(os.path.exists(
            os.path.join(self.tempdir, 'auracle-git', 'pull')))
    def testCloneFailureReportsError(self):
        """A failing git clone is reported on stderr with a nonzero exit."""
        p = self.Auracle(['clone', 'yaourt'])
        self.assertNotEqual(p.returncode, 0)
        self.assertEqual(p.stderr.decode().strip(), (
            'error: clone failed for yaourt: '
            'git exited with unexpected exit status 1'))
if __name__ == '__main__':
auracle_test.main()
| #!/usr/bin/env python
import auracle_test
import os
class TestClone(auracle_test.TestCase):
    """Integration tests for `auracle clone` (runs against the test harness)."""
    def testCloneSingle(self):
        """Cloning one package checks it out and issues one info RPC."""
        p = self.Auracle(['clone', 'auracle-git'])
        self.assertEqual(p.returncode, 0)
        self.assertPkgbuildExists('auracle-git', git=True)
        self.assertCountEqual(self.request_uris, [
            '/rpc?v=5&type=info&arg[]=auracle-git'
        ])
    def testCloneMultiple(self):
        """Several packages are resolved with a single batched info RPC."""
        p = self.Auracle(['clone', 'auracle-git', 'pkgfile-git'])
        self.assertEqual(p.returncode, 0)
        self.assertPkgbuildExists('auracle-git', git=True)
        self.assertPkgbuildExists('pkgfile-git', git=True)
        self.assertCountEqual(self.request_uris, [
            '/rpc?v=5&type=info&arg[]=auracle-git&arg[]=pkgfile-git'
        ])
    def testCloneRecursive(self):
        """-r also clones dependencies and issues more than one RPC."""
        p = self.Auracle(['clone', '-r', 'auracle-git'])
        self.assertEqual(p.returncode, 0)
        self.assertPkgbuildExists('auracle-git', git=True)
        self.assertPkgbuildExists('nlohmann-json', git=True)
        self.assertGreater(len(self.request_uris), 1)
        self.assertIn('/rpc?v=5&type=info&arg[]=auracle-git',
                      self.request_uris)
    def testCloneUpdatesExistingCheckouts(self):
        """A second clone of the same package turns into a pull/update."""
        # Package doesn't initially exist, expect a clone.
        p = self.Auracle(['clone', 'auracle-git'])
        self.assertTrue(p.stdout.decode().startswith('clone'))
        self.assertTrue(os.path.exists(
            os.path.join(self.tempdir, 'auracle-git', 'clone')))
        # Package now exists, expect a pull
        p = self.Auracle(['clone', 'auracle-git'])
        self.assertTrue(p.stdout.decode().startswith('update'))
        self.assertTrue(os.path.exists(
            os.path.join(self.tempdir, 'auracle-git', 'pull')))
    def testCloneFailureReportsError(self):
        """A failing git clone is reported on stderr with a nonzero exit."""
        p = self.Auracle(['clone', 'yaourt'])
        self.assertNotEqual(p.returncode, 0)
        self.assertEqual(p.stderr.decode().strip(), (
            'error: clone failed for yaourt: '
            'git exited with unexpected exit status 1'))
if __name__ == '__main__':
auracle_test.main()
| mit | Python |
1831a7608a063d3f9b3dce7e42f591c50b099150 | remove the router name validation because that has been done in the __init__.py | tumluliu/rap | rap/servicefactory.py | rap/servicefactory.py | """ Factory of RoutingService classes """
import json
from .mb import MapboxRouter
from .graphhopper import GraphHopperRouter
# from . import mapzen
# from . import google
# from . import here
# from . import tomtom
""" Factory method of creating concrete routing service instances
"""
def RoutingServiceFactory(service_name):
    """Return a concrete router instance for *service_name*.

    Reads provider API keys from 'routerconf.json' (resolved relative to
    the current working directory -- TODO confirm that is intended).
    NOTE(review): unsupported service names silently return None.
    """
    with open('routerconf.json', 'r') as f:
        service_provider_conf = json.load(f)
    # TODO(lliu): The following stuff of course should be prettified
    # ATTENTION!! A pile of ugly things are coming...
    if service_name == 'mapbox':
        return MapboxRouter(service_provider_conf['mapbox']['key'])
    elif service_name == 'graphhopper':
        return GraphHopperRouter(service_provider_conf['graphhopper']['key'])
| """ Factory of RoutingService classes """
import json
from .mb import MapboxRouter
from .graphhopper import GraphHopperRouter
# from . import mapzen
# from . import google
# from . import here
# from . import tomtom
""" Factory method of creating concrete routing service instances
"""
VALID_ROUTING_SERVICES = [
'mapbox', 'graphhopper', 'mapzen', 'google', 'here', 'tomtom'
]
def RoutingServiceFactory(service_name):
    """Return a concrete router instance for *service_name*, or None.

    Unknown names and unimplemented providers return None; API keys are
    read from '../apikeys.json' relative to the working directory --
    TODO confirm that path is intended.
    """
    if service_name not in VALID_ROUTING_SERVICES:
        # TODO(lliu): Here should throw an exception or deal with it in an
        # identical way
        return None
    with open('../apikeys.json', 'r') as f:
        service_provider_conf = json.load(f)
    # TODO(lliu): The following stuff of course should be prettified
    # ATTENTION!! A pile of ugly things are coming...
    if service_name == 'mapbox':
        return MapboxRouter(service_provider_conf['mapbox']['key'])
    elif service_name == 'graphhopper':
        return GraphHopperRouter(service_provider_conf['graphhopper']['key'])
| mit | Python |
89cc31bd99b7c1397662352b411a5be9bd7cf625 | Set version to 6.11.0 | explosion/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc | thinc/about.py | thinc/about.py | # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
# Package metadata: the single source of truth for the version string.
# NOTE(review): assigning __name__ at module scope rebinds the module's
# own __name__ attribute -- presumably intentional here; verify.
__name__ = 'thinc'
__version__ = '6.11.0'
__summary__ = "Practical Machine Learning for NLP"
__uri__ = 'https://github.com/explosion/thinc'
__author__ = 'Matthew Honnibal'
__email__ = 'matt@explosion.ai'
__license__ = 'MIT'
__title__ = "thinc"
__release__ = False
| # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
# Package metadata: the single source of truth for the version string.
# NOTE(review): assigning __name__ at module scope rebinds the module's
# own __name__ attribute -- presumably intentional here; verify.
__name__ = 'thinc'
__version__ = '6.11.0.dev2'
__summary__ = "Practical Machine Learning for NLP"
__uri__ = 'https://github.com/explosion/thinc'
__author__ = 'Matthew Honnibal'
__email__ = 'matt@explosion.ai'
__license__ = 'MIT'
__title__ = "thinc"
__release__ = False
| mit | Python |
e7ba38cb79be32336321d80394904243f0799c69 | Revert to using --output switch for the output file. | jd28/pynwn-tools,jd28/pynwn-tools | tlkie/tlkie.py | tlkie/tlkie.py | #!/usr/bin/env python
import argparse, os, sys
from pynwn.file.tlk import Tlk
from pynwn.file.tls import TLS
from pynwn.util.helper import get_encoding
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--version', action='version', version='0.1')
parser.add_argument('-o', '--output', help='Output TLK or TLS file.')
parser.add_argument('-l', '--language', help='TLK language.', default=0)
parser.add_argument('file', help='TLK or TLS file.', nargs='+')
args = parser.parse_args()
def load_by_ext(f, ext):
    """Instantiate a Tlk or TLS object for file *f* based on *ext*."""
    if ext == '.tlk':
        return Tlk(open(f, 'rb'))
    if ext == '.tls':
        return TLS(f)
    raise ValueError("Tlkie can only process a TLK or TLS file.")
def save_by_ext(main, ext):
    """Write *main* (a Tlk or TLS instance) to args.output in format *ext*.

    NOTE(review): relies on the module-global args.output/args.language
    instead of taking them as parameters -- consider passing them in.
    """
    if '.tlk' == ext:
        if isinstance(main, Tlk):
            with open(args.output, 'wb') as f:
                main.write(f)
        elif isinstance(main, TLS):
            # TLS source converted to binary TLK for the selected language.
            with open(args.output, 'wb') as f:
                main.write_tlk(f, args.language)
    elif '.tls' == ext:
        if isinstance(main, Tlk):
            # Binary TLK exported as editable TLS text.
            with open(args.output, 'w', encoding=get_encoding()) as f:
                main.write_tls(f)
        elif isinstance(main, TLS):
            with open(args.output, 'w', encoding=get_encoding()) as f:
                main.write(f)
if __name__ == "__main__":
    # Pick the output container type from the --output file extension.
    basef = os.path.basename(args.output)
    outext = os.path.splitext(basef)[1].lower()
    if outext == '.tlk':
        main = Tlk()
    elif outext == '.tls':
        main = TLS()
    else:
        raise ValueError("Tlkie can only output a TLK or TLS file.")
    # Merge every input file into the output container.
    for f in args.file:
        basef = os.path.basename(f)
        ext = os.path.splitext(basef)[1]
        tl = load_by_ext(f, ext.lower())
        print("Adding: %s" % f)
        # NOTE(review): main is always a Tlk/TLS instance at this point, so
        # this branch is dead code (leftover from when main started as None).
        if main is None:
            main = tl
            continue
        main.inject(tl)
    print("Saving output: %s" % args.output)
    save_by_ext(main, outext)
| #!/usr/bin/env python
import argparse, os, sys
from pynwn.file.tlk import Tlk
from pynwn.file.tls import TLS
from pynwn.util.helper import get_encoding
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--version', action='version', version='0.1')
parser.add_argument('output', help='Output TLK or TLS file.')
parser.add_argument('-l', '--language', help='TLK language.', default=0)
parser.add_argument('file', help='TLK or TLS file.', nargs='+')
args = parser.parse_args()
def load_by_ext(f, ext):
    """Instantiate a Tlk or TLS object for file *f* based on *ext*."""
    if ext == '.tlk':
        return Tlk(open(f, 'rb'))
    if ext == '.tls':
        return TLS(f)
    raise ValueError("Tlkie can only process a TLK or TLS file.")
def save_by_ext(main, ext):
    """Write *main* (a Tlk or TLS instance) to args.output in format *ext*.

    NOTE(review): relies on the module-global args.output/args.language
    instead of taking them as parameters -- consider passing them in.
    """
    if '.tlk' == ext:
        if isinstance(main, Tlk):
            with open(args.output, 'wb') as f:
                main.write(f)
        elif isinstance(main, TLS):
            # TLS source converted to binary TLK for the selected language.
            with open(args.output, 'wb') as f:
                main.write_tlk(f, args.language)
    elif '.tls' == ext:
        if isinstance(main, Tlk):
            # Binary TLK exported as editable TLS text.
            with open(args.output, 'w', encoding=get_encoding()) as f:
                main.write_tls(f)
        elif isinstance(main, TLS):
            with open(args.output, 'w', encoding=get_encoding()) as f:
                main.write(f)
if __name__ == "__main__":
    # The first input file becomes the base container; later ones are
    # injected into it.
    main = None
    basef = os.path.basename(args.output)
    outext = os.path.splitext(basef)[1].lower()
    # Validate the requested output format up front.
    if outext != '.tlk' and outext != '.tls':
        raise ValueError("Tlkie can only output a TLK or TLS file.")
    for f in args.file:
        basef = os.path.basename(f)
        ext = os.path.splitext(basef)[1]
        tl = load_by_ext(f, ext.lower())
        print("Adding: %s" % f)
        if main is None:
            main = tl
            continue
        main.inject(tl)
    print("Saving output: %s" % args.output)
    save_by_ext(main, outext)
| mit | Python |
76412c0d99dd3bed76cf0cc9d06629c6c72ce972 | fix bug on urls.py | kyunooh/JellyBlog,kyunooh/JellyBlog,kyunooh/JellyBlog | jellyblog/urls.py | jellyblog/urls.py | from django.conf.urls import url, include
from django.conf import settings
from django.http import HttpResponseRedirect
from django.contrib.sitemaps.views import sitemap
from jellyblog import views
from .feeds import LatestFeed, AllPublickFeed
from .sitemaps import BlogSitemap
from .serializer import router
sitemaps = {
'blog': BlogSitemap,
}
urlpatterns = [
    # Comments to the right of each pattern show an example URL.
    url(r'^$', views.home, name='home'),
    # Shows the first page of the blog.
    url(r'^index/$', views.index, name='blog_index'),
    # Shows search results.
    url(r'^search/$', views.search_documents, name='search_documents'),
    # /page/(page_number)
    url(r'^page/(?P<page>[0-9]+)/?$',
        views.index_with_page, name='index_with_page'),
    # /(document_id)
    url(r'^(?P<document_id>[0-9]+)/?$', views.detail, name='detail'),
    # /category/(category_id)
    url(r'^category/(?P<category_id>[0-9]+)/?$',
        views.category_detail, name='category_detail'),
    # /category/(category_id)/page/(page_number)
    url(r'^category/(?P<category_id>[0-9]+)/page/(?P<page>[0-9]+)/?$',
        views.category_with_page, name='category_with_page'),
    # google chrome favicon fix
    url(r'^favicon.ico/$',
        lambda x: HttpResponseRedirect(settings.STATIC_URL+'ico/favicon.ico')),
    url(r'^latest/feed/$', LatestFeed()),
    url(r'^public/feed/$', AllPublickFeed()),
    url(r'^sitemap\.xml$', sitemap, {'sitemaps': sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^api/', include(router.urls))
]
| from django.conf.urls import url, include
from django.conf import settings
from django.http import HttpResponseRedirect
from django.contrib.sitemaps.views import sitemap
from jellyblog import views
from .feeds import LatestFeed, AllPublickFeed
from .sitemaps import BlogSitemap
from .serializer import router
sitemaps = {
'blog': BlogSitemap,
}
# URL table for the blog; each name is used for reverse() lookups.
urlpatterns = [
    # Comments to the right of each pattern show an example URL.
    url(r'^$', views.home, name='home'),
    # Shows the first page of the blog.
    url(r'^index/$', views.index, name='blog_index'),
    # Shows search results.
    # BUG FIX: the trailing comma was missing here, so the next url() call
    # was juxtaposed with this one and the module failed with SyntaxError.
    url(r'^search/$', views.search_documents, name='search_documents'),
    # /page/(page_number)
    url(r'^page/(?P<page>[0-9]+)/?$',
        views.index_with_page, name='index_with_page'),
    # /(document_id)
    url(r'^(?P<document_id>[0-9]+)/?$', views.detail, name='detail'),
    # /category/(category_id)
    url(r'^category/(?P<category_id>[0-9]+)/?$',
        views.category_detail, name='category_detail'),
    # /category/(category_id)/page/(page_number)
    url(r'^category/(?P<category_id>[0-9]+)/page/(?P<page>[0-9]+)/?$',
        views.category_with_page, name='category_with_page'),
    # google chrome favicon fix
    url(r'^favicon.ico/$',
        lambda x: HttpResponseRedirect(settings.STATIC_URL+'ico/favicon.ico')),
    url(r'^latest/feed/$', LatestFeed()),
    url(r'^public/feed/$', AllPublickFeed()),
    url(r'^sitemap\.xml$', sitemap, {'sitemaps': sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^api/', include(router.urls))
]
| apache-2.0 | Python |
fb384756680bb533aa44d7cd638c15aaf0991d32 | Add some more tests | fedora-infra/hrf | tests/tests.py | tests/tests.py | import os
import hrf
import unittest
import json
directory = os.path.dirname(__file__)
class HrfTestCase(unittest.TestCase):
    """Exercise the hrf app's message-humanization HTTP endpoints."""
    def setUp(self):
        self.app = hrf.app.test_client()
    def _fixture(self, name):
        """Return the contents of a JSON fixture stored next to this file.

        Uses open() in a with-block instead of the Python-2-only file()
        builtin, so the handle is closed and the code is Python-3 ready.
        """
        with open(os.path.join(directory, name), 'r') as f:
            return f.read()
    def test_json1_title(self):
        """/title yields just the message topic."""
        json_input = self._fixture('1.json')
        post = json.loads(self.app.post('/title', data=json_input).data)
        assert post['results'][0] == 'buildsys.build.state.change'
    def test_json1_all(self):
        """/all yields title, repr and icon for a single message."""
        json_input = self._fixture('1.json')
        post = json.loads(self.app.post('/all', data=json_input).data)
        assert post['results'][0]['title'] == 'buildsys.build.state.change'
        assert post['results'][0]['repr'] == 'buildsys.build.state.change -- uthash-1.9.8-3.el6 started building http://koji.fedoraproject.org/koji/buildinfo?buildID=430456'
        assert post['results'][0]['icon'] == 'http://fedoraproject.org/w/uploads/2/20/Artwork_DesignService_koji-icon-48.png'
    def test_json2_all(self):
        """/all handles a payload containing two messages in order."""
        json_input = self._fixture('2.json')
        post = json.loads(self.app.post('/all', data=json_input).data)
        assert post['results'][0]['title'] == 'bodhi.update.request.testing'
        assert post['results'][0]['repr'] == 'bodhi.update.request.testing -- cicku submitted uthash-1.9.8-3.fc18 to testing https://admin.fedoraproject.org/updates/uthash-1.9.8-3.fc18'
        assert post['results'][0]['icon'] == 'https://admin.fedoraproject.org/updates/static/images/bodhi-icon-48.png'
        assert post['results'][1]['title'] == 'buildsys.build.state.change'
        assert post['results'][1]['repr'] == 'buildsys.build.state.change -- uthash-1.9.8-3.el6 started building http://koji.fedoraproject.org/koji/buildinfo?buildID=430456'
        assert post['results'][1]['icon'] == 'http://fedoraproject.org/w/uploads/2/20/Artwork_DesignService_koji-icon-48.png'
    def test_json1_timezone1(self):
        """The timezone query parameter changes the rendered timestamp."""
        json_input = self._fixture('1.json')
        post1 = json.loads(self.app.post('/timestamp?timezone=US/Eastern', data=json_input).data)
        post2 = json.loads(self.app.post('/timestamp', data=json_input).data)
        assert post1['results'][0]['iso'] != post2['results'][0]['iso']
    def test_json1_timezone_error(self):
        """An unknown timezone yields an error payload."""
        json_input = self._fixture('1.json')
        post = json.loads(self.app.post('/timestamp?timezone=invalid/invalid', data=json_input).data)
        assert post['error'] == "Invalid timezone parameter."
    def test_json1_method_error(self):
        """An unknown method path yields an error payload."""
        json_input = self._fixture('1.json')
        post = json.loads(self.app.post('/derpderp', data=json_input).data)
        assert post['error'] == "That method was invalid."
if __name__ == '__main__':
unittest.main()
| import os
import hrf
import unittest
import json
directory = os.path.dirname(__file__)
class HrfTestCase(unittest.TestCase):
    """Exercise the hrf app's message-humanization HTTP endpoints.

    NOTE(review): file() is Python-2 only and the handles below are never
    closed; prefer "with open(...)" when touching this code.
    """
    def setUp(self):
        self.app = hrf.app.test_client()
    def test_json1_title(self):
        """/title yields just the message topic."""
        json_input = file(os.path.join(directory, '1.json'), 'r').read()
        post = json.loads(self.app.post('/title', data=json_input).data)
        assert post['results'][0] == 'buildsys.build.state.change'
    def test_json1_all(self):
        """/all yields title, repr and icon for a single message."""
        json_input = file(os.path.join(directory, '1.json'), 'r').read()
        post = json.loads(self.app.post('/all', data=json_input).data)
        assert post['results'][0]['title'] == 'buildsys.build.state.change'
        assert post['results'][0]['repr'] == 'buildsys.build.state.change -- uthash-1.9.8-3.el6 started building http://koji.fedoraproject.org/koji/buildinfo?buildID=430456'
        assert post['results'][0]['icon'] == 'http://fedoraproject.org/w/uploads/2/20/Artwork_DesignService_koji-icon-48.png'
    def test_json2_all(self):
        """/all handles a payload containing two messages in order."""
        json_input = file(os.path.join(directory, '2.json'), 'r').read()
        post = json.loads(self.app.post('/all', data=json_input).data)
        assert post['results'][0]['title'] == 'bodhi.update.request.testing'
        assert post['results'][0]['repr'] == 'bodhi.update.request.testing -- cicku submitted uthash-1.9.8-3.fc18 to testing https://admin.fedoraproject.org/updates/uthash-1.9.8-3.fc18'
        assert post['results'][0]['icon'] == 'https://admin.fedoraproject.org/updates/static/images/bodhi-icon-48.png'
        assert post['results'][1]['title'] == 'buildsys.build.state.change'
        assert post['results'][1]['repr'] == 'buildsys.build.state.change -- uthash-1.9.8-3.el6 started building http://koji.fedoraproject.org/koji/buildinfo?buildID=430456'
        assert post['results'][1]['icon'] == 'http://fedoraproject.org/w/uploads/2/20/Artwork_DesignService_koji-icon-48.png'
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | Python |
7556def7c9d591b9579742545bb2f40777b26ed8 | Fix default value for the fake_utils' path argument | atodorov/libblockdev,vpodzime/libblockdev,atodorov/libblockdev,rhinstaller/libblockdev,atodorov/libblockdev,vpodzime/libblockdev,rhinstaller/libblockdev,rhinstaller/libblockdev,vpodzime/libblockdev | tests/utils.py | tests/utils.py | import os
import tempfile
from contextlib import contextmanager
def create_sparse_tempfile(name, size):
    """Create a sparse temporary file and return its path.

    :param str name: suffix for filename
    :param size: the file size (in bytes)
    :returns: the path to the newly created file
    """
    handle, tmp_path = tempfile.mkstemp(prefix="libblockdev.", suffix="-%s" % name)
    os.close(handle)
    create_sparse_file(tmp_path, size)
    return tmp_path
def create_sparse_file(path, size):
    """ Create a sparse file.

    :param str path: the full path to the file
    :param size: the file size (in bytes)
    :returns: None
    """
    fd = os.open(path, os.O_WRONLY|os.O_CREAT|os.O_TRUNC)
    try:
        # truncating a freshly created file allocates no blocks -> sparse
        os.ftruncate(fd, size)
    finally:
        # BUG FIX: close the descriptor even if ftruncate() raises,
        # so the fd is never leaked
        os.close(fd)
@contextmanager
def udev_settle():
    # Run the wrapped block first, then wait for udev to finish processing
    # queued events so device nodes/symlinks are in place afterwards.
    # NOTE(review): the settle step is skipped if the block raises.
    yield
    os.system("udevadm settle")
@contextmanager
def fake_utils(path="."):
    """Temporarily prepend *path* to $PATH so fake utilities win lookups.

    :param str path: directory holding the fake utilities (default ".")
    """
    old_path = os.environ.get("PATH", "")
    if old_path:
        new_path = path + ":" + old_path
    else:
        new_path = path
    os.environ["PATH"] = new_path
    try:
        yield
    finally:
        # BUG FIX: restore $PATH even when the body raises; previously an
        # exception left the fake path in the environment for later tests.
        os.environ["PATH"] = old_path
| import os
import tempfile
from contextlib import contextmanager
def create_sparse_tempfile(name, size):
    """Create a sparse temporary file and return its path.

    :param str name: suffix for filename
    :param size: the file size (in bytes)
    :returns: the path to the newly created file
    """
    handle, tmp_path = tempfile.mkstemp(prefix="libblockdev.", suffix="-%s" % name)
    os.close(handle)
    create_sparse_file(tmp_path, size)
    return tmp_path
def create_sparse_file(path, size):
    """ Create a sparse file.

    :param str path: the full path to the file
    :param size: the size of the file (in bytes)
    :returns: None
    """
    fd = os.open(path, os.O_WRONLY|os.O_CREAT|os.O_TRUNC)
    try:
        # truncating a freshly created file allocates no blocks -> sparse
        os.ftruncate(fd, size)
    finally:
        # BUG FIX: close the descriptor even if ftruncate() raises,
        # so the fd is never leaked
        os.close(fd)
@contextmanager
def udev_settle():
    # Run the wrapped block first, then wait for udev to finish processing
    # queued events so device nodes/symlinks are in place afterwards.
    # NOTE(review): the settle step is skipped if the block raises.
    yield
    os.system("udevadm settle")
@contextmanager
def fake_utils(path=""):
    """Temporarily prepend *path* to $PATH so fake utilities win lookups.

    :param str path: directory holding the fake utilities
    """
    old_path = os.environ.get("PATH", "")
    if old_path:
        new_path = path + ":" + old_path
    else:
        new_path = path
    os.environ["PATH"] = new_path
    try:
        yield
    finally:
        # BUG FIX: restore $PATH even when the body raises; previously an
        # exception left the fake path in the environment for later tests.
        os.environ["PATH"] = old_path
| lgpl-2.1 | Python |
ccfdec698cfd6d4de3b3d57ab74bfd87a196957c | Rename these methods because we will introduce the json module's versions. | faassen/jsonvalue,faassen/jsonvalue | jsonvalue/core.py | jsonvalue/core.py | from pyld import jsonld
class JsonValue(object):
    """Registry of per-JSON-LD-type value converters.

    ``dump`` functions turn rich Python values into plain JSON values;
    ``load`` functions do the reverse.
    """
    def __init__(self):
        self._dumpers = {}
        self._loaders = {}
    def type(self, type, dump, load):
        """Register *dump*/*load* converter functions for *type*."""
        self._dumpers[type] = dump
        self._loaders[type] = load
    def load_value(self, type, value):
        """Convert plain *value* of *type* to its rich form (or pass through)."""
        loader = self._loaders.get(type)
        return value if loader is None else loader(value)
    def dump_value(self, type, value):
        """Convert rich *value* of *type* to its plain form (or pass through)."""
        dumper = self._dumpers.get(type)
        return value if dumper is None else dumper(value)
    def to_values(self, d):
        """Take JSON dict, return JSON dict with rich values."""
        return jsonld.compact(self.expand_to_values(d), d['@context'])
    def from_values(self, d):
        """Take rich JSON dict, return plain JSON dict without rich values."""
        return self.compact_from_values(jsonld.expand(d), d['@context'])
    def expand_to_values(self, d):
        """Take JSON dict, return expanded dict with rich values."""
        return _transform_expanded(jsonld.expand(d), self.load_value)
    def compact_from_values(self, expanded, context):
        """Take expanded JSON list, return JSON dict with plain values."""
        plain = _transform_expanded(expanded, self.dump_value)
        return jsonld.compact(plain, context)
def _transform_expanded(expanded, transform):
result = []
for d in expanded:
result.append(_transform_dict(d, transform))
return result
def _transform_dict(d, transform):
result = {}
for key, l in d.items():
if not isinstance(l, list):
result[key] = l
continue
result[key] = _transform_list(l, transform)
return result
def _transform_list(l, transform):
result = []
for d in l:
if not isinstance(d, dict):
result.append(d)
result.append(_transform_value(d, transform))
return result
def _transform_value(d, transform):
type = d.get('@type')
if type is None:
return d
value = d.get('@value')
if value is None:
return d
d = d.copy()
d['@value'] = transform(type, value)
return d
| from pyld import jsonld
class JsonValue(object):
    """Registry of per-JSON-LD-type value converters.

    ``dump`` functions turn rich Python values into plain JSON values;
    ``load`` functions do the reverse.
    """
    def __init__(self):
        self._dumpers = {}
        self._loaders = {}
    def type(self, type, dump, load):
        """Register *dump*/*load* converter functions for *type*."""
        self._dumpers[type] = dump
        self._loaders[type] = load
    def load(self, type, value):
        """Convert plain *value* of *type* to its rich form (or pass through)."""
        loader = self._loaders.get(type)
        return value if loader is None else loader(value)
    def dump(self, type, value):
        """Convert rich *value* of *type* to its plain form (or pass through)."""
        dumper = self._dumpers.get(type)
        return value if dumper is None else dumper(value)
    def to_values(self, d):
        """Take JSON dict, return JSON dict with rich values."""
        return jsonld.compact(self.expand_to_values(d), d['@context'])
    def from_values(self, d):
        """Take rich JSON dict, return plain JSON dict without rich values."""
        return self.compact_from_values(jsonld.expand(d), d['@context'])
    def expand_to_values(self, d):
        """Take JSON dict, return expanded dict with rich values."""
        return _transform_expanded(jsonld.expand(d), self.load)
    def compact_from_values(self, expanded, context):
        """Take expanded JSON list, return JSON dict with plain values."""
        plain = _transform_expanded(expanded, self.dump)
        return jsonld.compact(plain, context)
def _transform_expanded(expanded, transform):
result = []
for d in expanded:
result.append(_transform_dict(d, transform))
return result
def _transform_dict(d, transform):
result = {}
for key, l in d.items():
if not isinstance(l, list):
result[key] = l
continue
result[key] = _transform_list(l, transform)
return result
def _transform_list(l, transform):
result = []
for d in l:
if not isinstance(d, dict):
result.append(d)
result.append(_transform_value(d, transform))
return result
def _transform_value(d, transform):
type = d.get('@type')
if type is None:
return d
value = d.get('@value')
if value is None:
return d
d = d.copy()
d['@value'] = transform(type, value)
return d
| bsd-3-clause | Python |
50e4cf1ae678ac73d41885647eee7c16a30e392d | Add one more test on registration. | allo-/django-registration,ei-grad/django-registration,PetrDlouhy/django-registration,wda-hb/test,yorkedork/django-registration,alawnchen/django-registration,Geffersonvivan/django-registration,kinsights/django-registration,wy123123/django-registration,rulz/django-registration,imgmix/django-registration,tanjunyen/django-registration,PSU-OIT-ARC/django-registration,erinspace/django-registration,pando85/django-registration,wy123123/django-registration,kazitanvirahsan/django-registration,PetrDlouhy/django-registration,nikolas/django-registration,pando85/django-registration,wda-hb/test,torchingloom/django-registration,kazitanvirahsan/django-registration,kinsights/django-registration,sergafts/django-registration,arpitremarkable/django-registration,PSU-OIT-ARC/django-registration,yorkedork/django-registration,erinspace/django-registration,percipient/django-registration,mick-t/django-registration,nikolas/django-registration,maitho/django-registration,stillmatic/django-registration,memnonila/django-registration,imgmix/django-registration,percipient/django-registration,torchingloom/django-registration,tanjunyen/django-registration,matejkloska/django-registration,mick-t/django-registration,alawnchen/django-registration,Geffersonvivan/django-registration,timgraham/django-registration,rulz/django-registration,timgraham/django-registration,ei-grad/django-registration,sergafts/django-registration,allo-/django-registration,memnonila/django-registration,arpitremarkable/django-registration,maitho/django-registration,furious-luke/django-registration,furious-luke/django-registration,stillmatic/django-registration,matejkloska/django-registration | registration/tests.py | registration/tests.py | """
Unit tests for django-registration.
"""
from django.core import mail
from django.test import TestCase
from registration.models import RegistrationProfile
class DefaultBackendTestCase(TestCase):
"""
Test the default registration backend.
"""
def setUp(self):
"""
Create an instance of the default backend for use in testing.
"""
from registration.backends.default import DefaultBackend
self.backend = DefaultBackend()
def test_registration(self):
"""
Test the registration process: registration creates a new
inactive account and a new profile with activation key,
populates the correct account data and sends an activation
email.
"""
new_user = self.backend.register({}, 'bob', 'secret', 'bob@example.com')
self.assertEqual(new_user.username, 'bob')
self.failUnless(new_user.check_password('secret'))
self.assertEqual(new_user.email, 'bob@example.com')
self.failIf(new_user.is_active)
self.assertEqual(RegistrationProfile.objects.count(), 1)
self.assertEqual(len(mail.outbox), 1)
def test_activation(self):
"""
Test the activation process: activating within the permitted
window sets the account's ``is_active`` field to ``True`` and
resets the activation key, while failing to activate within
the permitted window forbids later activation.
"""
valid_user = self.backend.register({}, 'alice', 'swordfish', 'alice@example.com')
valid_profile = RegistrationProfile.objects.get(user=valid_user)
activated = self.backend.activate({}, valid_profile.activation_key)
self.assertEqual(activated.username, valid_user.username)
self.failUnless(activated.is_active)
| """
Unit tests for django-registration.
"""
from django.core import mail
from django.test import TestCase
from registration.models import RegistrationProfile
class DefaultBackendTestCase(TestCase):
"""
Test the default registration backend.
"""
def setUp(self):
"""
Create an instance of the default backend for use in testing.
"""
from registration.backends.default import DefaultBackend
self.backend = DefaultBackend()
def test_registration(self):
"""
Create a new user, verifying that username, email and password
are set correctly and that the new user is inactive and
received an activation email.
"""
new_user = self.backend.register({}, 'bob', 'secret', 'bob@example.com')
self.assertEqual(new_user.username, 'bob')
self.failUnless(new_user.check_password('secret'))
self.assertEqual(new_user.email, 'bob@example.com')
self.failIf(new_user.is_active)
self.assertEqual(len(mail.outbox), 1)
def test_activation(self):
"""
Test the activation process: activating within the permitted
window sets the account's ``is_active`` field to ``True`` and
resets the activation key, while failing to activate within
the permitted window forbids later activation.
"""
valid_user = self.backend.register({}, 'alice', 'swordfish', 'alice@example.com')
valid_profile = RegistrationProfile.objects.get(user=valid_user)
activated = self.backend.activate({}, valid_profile.activation_key)
self.assertEqual(activated.username, valid_user.username)
self.failUnless(activated.is_active)
| bsd-3-clause | Python |
8f9b1d8ae70bd259e2b2953913a03f52e5960a1b | Test email sending. | stefankoegl/django-couchdb-utils,ogirardot/django-registration,ratio/django-registration,stefankoegl/django-registration-couchdb,stefankoegl/django-registration-couchdb,wuyuntao/django-registration,wuyuntao/django-registration,danielsokolowski/django-registration,schmidsi/django-registration,andresdouglas/django-registration,stefankoegl/django-couchdb-utils,bruth/django-registration2,stefankoegl/django-registration-couchdb,stefankoegl/django-couchdb-utils,ogirardot/django-registration,danielsokolowski/django-registration | registration/tests.py | registration/tests.py | """
Unit tests for django-registration.
"""
from django.core import mail
from django.test import TestCase
class DefaultBackendTestCase(TestCase):
"""
Test the default registration backend.
"""
def setUp(self):
"""
Create an instance of the default backend for use in testing.
"""
from registration.backends.default import DefaultBackend
self.backend = DefaultBackend()
def test_registration(self):
"""
Create a new user, verifying that username, email and password
are set correctly and that the new user is inactive and
received an activation email.
"""
new_user = self.backend.register({}, 'bob', 'secret', 'bob@example.com')
self.assertEqual(new_user.username, 'bob')
self.failUnless(new_user.check_password('secret'))
self.assertEqual(new_user.email, 'bob@example.com')
self.failIf(new_user.is_active)
self.assertEqual(len(mail.outbox), 1)
| """
Unit tests for django-registration.
"""
from django.test import TestCase
class DefaultBackendTestCase(TestCase):
"""
Test the default registration backend.
"""
def setUp(self):
"""
Create an instance of the default backend for use in testing.
"""
from registration.backends.default import DefaultBackend
self.backend = DefaultBackend()
def test_registration(self):
"""
Create a new user, verifying that username, email and password
are set correctly and that the new user is inactive.
"""
new_user = self.backend.register({}, 'bob', 'secret', 'bob@example.com')
self.assertEqual(new_user.username, 'bob')
self.failUnless(new_user.check_password('secret'))
self.assertEqual(new_user.email, 'bob@example.com')
self.failIf(new_user.is_active)
| bsd-3-clause | Python |
0145cfad50b72d3f2105fe17f6de11831ce5c1de | Add -c option to specify dataframe columns name in advance. | geektoni/Influenza-Like-Illness-Predictor,geektoni/Influenza-Like-Illness-Predictor | data_analysis/generate_weekly_data.py | data_analysis/generate_weekly_data.py | #!/usr/bin/env python
# Given a complete year files with data in the form (page, week, visits)
# this script will generate a convenient csv file which will store for
# each page and for each years's week the total number of visits.
#
# Written by Giovanni De Toni (2017)
# Email: giovanni.det at gmail.com
"""Generate year files with page visits
Usage:
generate_weekly_data.py <input> <output> [-c <columns>]
Options:
-c specify custom columns name.
-h, --help
"""
import fileinput
import pandas as pd
import datetime
from docopt import docopt
# Parse the command line
arguments = docopt(__doc__)
# Set up an empty dictionary
all_data={}
# If the columns are set, add them to the dataframe
columns_df = []
with open(argumens["<columns>"], "r") as f:
for line in f:
all_data[line]=[]
# Future index
index_year=[];
# Read from file
with open(arguments["<input>"], "r") as f:
for line in f:
# Split the line given
# 0: page name
# 1: date-hour
# 2: visits count
# 3: page size
total = line.split(",")
# Get date-hour pair
# 0: date
# 1: hour
date = total[1].split("-")
# Generate year month and day.
year = date[0][0:4]
month = date[0][4:6]
day = date[0][6:8]
# Get week number
week_number = datetime.date(int(year), int(month), int(day)).isocalendar()[1]
# Set up an empty list if the key
# is null
if all_data.get(total[0], []) == []:
all_data[total[0]] = [0 for x in range(53)]
# Sum the visits
all_data[total[0]][int(week_number)-1] += int(total[2]);
for i in range(0, 53):
if i<10:
number="0"+str(i)
else:
number=str(i)
index_year.append(year+"-"+number)
# Generate a pandas dataframe with all the data
print index_year
df = pd.DataFrame(all_data);
df = df.set_index([index_year]);
# Print the dataframe to show the result
print(df)
# Save it to file
df.to_csv(arguments["<output>"], index_label="Week")
| #!/usr/bin/env python
# Given a complete year files with data in the form (page, week, visits)
# this script will generate a convenient csv file which will store for
# each page and for each years's week the total number of visits.
#
# Written by Giovanni De Toni (2017)
# Email: giovanni.det at gmail.com
"""Generate year files with page visits
Usage:
generate_weekly_data.py <input> <output>
Options:
-h, --help
"""
import fileinput
import pandas as pd
import datetime
from docopt import docopt
# Parse the command line
arguments = docopt(__doc__)
# Set up an empty dictionary
all_data={}
# Future index
index_year=[];
# Read from file
with open(arguments["<input>"], "r") as f:
for line in f:
# Split the line given
# 0: page name
# 1: date-hour
# 2: visits count
# 3: page size
total = line.split(",")
# Get date-hour pair
# 0: date
# 1: hour
date = total[1].split("-")
# Generate year month and day.
year = date[0][0:4]
month = date[0][4:6]
day = date[0][6:8]
# Get week number
week_number = datetime.date(int(year), int(month), int(day)).isocalendar()[1]
# Set up an empty list if the key
# is null
if all_data.get(total[0], []) == []:
all_data[total[0]] = [0 for x in range(53)]
# Sum the visits
all_data[total[0]][int(week_number)-1] += int(total[2]);
for i in range(0, 53):
if i<10:
number="0"+str(i)
else:
number=str(i)
index_year.append(year+"-"+number)
# Generate a pandas dataframe with all the data
print index_year
df = pd.DataFrame(all_data);
df = df.set_index([index_year]);
# Print the dataframe to show the result
print(df)
# Save it to file
df.to_csv(arguments["<output>"], index_label="Week")
| mit | Python |
1334a91c9200fdefd8938ceacee67a5f0ad6bd84 | update regex for sql log parse | lsaffre/lino,lsaffre/lino,khchine5/lino,lino-framework/lino,lsaffre/lino,khchine5/lino,lino-framework/lino,lino-framework/lino,lsaffre/lino,lino-framework/lino,lino-framework/lino,lsaffre/lino,khchine5/lino,khchine5/lino,khchine5/lino | lino/utils/sql.py | lino/utils/sql.py | # import lino
# lino.startup('lino_book.projects.team.settings.demo')
# import django
# print django.__file__
# from lino.api.doctest import *
# show_sql_queries()
# #doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
# r = demo_get('robin','api/tickets/AllTickets', fmt='json', limit=1)
# print(r)
# show_sql_queries()
import re
import sys
# from pprint import pprint
import textwrap
def p(kw, sql_width = 60):
"""Prints a parsed sql log nicely"""
kw['sql'] = ("\n ").join(textwrap.wrap(kw['sql'], sql_width))
print(
"table: {table}\n"
"Longest_time: {time}\n"
"Queries: {count}\n"
"total_time: {total_time}\n"
"sql: {sql}".format(**kw))
if __name__ == "__main__":
matches = []
regex = r"^.+?\((?P<time>[\d\.]*)\) (?P<sql>.*FROM \`(?P<table>.*?)\`.*?;).*$"
# f = open("log/lino.log", 'r')
f = sys.stdin
d= {}
l = f.readline()
while l:
m = re.match(regex, l)
# print m
if m:
g = m.groupdict()
g['time'] = float(g['time'])
r = d.setdefault(g['table'], {})
r['count'] = r.get('count', 0) + 1
r["total_time"] = r.get("total_time", 0 ) + float(g['time'])
if r.get('time', -1) < g['time']:
d[g['table']].update(g)
l = f.readline()
if d:
for kw in sorted(d.values(), key= lambda x: x['total_time']):
p(kw)
print "-------------------"
print("The slowest SQL call was:")
#find max
kw = d[max(d, key=lambda x: float(d[x].get('time', 0)))]
p(kw)
print "-------------------"
else:
print("No sql queries found") | # import lino
# lino.startup('lino_book.projects.team.settings.demo')
# import django
# print django.__file__
# from lino.api.doctest import *
# show_sql_queries()
# #doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
# r = demo_get('robin','api/tickets/AllTickets', fmt='json', limit=1)
# print(r)
# show_sql_queries()
import re
import sys
# from pprint import pprint
import textwrap
def p(kw, sql_width = 60):
"""Prints a parsed sql log nicely"""
kw['sql'] = ("\n ").join(textwrap.wrap(kw['sql'], sql_width))
print(
"table: {table}\n"
"Longest_time: {time}\n"
"Queries: {count}\n"
"total_time: {total_time}\n"
"sql: {sql}".format(**kw))
if __name__ == "__main__":
matches = []
regex = r"^.+\((?P<time>[\d\.]*)\) (?P<sql>.*FROM \`(?P<table>.*?)\`.*)$"
# f = open("log/lino.log", 'r')
f = sys.stdin
d= {}
l = f.readline()
while l:
m = re.match(regex, l)
# print m
if m:
g = m.groupdict()
g['time'] = float(g['time'])
r = d.setdefault(g['table'], {})
r['count'] = r.get('count', 0) + 1
r["total_time"] = r.get("total_time", 0 ) + float(g['time'])
if r.get('time', -1) < g['time']:
d[g['table']].update(g)
l = f.readline()
if d:
for kw in sorted(d.values(), key= lambda x: x['total_time']):
p(kw)
print "-------------------"
print("The slowest SQL call was:")
#find max
kw = d[max(d, key=lambda x: float(d[x].get('time', 0)))]
p(kw)
print "-------------------"
else:
print("No sql queries found") | unknown | Python |
0bc4535aa5b17cebfdf8243b165c678538a10fe7 | remove table truncating, when connecting to database | varnish/varnish-microservice-monitor,varnish/zipnish,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/zipnish,varnish/zipnish,varnish/varnish-microservice-monitor,varnish/varnish-microservice-monitor,varnish/zipnish,varnish/varnish-microservice-monitor | log-reader/app.py | log-reader/app.py | # basic stuff required for logging / debugging
import os, sys, syslog, traceback
# varnishapi to interact with varnishlog
import varnishapi
# log module to manage data flow coming in from varnishlog into ZipKin database
from log import LogReader, LogDataManager
from log import LogDatabase, LogStorage
# called when the program starts up
def main(sharedMemoryLog):
try:
# connect to varnish log
vap = varnishapi.VarnishLog(['-g', 'request'])
# connect to varnishapi and begin logging
# logDataManager
sharedMemoryLog.execute(vap)
# keyboard exception
except KeyboardInterrupt:
vap.Fini()
# log exception at system level
except Exception as e:
syslog.openlog(sys.argv[0], syslog.LOG_PID | syslog.LOG_PERROR, syslog.LOG_LOCAL0)
syslog.syslog(syslog.LOG_ERR, traceback.format_exc())
if __name__ == '__main__':
# connection parameters to database
dbParams = {\
'host': 'localhost', \
'db': 'zipkin', \
'user': 'zipkin', \
'passwd': 'kinect', \
'keep_alive': True, \
'truncate_tables': False \
}
# connect to log database
logDatabase = LogDatabase(**dbParams)
# log data storage
logStorage = LogStorage(logDatabase)
# manages log data
logDataManager = LogDataManager(logStorage)
# shared memory log reader
shmLog = LogReader(logDataManager)
# initiate logging
main(shmLog)
| # basic stuff required for logging / debugging
import os, sys, syslog, traceback
# varnishapi to interact with varnishlog
import varnishapi
# log module to manage data flow coming in from varnishlog into ZipKin database
from log import LogReader, LogDataManager
from log import LogDatabase, LogStorage
# called when the program starts up
def main(sharedMemoryLog):
try:
# connect to varnish log
vap = varnishapi.VarnishLog(['-g', 'request'])
# connect to varnishapi and begin logging
# logDataManager
sharedMemoryLog.execute(vap)
# keyboard exception
except KeyboardInterrupt:
vap.Fini()
# log exception at system level
except Exception as e:
syslog.openlog(sys.argv[0], syslog.LOG_PID | syslog.LOG_PERROR, syslog.LOG_LOCAL0)
syslog.syslog(syslog.LOG_ERR, traceback.format_exc())
if __name__ == '__main__':
# connection parameters to database
dbParams = {\
'host': 'localhost', \
'db': 'zipkin', \
'user': 'zipkin', \
'passwd': 'kinect', \
'keep_alive': True, \
'truncate_tables': True \
}
# connect to log database
logDatabase = LogDatabase(**dbParams)
# log data storage
logStorage = LogStorage(logDatabase)
# manages log data
logDataManager = LogDataManager(logStorage)
# shared memory log reader
shmLog = LogReader(logDataManager)
# initiate logging
main(shmLog)
| bsd-2-clause | Python |
4223c8e235337fbb2935eb0e6c78eab50b158609 | Update version string. | pquentin/libcloud,erjohnso/libcloud,carletes/libcloud,mbrukman/libcloud,mistio/libcloud,sfriesel/libcloud,mtekel/libcloud,sahildua2305/libcloud,aleGpereira/libcloud,jerryblakley/libcloud,Verizon/libcloud,sfriesel/libcloud,NexusIS/libcloud,Keisuke69/libcloud,aviweit/libcloud,thesquelched/libcloud,atsaki/libcloud,smaffulli/libcloud,mathspace/libcloud,t-tran/libcloud,mbrukman/libcloud,wido/libcloud,illfelder/libcloud,Jc2k/libcloud,thesquelched/libcloud,mistio/libcloud,samuelchong/libcloud,Keisuke69/libcloud,ByteInternet/libcloud,techhat/libcloud,cloudControl/libcloud,sergiorua/libcloud,mtekel/libcloud,apache/libcloud,Cloud-Elasticity-Services/as-libcloud,aviweit/libcloud,NexusIS/libcloud,iPlantCollaborativeOpenSource/libcloud,apache/libcloud,marcinzaremba/libcloud,aleGpereira/libcloud,StackPointCloud/libcloud,wrigri/libcloud,schaubl/libcloud,supertom/libcloud,aviweit/libcloud,dcorbacho/libcloud,ByteInternet/libcloud,briancurtin/libcloud,sahildua2305/libcloud,mathspace/libcloud,pquentin/libcloud,munkiat/libcloud,watermelo/libcloud,cloudControl/libcloud,vongazman/libcloud,niteoweb/libcloud,pantheon-systems/libcloud,SecurityCompass/libcloud,JamesGuthrie/libcloud,sergiorua/libcloud,JamesGuthrie/libcloud,supertom/libcloud,ZuluPro/libcloud,cryptickp/libcloud,wido/libcloud,briancurtin/libcloud,Verizon/libcloud,kater169/libcloud,sgammon/libcloud,niteoweb/libcloud,ZuluPro/libcloud,cloudControl/libcloud,ninefold/libcloud,pantheon-systems/libcloud,mgogoulos/libcloud,MrBasset/libcloud,wrigri/libcloud,Itxaka/libcloud,ByteInternet/libcloud,mgogoulos/libcloud,wrigri/libcloud,kater169/libcloud,mbrukman/libcloud,iPlantCollaborativeOpenSource/libcloud,andrewsomething/libcloud,dcorbacho/libcloud,Kami/libcloud,schaubl/libcloud,illfelder/libcloud,DimensionDataCBUSydney/libcloud,smaffulli/libcloud,DimensionDataCBUSydney/libcloud,JamesGuthrie/libcloud,carletes/libcloud,pquentin/libcloud,sfriesel/libcloud,MrBasset/libcloud,sa
muelchong/libcloud,munkiat/libcloud,Kami/libcloud,watermelo/libcloud,jimbobhickville/libcloud,apache/libcloud,marcinzaremba/libcloud,iPlantCollaborativeOpenSource/libcloud,sahildua2305/libcloud,t-tran/libcloud,curoverse/libcloud,jerryblakley/libcloud,SecurityCompass/libcloud,curoverse/libcloud,schaubl/libcloud,Scalr/libcloud,marcinzaremba/libcloud,watermelo/libcloud,cryptickp/libcloud,Cloud-Elasticity-Services/as-libcloud,andrewsomething/libcloud,lochiiconnectivity/libcloud,StackPointCloud/libcloud,supertom/libcloud,wuyuewen/libcloud,carletes/libcloud,mgogoulos/libcloud,MrBasset/libcloud,andrewsomething/libcloud,sergiorua/libcloud,vongazman/libcloud,erjohnso/libcloud,ninefold/libcloud,niteoweb/libcloud,erjohnso/libcloud,techhat/libcloud,mistio/libcloud,Jc2k/libcloud,munkiat/libcloud,cryptickp/libcloud,Verizon/libcloud,atsaki/libcloud,lochiiconnectivity/libcloud,Scalr/libcloud,Itxaka/libcloud,kater169/libcloud,Itxaka/libcloud,SecurityCompass/libcloud,ZuluPro/libcloud,ClusterHQ/libcloud,mathspace/libcloud,wuyuewen/libcloud,jerryblakley/libcloud,NexusIS/libcloud,curoverse/libcloud,DimensionDataCBUSydney/libcloud,thesquelched/libcloud,aleGpereira/libcloud,t-tran/libcloud,smaffulli/libcloud,jimbobhickville/libcloud,lochiiconnectivity/libcloud,ClusterHQ/libcloud,illfelder/libcloud,jimbobhickville/libcloud,dcorbacho/libcloud,Kami/libcloud,wido/libcloud,mtekel/libcloud,wuyuewen/libcloud,Scalr/libcloud,Cloud-Elasticity-Services/as-libcloud,techhat/libcloud,vongazman/libcloud,atsaki/libcloud,StackPointCloud/libcloud,samuelchong/libcloud,pantheon-systems/libcloud,sgammon/libcloud,briancurtin/libcloud | libcloud/__init__.py | libcloud/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
@var __version__: Current version of libcloud
"""
__all__ = ["__version__", "enable_debug"]
__version__ = "0.5.1"
def enable_debug(fo):
"""
Enable library wide debugging to a file-like object.
@param fo: Where to append debugging information
@type fo: File like object, only write operations are used.
"""
from libcloud.base import (ConnectionKey,
LoggingHTTPConnection,
LoggingHTTPSConnection)
LoggingHTTPSConnection.log = fo
LoggingHTTPConnection.log = fo
ConnectionKey.conn_classes = (LoggingHTTPConnection, LoggingHTTPSConnection)
def _init_once():
"""
Utility function that is ran once on Library import.
This checks for the LIBCLOUD_DEBUG enviroment variable, which if it exists
is where we will log debug information about the provider transports.
If LIBCLOUD_DEBUG is not a path, C{/tmp/libcloud_debug.log} is used by
default.
"""
import os
d = os.getenv("LIBCLOUD_DEBUG")
if d:
if d.isdigit():
d = "/tmp/libcloud_debug.log"
fo = open(d, "a")
enable_debug(fo)
_init_once()
| # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
@var __version__: Current version of libcloud
"""
__all__ = ["__version__", "enable_debug"]
__version__ = "0.5.0"
def enable_debug(fo):
"""
Enable library wide debugging to a file-like object.
@param fo: Where to append debugging information
@type fo: File like object, only write operations are used.
"""
from libcloud.base import (ConnectionKey,
LoggingHTTPConnection,
LoggingHTTPSConnection)
LoggingHTTPSConnection.log = fo
LoggingHTTPConnection.log = fo
ConnectionKey.conn_classes = (LoggingHTTPConnection, LoggingHTTPSConnection)
def _init_once():
"""
Utility function that is ran once on Library import.
This checks for the LIBCLOUD_DEBUG enviroment variable, which if it exists
is where we will log debug information about the provider transports.
If LIBCLOUD_DEBUG is not a path, C{/tmp/libcloud_debug.log} is used by
default.
"""
import os
d = os.getenv("LIBCLOUD_DEBUG")
if d:
if d.isdigit():
d = "/tmp/libcloud_debug.log"
fo = open(d, "a")
enable_debug(fo)
_init_once()
| apache-2.0 | Python |
2f784a5eb8002b2fbb4e960685a1f43474309bf4 | bump version | Calysto/octave_kernel,Calysto/octave_kernel | octave_kernel/__init__.py | octave_kernel/__init__.py | """An Octave kernel for Jupyter"""
__version__ = '0.25.0'
| """An Octave kernel for Jupyter"""
__version__ = '0.24.8'
| bsd-3-clause | Python |
aaa30aab6c2b46a1ae247d9cdee8033641106172 | Bump version | Frojd/django-react-templatetags,Frojd/django-react-templatetags,Frojd/django-react-templatetags | django_react_templatetags/__init__.py | django_react_templatetags/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
django_react_templatetags
----------
This extension allows you to add React components into your django templates.
"""
__title__ = "django_react_templatetags"
__version__ = "5.0.1"
__build__ = 501
__author__ = "Martin Sandström"
__license__ = "MIT"
__copyright__ = "Copyright 2015-2018 Fröjd Interactive"
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
django_react_templatetags
----------
This extension allows you to add React components into your django templates.
"""
__title__ = "django_react_templatetags"
__version__ = "5.0.0"
__build__ = 500
__author__ = "Martin Sandström"
__license__ = "MIT"
__copyright__ = "Copyright 2015-2018 Fröjd Interactive"
| mit | Python |
510c111a9ec2672857455a66c3718145185fffdc | Make default type of is_dict_union 'type' and automatically add key to nested is_dict_unions | Daanvdk/is_valid | is_valid/is_dict_union.py | is_valid/is_dict_union.py | from .base import Predicate
from .is_dict_where import is_dict_where
from .is_in import is_in
from .is_superdict_where import is_superdict_where
from .is_subdict_where import is_subdict_where
from .to_pred import to_pred
class is_dict_union(Predicate):
def __init__(self, *args, **kwargs):
if args and isinstance(args[0], str):
key, *args = args
else:
key = 'type'
self._key = key
self._preds = {
value: self._to_pred(pred, value)
for value, pred in dict(*args, **kwargs).items()
}
self.prerequisites = [is_superdict_where({
key: is_in(set(self._preds)),
})]
def _to_pred(self, pred, value):
pred = to_pred(pred)
if type(pred) in [is_dict_where, is_superdict_where, is_subdict_where]:
if type(pred) is is_superdict_where:
cls = is_superdict_where
else:
cls = is_dict_where
required = {self._key: value}
optional = {}
for key, subpred in pred._predicates.items():
if key in pred._required:
required[key] = subpred
else:
optional[key] = subpred
pred = cls(required, optional)
elif type(pred) is is_dict_union:
return is_dict_union(pred._key, {
subvalue: self._to_pred(subpred, value)
for subvalue, subpred in pred._preds.items()
})
return pred
def _evaluate(self, data, explain, context):
return self._preds[data[self._key]](data, explain, context)
| from .base import Predicate
from .is_dict_where import is_dict_where
from .is_in import is_in
from .is_superdict_where import is_superdict_where
from .is_subdict_where import is_subdict_where
from .to_pred import to_pred
class is_dict_union(Predicate):
def __init__(self, key, *args, **kwargs):
self._key = key
self._preds = {
value: self._to_pred(pred, value)
for value, pred in dict(*args, **kwargs).items()
}
self.prerequisites = [is_superdict_where({
key: is_in(set(self._preds)),
})]
def _to_pred(self, pred, value):
pred = to_pred(pred)
if type(pred) in [is_dict_where, is_superdict_where, is_subdict_where]:
if type(pred) is is_superdict_where:
cls = is_superdict_where
else:
cls = is_dict_where
required = {self._key: value}
optional = {}
for key, subpred in pred._predicates.items():
if key in pred._required:
required[key] = subpred
else:
optional[key] = subpred
pred = cls(required, optional)
return pred
def _evaluate(self, data, explain, context):
return self._preds[data[self._key]](data, explain, context)
| mit | Python |
b8cf5becf6b68d8044ad3a89a9abcea107909597 | Add adapters to main pika import | fkarb/pika-python3,reddec/pika,hugoxia/pika,knowsis/pika,vrtsystems/pika,vitaly-krugl/pika,Zephor5/pika,zixiliuyue/pika,jstnlef/pika,benjamin9999/pika,renshawbay/pika-python3,skftn/pika,Tarsbot/pika,pika/pika,shinji-s/pika | pika/__init__.py | pika/__init__.py | # ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0
#
# The contents of this file are subject to the Mozilla Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and
# limitations under the License.
#
# The Original Code is Pika.
#
# The Initial Developers of the Original Code are LShift Ltd, Cohesive
# Financial Technologies LLC, and Rabbit Technologies Ltd. Portions
# created before 22-Nov-2008 00:00:00 GMT by LShift Ltd, Cohesive
# Financial Technologies LLC, or Rabbit Technologies Ltd are Copyright
# (C) 2007-2008 LShift Ltd, Cohesive Financial Technologies LLC, and
# Rabbit Technologies Ltd.
#
# Portions created by LShift Ltd are Copyright (C) 2007-2009 LShift
# Ltd. Portions created by Cohesive Financial Technologies LLC are
# Copyright (C) 2007-2009 Cohesive Financial Technologies
# LLC. Portions created by Rabbit Technologies Ltd are Copyright (C)
# 2007-2009 Rabbit Technologies Ltd.
#
# Portions created by Tony Garnock-Jones are Copyright (C) 2009-2010
# LShift Ltd and Tony Garnock-Jones.
#
# All Rights Reserved.
#
# Contributor(s): ______________________________________.
#
# Alternatively, the contents of this file may be used under the terms
# of the GNU General Public License Version 2 or later (the "GPL"), in
# which case the provisions of the GPL are applicable instead of those
# above. If you wish to allow use of your version of this file only
# under the terms of the GPL, and not to allow others to use your
# version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the
# notice and other provisions required by the GPL. If you do not
# delete the provisions above, a recipient may use your version of
# this file under the terms of any one of the MPL or the GPL.
#
# ***** END LICENSE BLOCK *****
from pika.connection import ConnectionParameters
from pika.connection import PlainCredentials
from pika.reconnection_strategies import ReconnectionStrategy
from pika.spec import BasicProperties
from pika.adapters.base_connection import BaseConnection
from pika.adapters.asyncore_connection import AsyncoreConnection
from pika.adapters.blocking_connection import BlockingConnection
from pika.adapters.select_connection import SelectConnection
from pika.adapters.select_connection import IOLoop
# Python 2.4 support: add struct.unpack_from if it's missing.
try:
import struct
getattr(struct, "unpack_from")
except AttributeError:
def _unpack_from(fmt, buf, offset=0):
slice = buffer(buf, offset, struct.calcsize(fmt))
return struct.unpack(fmt, slice)
struct.unpack_from = _unpack_from
| # ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0
#
# The contents of this file are subject to the Mozilla Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
# the License for the specific language governing rights and
# limitations under the License.
#
# The Original Code is Pika.
#
# The Initial Developers of the Original Code are LShift Ltd, Cohesive
# Financial Technologies LLC, and Rabbit Technologies Ltd. Portions
# created before 22-Nov-2008 00:00:00 GMT by LShift Ltd, Cohesive
# Financial Technologies LLC, or Rabbit Technologies Ltd are Copyright
# (C) 2007-2008 LShift Ltd, Cohesive Financial Technologies LLC, and
# Rabbit Technologies Ltd.
#
# Portions created by LShift Ltd are Copyright (C) 2007-2009 LShift
# Ltd. Portions created by Cohesive Financial Technologies LLC are
# Copyright (C) 2007-2009 Cohesive Financial Technologies
# LLC. Portions created by Rabbit Technologies Ltd are Copyright (C)
# 2007-2009 Rabbit Technologies Ltd.
#
# Portions created by Tony Garnock-Jones are Copyright (C) 2009-2010
# LShift Ltd and Tony Garnock-Jones.
#
# All Rights Reserved.
#
# Contributor(s): ______________________________________.
#
# Alternatively, the contents of this file may be used under the terms
# of the GNU General Public License Version 2 or later (the "GPL"), in
# which case the provisions of the GPL are applicable instead of those
# above. If you wish to allow use of your version of this file only
# under the terms of the GPL, and not to allow others to use your
# version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the
# notice and other provisions required by the GPL. If you do not
# delete the provisions above, a recipient may use your version of
# this file under the terms of any one of the MPL or the GPL.
#
# ***** END LICENSE BLOCK *****
from pika.connection import ConnectionParameters
from pika.connection import PlainCredentials
from pika.reconnection_strategies import ReconnectionStrategy
from pika.spec import BasicProperties
# Python 2.4 support: add struct.unpack_from if it's missing.
try:
import struct
getattr(struct, "unpack_from")
except AttributeError:
def _unpack_from(fmt, buf, offset=0):
slice = buffer(buf, offset, struct.calcsize(fmt))
return struct.unpack(fmt, slice)
struct.unpack_from = _unpack_from
| mpl-2.0 | Python |
b527f9a87c1b18412db9d98ac5272ddc4aad03f9 | Add extraInfo link in `/admin/auth/user` | caesar2164/edx-platform,caesar2164/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform | openedx/stanford/djangoapps/register_cme/admin.py | openedx/stanford/djangoapps/register_cme/admin.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.utils.html import format_html
from .models import ExtraInfo
class ExtraInfoAdmin(admin.ModelAdmin):
"""
Admin interface for ExtraInfo model.
"""
list_display = (
'user',
'get_email',
'last_name',
'first_name',
)
readonly_fields = (
'user',
)
search_fields = (
'user__username',
'user__email',
'last_name',
'first_name',
)
def get_email(self, obj):
return obj.user.email
get_email.short_description = 'Email address'
class Meta(object):
model = ExtraInfo
class NewUserAdmin(UserAdmin):
"""
Modifies admin interface for User model to display additional ExtraInfo link.
"""
def __init__(self, *args, **kwargs):
super(NewUserAdmin, self).__init__(*args, **kwargs)
admin.views.main.EMPTY_CHANGELIST_VALUE = ''
list_display = (
'username',
'email',
'first_name',
'last_name',
'is_staff',
'extrainfo_link',
)
list_select_related = (
'extrainfo',
)
def extrainfo_link(self, obj):
return format_html(
'<a href="/admin/register_cme/extrainfo/{extrainfo_id}">ExtraInfo</a>',
extrainfo_id=obj.extrainfo.id,
)
extrainfo_link.short_description = 'ExtraInfo'
extrainfo_link.allow_tags = True
admin.site.register(ExtraInfo, ExtraInfoAdmin)
admin.site.unregister(User)
admin.site.register(User, NewUserAdmin)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from .models import ExtraInfo
class ExtraInfoAdmin(admin.ModelAdmin):
"""
Admin interface for ExtraInfo model.
"""
list_display = (
'user',
'get_email',
'last_name',
'first_name',
)
readonly_fields = (
'user',
)
search_fields = (
'user__username',
'user__email',
'last_name',
'first_name',
)
def get_email(self, obj):
return obj.user.email
get_email.short_description = 'Email address'
class Meta(object):
model = ExtraInfo
admin.site.register(ExtraInfo, ExtraInfoAdmin)
| agpl-3.0 | Python |
7bbf00a1e68d2310346c10f416e9bee16e99bb44 | Add RegisterForm | siawyoung/bookstore,siawyoung/bookstore,siawyoung/bookstore | bookstore_app/forms.py | bookstore_app/forms.py | import pdb
from django import forms
from models import *
class LoginForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = Customer
fields = ['login_id', 'password']
labels = {
'login_id': 'Username'
}
class RegisterForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = Customer
fields = '__all__'
labels = {
'login_id': 'Username',
'cc_num': 'Credit Card',
'phone_num': 'Phone Number'
} | from django import forms
from models import *
class LoginForm(forms.ModelForm):
# username = forms.CharField(label='Username', max_length=20)
# password = forms.PasswordInput()
password = forms.CharField(widget=forms.PasswordInput)
class Meta:
model = Customer
fields = ['login_id', 'password']
labels = {
'login_id': 'Username'
} | mit | Python |
f1cefc3fa4500440af63765784175baf760ae6c7 | bump version 0.9.9 | littlezz/island-backup,littlezz/island-backup,littlezz/island-backup | island_backup/__init__.py | island_backup/__init__.py | version = '0.9.9'
| version = '0.9.1'
| mit | Python |
3a795c6a614d96012d60216818cd3b6a4f2e7ab0 | Remove hard coded input and output files from script | NLeSC/embodied-emotions-scripts,NLeSC/embodied-emotions-scripts | embem/bodyparts/make_body_part_mapping.py | embem/bodyparts/make_body_part_mapping.py | """Make a mapping from body part words to categories.
Make mapping <body part word> -> [historic words] based on Inger Leemans'
clustering.
Usage: python make_body_part_mapping.py
Requires files body_part_clusters_renaissance.csv,
body_part_clusters_classisism.csv, and body_part_clusters_enlightenment.csv to
be in the current directory.
Writes body_part_mapping.json to the current directory.
"""
import codecs
import json
import argparse
import os
def csv2mapping(file_name):
mapping = {}
with codecs.open(file_name, 'rb', 'utf-8') as f:
for line in f.readlines():
parts = line.split(';')
label = parts[0].lower()
if parts[2] != '':
if not mapping.get(label):
mapping[label] = []
for entry in parts[2:]:
if entry and entry != '\n':
words = entry.split('\t')
mapping[label].append(words[0])
return mapping
def merge_mappings(m1, m2):
for k, v in m2.iteritems():
if not m1.get(k):
m1[k] = v
else:
m1[k] = m1[k] + v
return m1
parser = argparse.ArgumentParser()
parser.add_argument('dir', help='directory containing the body part cluster '
'csv files (<embem_data_dir>/dict).')
parser.add_argument('json_out', help='name of file to write the mapping to '
'(json file).')
args = parser.parse_args()
dr = args.dir
mapping_r = csv2mapping(os.path.join(dr, 'body_part_clusters_renaissance.csv'))
mapping_c = csv2mapping(os.path.join(dr, 'body_part_clusters_classisism.csv'))
mapping_e = csv2mapping(os.path.join(dr,
'body_part_clusters_enlightenment.csv'))
mapping = merge_mappings(mapping_r, mapping_c)
mapping = merge_mappings(mapping, mapping_e)
for k, v in mapping.iteritems():
mapping[k] = list(set(mapping[k]))
with codecs.open(args.json_out, 'wb', 'utf-8') as f:
json.dump(mapping, f, indent=2)
| """Make a mapping from body part words to categories.
Make mapping <body part word> -> [historic words] based on Inger Leemans'
clustering.
Usage: python make_body_part_mapping.py
Requires files body_part_clusters_renaissance.csv,
body_part_clusters_classisism.csv, and body_part_clusters_enlightenment.csv to
be in the current directory.
Writes body_part_mapping.json to the current directory.
"""
import codecs
import json
def csv2mapping(file_name):
mapping = {}
with codecs.open(file_name, 'rb', 'utf-8') as f:
for line in f.readlines():
parts = line.split(';')
label = parts[0].lower()
if parts[2] != '':
if not mapping.get(label):
mapping[label] = []
for entry in parts[2:]:
if entry and entry != '\n':
words = entry.split('\t')
mapping[label].append(words[0])
return mapping
def merge_mappings(m1, m2):
for k, v in m2.iteritems():
if not m1.get(k):
m1[k] = v
else:
m1[k] = m1[k] + v
return m1
mapping_r = csv2mapping('body_part_clusters_renaissance.csv')
mapping_c = csv2mapping('body_part_clusters_classisism.csv')
mapping_e = csv2mapping('body_part_clusters_enlightenment.csv')
mapping = merge_mappings(mapping_r, mapping_c)
mapping = merge_mappings(mapping, mapping_e)
for k, v in mapping.iteritems():
mapping[k] = list(set(mapping[k]))
with codecs.open('body_part_mapping.json', 'wb', 'utf-8') as f:
json.dump(mapping, f, indent=2)
| apache-2.0 | Python |
110131487474abadd6255bd31153592f3fa516c0 | Add SDG and validation generator | johnmartinsson/bird-species-classification,johnmartinsson/bird-species-classification | trainResnet.py | trainResnet.py | from __future__ import print_function
import numpy as np
np.random.seed(1337) # for reproducibility
SEED = 1337
from keras.datasets import mnist
from keras.utils import np_utils
from keras import backend as K
from keras.optimizers import SGD
from bird.models.resnet import ResNetBuilder
from bird.generators.sound import SoundDataGenerator
batch_size = 8
nb_classes = 20
samples_per_epoch = 2008
nb_epoch = 20
# input image dimensions
img_rows, img_cols = 256, 512
# number of channels
nb_channels = 1
model = ResNetBuilder.build_resnet_18((img_rows, img_cols, nb_channels), nb_classes)
sgd = SGD(lr=0.001, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy',
optimizer=sgd,
metrics=['accuracy'])
train_datagen = SoundDataGenerator(
rescale=1./255)
#augment_with_same_class=True,
#augment_with_noise=True)
valid_datagen = SoundDataGenerator(
rescale=1./255)
# Generator for training data
print("Loading training data...")
train_generator = train_datagen.flow_from_directory(
'./datasets/birdClef2016Subset/train',
target_size=(img_rows, img_cols),
batch_size=batch_size,
class_mode='categorical',
color_mode='grayscale',
seed=SEED)
# Generator for validation data
print("Loading validation data...")
valid_generator = valid_datagen.flow_from_directory(
'./datasets/birdClef2016Subset/valid',
target_size=(img_rows, img_cols),
batch_size=batch_size,
class_mode='categorical',
color_mode='grayscale',
seed=SEED)
# Fit model to generated training data
model.fit_generator(
train_generator,
samples_per_epoch=samples_per_epoch,
nb_epoch=nb_epoch,
validation_data=valid_generator,
nb_val_samples=613)
# Save weights
model.save_weights('resnet.h5')
| from __future__ import print_function
import numpy as np
np.random.seed(1337) # for reproducibility
SEED = 1337
from keras.datasets import mnist
from keras.utils import np_utils
from keras import backend as K
from bird.models.resnet import ResNetBuilder
from bird.generators.sound import SoundDataGenerator
batch_size = 8
nb_classes = 20
samples_per_epoch = 100
nb_epoch = 5
# input image dimensions
img_rows, img_cols = 256, 512
# number of channels
nb_channels = 1
model = ResNetBuilder.build_resnet_18((img_rows, img_cols, nb_channels), nb_classes)
model.compile(loss='categorical_crossentropy',
optimizer='adadelta',
metrics=['accuracy'])
train_datagen = SoundDataGenerator(
rescale=1./255)
#augment_with_same_class=True,
#augment_with_noise=True)
valid_datagen = SoundDataGenerator(
rescale=1./255)
# Generator for training data
print("Loading training data...")
train_generator = train_datagen.flow_from_directory(
'./datasets/birdClef2016Subset/train',
target_size=(img_rows, img_cols),
batch_size=batch_size,
class_mode='categorical',
save_to_dir='./visuals/resnet_training_samples',
color_mode='grayscale',
seed=SEED)
# Generator for validation data
print("Loading validation data...")
valid_generator = valid_datagen.flow_from_directory(
'./datasets/birdClef2016Subset/valid',
target_size=(img_rows, img_cols),
batch_size=batch_size,
class_mode='categorical',
color_mode='grayscale',
seed=SEED)
# Fit model to generated training data
model.fit_generator(
train_generator,
samples_per_epoch=samples_per_epoch,
nb_epoch=nb_epoch)
#validation_data=valid_generator,
#nb_val_samples=613)
# Save weights
model.save_weights('resnet.h5')
| mit | Python |
cf0d928cffc86e7ba2a68a7e95c54c7e530d2da8 | Disable accelerate_simd test for now. | apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb | packages/Python/lldbsuite/test/lang/swift/accelerate_simd/TestAccelerateSIMD.py | packages/Python/lldbsuite/test/lang/swift/accelerate_simd/TestAccelerateSIMD.py | # TestAccelerateSIMD.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2018 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(),
decorators=[swiftTest,skipUnlessDarwin,
skipIf(bugnumber=46330565)])
| # TestAccelerateSIMD.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2018 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(),
decorators=[swiftTest,skipUnlessDarwin])
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.