commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
4c43119cd5d231e7cfe120d3b1e0881dc1048c42 | edit library(miller-rabin) | knuu/competitive-programming,knuu/competitive-programming,knuu/competitive-programming,knuu/competitive-programming | python-library/math/prime.py | python-library/math/prime.py | class Prime:
""" make prime numbers lists
complexity: O(n^(1/2))
used in AOJ0202
"""
def __init__(self, n):
self.is_prime = [True for _ in range(n+1)]
self.primeList = []
self.is_prime[0] = self.is_prime[1] = False
for i in range(2, int(pow(n, 0.5))+1):
if self.is_prime[i] == True:
self.primeList.append(i)
for j in range(2*i, n+1, i):
self.is_prime[j] = False
for i in range(int(pow(n, 0.5))+1, n+1):
if self.is_prime[i] == True:
self.primeList.append(i)
def isPrime(self, n):
return self.is_prime[n]
def nthPrime(self, n):
return self.primeList[n-1]
def miller_rabin(n):
""" primality Test
if n < 3,825,123,056,546,413,051, it is enough to test
a = 2, 3, 5, 7, 11, 13, 17, 19, and 23.
Complexity: O(log^3 n)
"""
if n == 2: return True
if n <= 1 or not n&1: return False
primes = [2, 3, 5, 7, 11, 13, 17, 19, 23]
d = n - 1
s = 0
while not d&1:
d >>= 1
s += 1
for prime in primes:
if prime >= n: continue
x = pow(prime, d, n)
if x == 1: break
for r in range(s):
if x == n - 1: break
if r + 1 == s: return False
x = x * x % n
return True
| class Prime:
""" make prime numbers lists
complexity: O(n^(1/2))
used in AOJ0202
"""
def __init__(self, n):
self.is_prime = [True for _ in range(n+1)]
self.primeList = []
self.is_prime[0] = self.is_prime[1] = False
for i in range(2, int(pow(n, 0.5))+1):
if self.is_prime[i] == True:
self.primeList.append(i)
for j in range(2*i, n+1, i):
self.is_prime[j] = False
for i in range(int(pow(n, 0.5))+1, n+1):
if self.is_prime[i] == True:
self.primeList.append(i)
def isPrime(self, n):
return self.is_prime[n]
def nthPrime(self, n):
return self.primeList[n-1]
def miller_rabin(n):
""" primality Test
if n < 3,825,123,056,546,413,051, it is enough to test
a = 2, 3, 5, 7, 11, 13, 17, 19, and 23.
Complexity: O(log^3 n)
"""
if n == 2: return True
if n <= 1 or not n&1: return False
primes = [2, 3, 5, 7, 11, 13, 17, 19, 23]
d = n - 1
s = 0
while not d&1:
d >>= 1
s += 1
for prime in primes:
if prime >= n: break
x = pow(prime, d, n)
if x == 1: break
for r in range(s):
if x == n - 1: break
if r + 1 == s: return False
x = x * x % n
return True
| mit | Python |
94f88b7b00ace47a19644fdc23108fdd950c1e97 | Add comment about source and license of _rewrite_shebangs.py | gem/oq-installers,gem/oq-nsis,gem/oq-installers | installers/windows/nsis/dist/_rewrite_shebangs.py | installers/windows/nsis/dist/_rewrite_shebangs.py | # Original source:
# https://github.com/takluyver/pynsist/blob/80392f24d664b08eb7f0b7e45a408575e55810fc/nsist/_rewrite_shebangs.py
# Copyright (c) 2014-2017 Thomas Kluyver under MIT license:
# https://github.com/takluyver/pynsist/blob/e01d6f08eb71bc5aa2d294f5369a736e59becd09/LICENSE
"""This is run during installation to rewrite the shebang (#! headers) of script
files.
"""
import glob
import os.path
import sys
if sys.version_info[0] >= 3:
# What do we do if the path contains characters outside the system code page?!
b_python_exe = sys.executable.encode(sys.getfilesystemencoding())
else:
b_python_exe = sys.executable
def rewrite(path):
with open(path, 'rb') as f:
contents = f.readlines()
if not contents:
return
if contents[0].strip() != b'#!python':
return
contents[0] = b'#!"' + b_python_exe + b'"\n'
with open(path, 'wb') as f:
f.writelines(contents)
def main(argv=None):
if argv is None:
argv = sys.argv
target_dir = argv[1]
for path in glob.glob(os.path.join(target_dir, '*-script.py')):
rewrite(path)
if __name__ == '__main__':
main()
| """This is run during installation to rewrite the shebang (#! headers) of script
files.
"""
import glob
import os.path
import sys
if sys.version_info[0] >= 3:
# What do we do if the path contains characters outside the system code page?!
b_python_exe = sys.executable.encode(sys.getfilesystemencoding())
else:
b_python_exe = sys.executable
def rewrite(path):
with open(path, 'rb') as f:
contents = f.readlines()
if not contents:
return
if contents[0].strip() != b'#!python':
return
contents[0] = b'#!"' + b_python_exe + b'"\n'
with open(path, 'wb') as f:
f.writelines(contents)
def main(argv=None):
if argv is None:
argv = sys.argv
target_dir = argv[1]
for path in glob.glob(os.path.join(target_dir, '*-script.py')):
rewrite(path)
if __name__ == '__main__':
main()
| agpl-3.0 | Python |
9dd019c12899045faebd49bc06026c8512609c9e | Remove assert line from import | yakky/django-statictemplate,bdon/django-statictemplate,ojii/django-statictemplate | statictemplate/management/commands/statictemplate.py | statictemplate/management/commands/statictemplate.py | # -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
| # -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
assert all((patterns, url, include))
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
| bsd-3-clause | Python |
a2f1bfc4a61b52042bf947ba75b444f6efa7a724 | Remove duplicate negative test of flavor_id | cisco-openstack/tempest,FujitsuEnablingSoftwareTechnologyGmbH/tempest,FujitsuEnablingSoftwareTechnologyGmbH/tempest,cisco-openstack/tempest,jaspreetw/tempest,flyingfish007/tempest,CiscoSystems/tempest,JioCloud/tempest,zsoltdudas/lis-tempest,yamt/tempest,cloudbase/lis-tempest,neerja28/Tempest,nunogt/tempest,bigswitch/tempest,rzarzynski/tempest,pandeyop/tempest,ntymtsiv/tempest,eggmaster/tempest,danielmellado/tempest,vedujoshi/os_tempest,openstack/tempest,vedujoshi/tempest,tudorvio/tempest,akash1808/tempest,danielmellado/tempest,BeenzSyed/tempest,pczerkas/tempest,masayukig/tempest,BeenzSyed/tempest,vedujoshi/os_tempest,openstack/tempest,xbezdick/tempest,rakeshmi/tempest,varunarya10/tempest,zsoltdudas/lis-tempest,Lilywei123/tempest,manasi24/jiocloud-tempest-qatempest,flyingfish007/tempest,jamielennox/tempest,manasi24/jiocloud-tempest-qatempest,dkalashnik/tempest,Juraci/tempest,NexusIS/tempest,Juniper/tempest,dkalashnik/tempest,Tesora/tesora-tempest,roopali8/tempest,manasi24/tempest,ebagdasa/tempest,vedujoshi/tempest,hayderimran7/tempest,jaspreetw/tempest,afaheem88/tempest_neutron,Vaidyanath/tempest,ebagdasa/tempest,Vaidyanath/tempest,rzarzynski/tempest,Lilywei123/tempest,nunogt/tempest,eggmaster/tempest,akash1808/tempest,bigswitch/tempest,afaheem88/tempest,JioCloud/tempest,rakeshmi/tempest,tonyli71/tempest,Mirantis/tempest,neerja28/Tempest,CiscoSystems/tempest,masayukig/tempest,alinbalutoiu/tempest,manasi24/tempest,ntymtsiv/tempest,hayderimran7/tempest,cloudbase/lis-tempest,hpcloud-mon/tempest,LIS/lis-tempest,vmahuli/tempest,afaheem88/tempest_neutron,NexusIS/tempest,queria/my-tempest,izadorozhna/tempest,vmahuli/tempest,Juniper/tempest,jamielennox/tempest,tonyli71/tempest,izadorozhna/tempest,Tesora/tesora-tempest,tudorvio/tempest,xbezdick/tempest,roopali8/tempest,pczerkas/tempest,varunarya10/tempest,sebrandon1/tempest,redhat-cip/tempest,Juraci/tempest,pandeyop/tempest,afaheem88/temp
est,Mirantis/tempest,redhat-cip/tempest,hpcloud-mon/tempest,queria/my-tempest,sebrandon1/tempest,LIS/lis-tempest,yamt/tempest,alinbalutoiu/tempest | tempest/api/compute/flavors/test_flavors_negative.py | tempest/api/compute/flavors/test_flavors_negative.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest.api.compute import base
from tempest import exceptions
from tempest.test import attr
class FlavorsNegativeTestJSON(base.BaseV2ComputeTest):
_interface = 'json'
@classmethod
def setUpClass(cls):
super(FlavorsNegativeTestJSON, cls).setUpClass()
cls.client = cls.flavors_client
@attr(type=['negative', 'gate'])
def test_invalid_minRam_filter(self):
self.assertRaises(exceptions.BadRequest,
self.client.list_flavors_with_detail,
{'minRam': 'invalid'})
@attr(type=['negative', 'gate'])
def test_invalid_minDisk_filter(self):
self.assertRaises(exceptions.BadRequest,
self.client.list_flavors_with_detail,
{'minDisk': 'invalid'})
@attr(type=['negative', 'gate'])
def test_non_existent_flavor_id(self):
# flavor details are not returned for non-existent flavors
nonexistent_flavor_id = str(uuid.uuid4())
self.assertRaises(exceptions.NotFound, self.client.get_flavor_details,
nonexistent_flavor_id)
class FlavorsNegativeTestXML(FlavorsNegativeTestJSON):
_interface = 'xml'
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest import exceptions
from tempest.test import attr
class FlavorsNegativeTestJSON(base.BaseV2ComputeTest):
_interface = 'json'
@classmethod
def setUpClass(cls):
super(FlavorsNegativeTestJSON, cls).setUpClass()
cls.client = cls.flavors_client
# Generating a nonexistent flavor id
resp, flavors = cls.client.list_flavors()
flavor_ids = [flavor['id'] for flavor in flavors]
while True:
cls.nonexistent_flavor_id = data_utils.rand_int_id(start=999)
if cls.nonexistent_flavor_id not in flavor_ids:
break
@attr(type=['negative', 'gate'])
def test_invalid_minRam_filter(self):
self.assertRaises(exceptions.BadRequest,
self.client.list_flavors_with_detail,
{'minRam': 'invalid'})
@attr(type=['negative', 'gate'])
def test_invalid_minDisk_filter(self):
self.assertRaises(exceptions.BadRequest,
self.client.list_flavors_with_detail,
{'minDisk': 'invalid'})
@attr(type=['negative', 'gate'])
def test_get_flavor_details_for_invalid_flavor_id(self):
# Ensure 404 returned for invalid flavor ID
invalid_flavor_id = str(uuid.uuid4())
self.assertRaises(exceptions.NotFound, self.client.get_flavor_details,
invalid_flavor_id)
@attr(type=['negative', 'gate'])
def test_non_existent_flavor_id(self):
# flavor details are not returned for non-existent flavors
self.assertRaises(exceptions.NotFound, self.client.get_flavor_details,
self.nonexistent_flavor_id)
class FlavorsNegativeTestXML(FlavorsNegativeTestJSON):
_interface = 'xml'
| apache-2.0 | Python |
d39776a8ace21cb1ab7985ccc7c571459b5e9af5 | use bulk create with transmissions | lsgunth/rapidsms,caktus/rapidsms,eHealthAfrica/rapidsms,peterayeni/rapidsms,caktus/rapidsms,peterayeni/rapidsms,catalpainternational/rapidsms,peterayeni/rapidsms,peterayeni/rapidsms,lsgunth/rapidsms,catalpainternational/rapidsms,caktus/rapidsms,catalpainternational/rapidsms,lsgunth/rapidsms,eHealthAfrica/rapidsms,lsgunth/rapidsms,ehealthafrica-ci/rapidsms,eHealthAfrica/rapidsms,catalpainternational/rapidsms,ehealthafrica-ci/rapidsms,ehealthafrica-ci/rapidsms | rapidsms/router/db/router.py | rapidsms/router/db/router.py | from django.db.models import Q
from rapidsms.router.blocking import BlockingRouter
from rapidsms.router.db.tasks import receive, send_transmissions
class DatabaseRouter(BlockingRouter):
def queue_message(self, direction, connections, text, fields=None):
"""Create Message and Transmission objects for messages."""
from rapidsms.router.db.models import Message, Transmission
dbm = Message.objects.create(text=text, direction=direction)
transmissions = []
for connection in connections:
transmissions.append(Transmission(message=dbm, status='Q',
connection=connection))
Transmission.objects.bulk_create(transmissions)
return dbm
def new_incoming_message(self, connections, text, fields=None):
"""Queue message in DB for async inbound processing."""
dbm = self.queue_message("I", connections, text, fields)
receive.delay(message_id=dbm.pk)
# don't return message to prevent futher processing
# inbound processing will be handled within an async task
return None
def backend_preparation(self, msg):
"""Queue message in DB rather than passing directly to backends."""
# create queued message and associated transmissions
dbm = self.queue_message("O", msg.connections, msg.text)
# mark message as processing
dbm.status = "P"
dbm.save()
transmissions = dbm.transmissions
# divide transmissions by backend
backends = transmissions.values_list('connection__backend_id',
flat=True)
for backend_id in backends.distinct():
q = Q(connection__backend_id=backend_id)
# TODO: chunk transmissions into more managable lenths
chunk = transmissions.filter(q).values_list('pk', flat=True)
send_transmissions.delay(backend_id=backend_id,
message_id=dbm.pk,
transmission_ids=chunk)
| from django.db.models import Q
from rapidsms.router.blocking import BlockingRouter
from rapidsms.router.db.tasks import receive, send_transmissions
class DatabaseRouter(BlockingRouter):
def queue_message(self, direction, connections, text, fields=None):
"""Create Message and Transmission objects for messages."""
from rapidsms.router.db.models import Message
msg = Message.objects.create(text=text, direction=direction)
# TODO: update to use bulk insert ORM api
for connection in connections:
msg.transmissions.create(connection=connection, status='Q')
return msg
def new_incoming_message(self, connections, text, fields=None):
"""Queue message in DB for async inbound processing."""
dbm = self.queue_message("I", connections, text, fields)
receive.delay(message_id=dbm.pk)
# don't return message to prevent futher processing
# inbound processing will be handled within an async task
return None
def backend_preparation(self, msg):
"""Queue message in DB rather than passing directly to backends."""
# create queued message and associated transmissions
dbm = self.queue_message("O", msg.connections, msg.text)
# mark message as processing
dbm.status = "P"
dbm.save()
transmissions = dbm.transmissions
# divide transmissions by backend
backends = transmissions.values_list('connection__backend_id',
flat=True)
for backend_id in backends.distinct():
q = Q(connection__backend_id=backend_id)
# TODO: chunk transmissions into more managable lenths
chunk = transmissions.filter(q).values_list('pk', flat=True)
send_transmissions.delay(backend_id=backend_id,
message_id=dbm.pk,
transmission_ids=chunk)
| bsd-3-clause | Python |
be02bae14a9fb217eb4ab61c4942d9bb5ccc5e01 | Clarify docstring | AustereCuriosity/astropy,mhvk/astropy,StuartLittlefair/astropy,lpsinger/astropy,AustereCuriosity/astropy,saimn/astropy,dhomeier/astropy,MSeifert04/astropy,DougBurke/astropy,funbaker/astropy,stargaser/astropy,lpsinger/astropy,funbaker/astropy,dhomeier/astropy,AustereCuriosity/astropy,MSeifert04/astropy,StuartLittlefair/astropy,stargaser/astropy,mhvk/astropy,joergdietrich/astropy,tbabej/astropy,larrybradley/astropy,pllim/astropy,DougBurke/astropy,joergdietrich/astropy,bsipocz/astropy,pllim/astropy,kelle/astropy,saimn/astropy,AustereCuriosity/astropy,DougBurke/astropy,MSeifert04/astropy,funbaker/astropy,lpsinger/astropy,astropy/astropy,aleksandr-bakanov/astropy,astropy/astropy,joergdietrich/astropy,aleksandr-bakanov/astropy,stargaser/astropy,mhvk/astropy,joergdietrich/astropy,joergdietrich/astropy,larrybradley/astropy,tbabej/astropy,larrybradley/astropy,saimn/astropy,bsipocz/astropy,saimn/astropy,MSeifert04/astropy,bsipocz/astropy,kelle/astropy,pllim/astropy,tbabej/astropy,saimn/astropy,dhomeier/astropy,pllim/astropy,astropy/astropy,pllim/astropy,kelle/astropy,aleksandr-bakanov/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,larrybradley/astropy,funbaker/astropy,stargaser/astropy,DougBurke/astropy,dhomeier/astropy,lpsinger/astropy,StuartLittlefair/astropy,mhvk/astropy,tbabej/astropy,mhvk/astropy,astropy/astropy,kelle/astropy,astropy/astropy,tbabej/astropy,kelle/astropy,dhomeier/astropy,lpsinger/astropy,StuartLittlefair/astropy,AustereCuriosity/astropy,bsipocz/astropy,larrybradley/astropy | astropy/units/format/__init__.py | astropy/units/format/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
A collection of different unit formats.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from .base import Base
from .generic import Generic
from .console import Console
from .fits import Fits
from .latex import Latex
from .unicode import Unicode
from .vounit import VOUnit
__all__ = [
'Generic', 'Console', 'Fits', 'Latex', 'Unicode', 'VOUnit',
'get_format']
def get_format(format=None):
"""
Get a formatter by name.
Parameters
----------
format : str or `astropy.units.format.Base` instance or subclass
The name of the format, or the format instance or subclass
itself.
Returns
-------
format : `astropy.units.format.Base` instance
"""
if isinstance(format, type) and issubclass(format, Base):
return format()
elif isinstance(format, Base):
return format
if format is None:
format = 'generic'
format = format.lower()
for key in __all__:
val = globals()[key]
if (issubclass(val, Base) and
key.lower() == format.lower()):
return val()
raise ValueError("Unknown format {0!r}".format(format))
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
A collection of different unit formats.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from .base import Base
from .generic import Generic
from .console import Console
from .fits import Fits
from .latex import Latex
from .unicode import Unicode
from .vounit import VOUnit
__all__ = [
'Generic', 'Console', 'Fits', 'Latex', 'Unicode', 'VOUnit',
'get_format']
def get_format(format=None):
"""
Get a formatter by name.
Parameters
----------
format : str or `astropy.units.format.Base` instance
The name of the format, or the format instance itself.
Returns
-------
format : `astropy.units.format.Base` instance
"""
if isinstance(format, type) and issubclass(format, Base):
return format()
elif isinstance(format, Base):
return format
if format is None:
format = 'generic'
format = format.lower()
for key in __all__:
val = globals()[key]
if (issubclass(val, Base) and
key.lower() == format.lower()):
return val()
raise ValueError("Unknown format {0!r}".format(format))
| bsd-3-clause | Python |
cbf0307dce466e719ab388dae50c29d5e72f60e2 | throw exception if docker_project_name not set | cboling/xos,cboling/xos,cboling/xos,cboling/xos,cboling/xos | xos/synchronizers/onboarding/steps/sync_xos.py | xos/synchronizers/onboarding/steps/sync_xos.py | import os
import sys
import base64
from django.db.models import F, Q
from xos.config import Config
from synchronizers.base.syncstep import SyncStep, DeferredException
from core.models import XOS
from xos.logger import Logger, logging
from synchronizers.base.ansible import run_template
# xosbuilder will be in steps/..
parentdir = os.path.join(os.path.dirname(__file__),"..")
sys.path.insert(0,parentdir)
from xosbuilder import XOSBuilder
logger = Logger(level=logging.INFO)
class SyncXOS(SyncStep, XOSBuilder):
provides=[XOS]
observes=XOS
requested_interval=0
playbook = "sync_xos.yaml"
def __init__(self, **args):
SyncStep.__init__(self, **args)
XOSBuilder.__init__(self)
def sync_record(self, xos):
logger.info("Sync'ing XOS %s" % xos)
if not xos.docker_project_name:
raise Exception("xos.docker_project_name is not set")
if (not xos.enable_build):
raise DeferredException("XOS build is currently disabled")
self.create_docker_compose()
dockerfiles = [self.create_ui_dockerfile()]
tenant_fields = {"dockerfiles": dockerfiles,
"build_dir": self.build_dir,
"docker_project_name": xos.docker_project_name,
"ansible_tag": xos.__class__.__name__ + "_" + str(xos.id)}
path="XOS"
res = run_template(self.playbook, tenant_fields, path=path)
def delete_record(self, m):
pass
def fetch_pending(self, deleted=False):
pend = super(SyncXOS, self).fetch_pending(deleted)
return pend
| import os
import sys
import base64
from django.db.models import F, Q
from xos.config import Config
from synchronizers.base.syncstep import SyncStep, DeferredException
from core.models import XOS
from xos.logger import Logger, logging
from synchronizers.base.ansible import run_template
# xosbuilder will be in steps/..
parentdir = os.path.join(os.path.dirname(__file__),"..")
sys.path.insert(0,parentdir)
from xosbuilder import XOSBuilder
logger = Logger(level=logging.INFO)
class SyncXOS(SyncStep, XOSBuilder):
provides=[XOS]
observes=XOS
requested_interval=0
playbook = "sync_xos.yaml"
def __init__(self, **args):
SyncStep.__init__(self, **args)
XOSBuilder.__init__(self)
def sync_record(self, xos):
logger.info("Sync'ing XOS %s" % xos)
if (not xos.enable_build):
raise DeferredException("XOS build is currently disabled")
self.create_docker_compose()
dockerfiles = [self.create_ui_dockerfile()]
tenant_fields = {"dockerfiles": dockerfiles,
"build_dir": self.build_dir,
"docker_project_name": xos.docker_project_name,
"ansible_tag": xos.__class__.__name__ + "_" + str(xos.id)}
path="XOS"
res = run_template(self.playbook, tenant_fields, path=path)
def delete_record(self, m):
pass
def fetch_pending(self, deleted=False):
pend = super(SyncXOS, self).fetch_pending(deleted)
return pend
| apache-2.0 | Python |
fa7d4fadeabe28b6468833f1c01c21fde1bc2747 | Revert "adding unit test for get_force()" | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | hoomd/md/test-py/test_force_base.py | hoomd/md/test-py/test_force_base.py | # -*- coding: iso-8859-1 -*-
# Maintainer: jglaser
from hoomd import *
from hoomd import md;
context.initialize()
import unittest
import os
# md.pair.lj
class force_base_tests (unittest.TestCase):
def setUp(self):
print
self.s = init.create_lattice(lattice.sc(a=2.1878096788957757),n=[10,10,10]); #target a packing fraction of 0.05
context.current.sorter.set_params(grid=8)
# basic test of creation
def test(self):
nl = md.nlist.cell()
lj = md.pair.lj(r_cut=3.0, nlist = nl);
lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0)
lj.update_coeffs();
all = group.all();
md.integrate.mode_standard(dt=0.0)
md.integrate.nvt(group=all, kT=1.2, tau=0.5)
run(1, quiet=True);
g = group.tag_list(name='ptl0', tags=[0])
energy = lj.get_energy(g)
self.assertAlmostEqual(energy, self.s.particles.get(0).net_energy, places=5);
def tearDown(self):
self.s = None
context.initialize();
if __name__ == '__main__':
unittest.main(argv = ['test.py', '-v'])
| # -*- coding: iso-8859-1 -*-
# Maintainer: jglaser
from hoomd import *
from hoomd import md;
context.initialize()
import unittest
import os
# md.pair.lj
class force_base_tests (unittest.TestCase):
def setUp(self):
print
self.s = init.create_lattice(lattice.sc(a=2.1878096788957757),n=[10,10,10]); #target a packing fraction of 0.05
context.current.sorter.set_params(grid=8)
# basic test of creation
def test(self):
nl = md.nlist.cell()
lj = md.pair.lj(r_cut=3.0, nlist = nl);
lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0)
lj.update_coeffs();
all = group.all();
md.integrate.mode_standard(dt=0.0)
md.integrate.nvt(group=all, kT=1.2, tau=0.5)
run(1, quiet=True);
g = group.tag_list(name='ptl0', tags=[0])
energy = lj.get_energy(g)
self.assertAlmostEqual(energy, self.s.particles.get(0).net_energy, places=5);
force_x = lj.get_force(g, index=0)
force_y = lj.get_force(g, index=1)
force_z = lj.get_force(g, index=2)
self.assertAlmostEqual(force_x, self.s.particles.get(0).net_force[0], places=5);
self.assertAlmostEqual(force_y, self.s.particles.get(0).net_force[1], places=5);
self.assertAlmostEqual(force_z, self.s.particles.get(0).net_force[2], places=5);
def tearDown(self):
self.s = None
context.initialize();
if __name__ == '__main__':
unittest.main(argv = ['test.py', '-v'])
| bsd-3-clause | Python |
713b366b228983d9b6e33238da9d22c9aa51176a | Use the async dialogs.prompt_text api | SHA2017-badge/micropython-esp32,SHA2017-badge/micropython-esp32,SHA2017-badge/micropython-esp32,SHA2017-badge/micropython-esp32,SHA2017-badge/micropython-esp32 | esp32/modules/setup.py | esp32/modules/setup.py | # SETUP APPLICATION
# SHOWN ON FIRST BOOT
import ugfx, badge, appglue, dialogs, utime
def load_settings():
return badge.nvs_get_str("owner", "name", "")
def store_settings(nickname):
badge.nvs_set_str("owner", "name", nickname)
def is_developer(nickname):
if (nickname==""):
badge.nvs_set_str('badge', 'setup.state', '2') # Skip the sponsors
return True
return False
def action_home(pressed):
if (pressed):
appglue.start_app("")
def set_setup_state():
s_old = int(badge.nvs_get_str('badge', 'setup.state', '0'))
s_new = 2
if (s_old==0):
s_new = 1
badge.nvs_set_str('badge', 'setup.state', str(s_new))
def draw_setup_completed():
ugfx.clear(ugfx.WHITE)
ugfx.string(0, 0, "Setup", "PermanentMarker22", ugfx.BLACK)
ugfx.string(0, 25, "Settings stored to flash!", "Roboto_Regular12", ugfx.BLACK)
ugfx.set_lut(ugfx.LUT_FASTER)
ugfx.flush()
def return_to_home():
badge.eink_busy_wait()
appglue.start_app("")
def program_main():
ugfx.init()
nickname = load_settings()
def asked_nickname(value):
nickname = value if value else nickname
if not is_developer(nickname):
store_settings(nickname)
# Do the firstboot magic
set_setup_state()
# Show the user that we are done
draw_setup_completed()
utime.sleep(2)
return_to_home()
dialogs.prompt_text("Nickname", nickname, cb=asked_nickname)
# Start main application
program_main()
| # SETUP APPLICATION
# SHOWN ON FIRST BOOT
import ugfx, badge, appglue, dialogs, utime
# Globals
nickname = ""
def load_settings():
global nickname
nickname = badge.nvs_get_str("owner", "name", "")
def store_settings():
global nickname
nickname_new = badge.nvs_set_str("owner", "name", nickname)
if (nickname_new):
nickname = nickname_new
def check_developer():
global nickname
if (nickname==""):
badge.nvs_set_str('badge', 'setup.state', '2') # Skip the sponsors
return True
return False
def ask_nickname():
global nickname
nickname_new = dialogs.prompt_text("Nickname", nickname)
if (nickname_new):
nickname = nickname_new
def action_home(pressed):
if (pressed):
appglue.start_app("")
def set_setup_state():
s_old = int(badge.nvs_get_str('badge', 'setup.state', '0'))
s_new = 2
if (s_old==0):
s_new = 1
badge.nvs_set_str('badge', 'setup.state', str(s_new))
def draw_setup_completed():
ugfx.clear(ugfx.WHITE)
ugfx.string(0, 0, "Setup", "PermanentMarker22", ugfx.BLACK)
ugfx.string(0, 25, "Settings stored to flash!", "Roboto_Regular12", ugfx.BLACK)
ugfx.set_lut(ugfx.LUT_FASTER)
ugfx.flush()
def return_to_home():
badge.eink_busy_wait()
appglue.start_app("")
def program_main():
ugfx.init() # We need graphics
load_settings() # Load current settings
ask_nickname() # Ask the nickname
if not check_developer():
store_settings() # Store the settings
set_setup_state() # Do the firstboot magic
draw_setup_completed() # Show the user that we are done
utime.sleep(2) # Sleep 2 seconds
return_to_home() # Return to the splash app
# Start main application
program_main()
| mit | Python |
542265f78186ea2b7594afe6ca692e5fb826c367 | Bump version to 0.5.3 | Commonists/CommonsDownloader | commonsdownloader/__init__.py | commonsdownloader/__init__.py | """commonsdownloader package."""
__version__ = '0.5.3'
| """commonsdownloader package."""
__version__ = '0.5.2'
| mit | Python |
8cb34f4d88184d0c42e8c1fc41f451fa3cd5a6be | Fix undefined reference error in command line KeepKey plugin. | romanz/electrum,wakiyamap/electrum-mona,vialectrum/vialectrum,romanz/electrum,digitalbitbox/electrum,kyuupichan/electrum,asfin/electrum,pooler/electrum-ltc,vialectrum/vialectrum,kyuupichan/electrum,spesmilo/electrum,digitalbitbox/electrum,cryptapus/electrum,kyuupichan/electrum,digitalbitbox/electrum,wakiyamap/electrum-mona,pooler/electrum-ltc,spesmilo/electrum,cryptapus/electrum,fyookball/electrum,fyookball/electrum,spesmilo/electrum,fujicoin/electrum-fjc,fyookball/electrum,pooler/electrum-ltc,pooler/electrum-ltc,digitalbitbox/electrum,neocogent/electrum,vialectrum/vialectrum,asfin/electrum,asfin/electrum,fujicoin/electrum-fjc,wakiyamap/electrum-mona,romanz/electrum,neocogent/electrum,neocogent/electrum,spesmilo/electrum,wakiyamap/electrum-mona,cryptapus/electrum,fujicoin/electrum-fjc | plugins/keepkey/cmdline.py | plugins/keepkey/cmdline.py | from electrum.plugins import hook
from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
| from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
| mit | Python |
e4b23cf4e33b1125c7d50e8675c2d7460f0ef468 | remove loopback address from allowed hosts | treehouse/livestream-django-feelings,treehouse/livestream-django-feelings,treehouse/livestream-django-feelings | feelings/feelings/deploy_settings/__init__.py | feelings/feelings/deploy_settings/__init__.py | import dj_database_url
from feelings.settings import *
def get_env_variable(var_name):
try:
return os.environ[var_name]
except KeyError:
error_msg = "Set the {} env variable".format(var_name)
if DEBUG:
warnings.war(error_msg)
else:
raise ImproperlyConfigured(error_msg)
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [
'localhost',
'.herokuapp.com',
]
INSTALLED_APPS += (
'gunicorn',
)
db_from_env = dj_database_url.config()
DATABASES['default'].update(db_from_env)
SECRET_KEY = get_env_variable("SECRET_KEY")
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage' | import dj_database_url
from feelings.settings import *
def get_env_variable(var_name):
try:
return os.environ[var_name]
except KeyError:
error_msg = "Set the {} env variable".format(var_name)
if DEBUG:
warnings.war(error_msg)
else:
raise ImproperlyConfigured(error_msg)
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'.herokuapp.com',
]
INSTALLED_APPS += (
'gunicorn',
)
db_from_env = dj_database_url.config()
DATABASES['default'].update(db_from_env)
SECRET_KEY = get_env_variable("SECRET_KEY")
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage' | mit | Python |
814c6f6a1e6365bb5e0d83b0d8147fc6ec7ed15e | Update resampling.py | cleverhans-lab/cleverhans,cleverhans-lab/cleverhans,cleverhans-lab/cleverhans | defenses/torch/audio/input_tranformation/resampling.py | defenses/torch/audio/input_tranformation/resampling.py | import torchaudio
import librosa
# resampling reference https://core.ac.uk/download/pdf/228298313.pdf
# resampling input transformation defense for audio
T = torchaudio.transforms
# Read audio file
audio_data = librosa.load(files, sr=16000)[0][-19456:]
audio_data = torch.tensor(audio_data).float().to(device)
# Discarding samples from a waveform during downsampling could remove a significant portion of the adversarial perturbation,
# thereby prevents an adversarial attack.
# resample the audio files to 8kHz from 16kHz
sample = T.Resample(16000, 8000, resampling_method="sinc_interpolation")
audio_resample_1 = sample(audio_data)
# resample the audio back to 16kHz
sample = T.Resample(8000, 16000, resampling_method="sinc_interpolation")
# Give audio_resample_2 as input to the asr model
audio_resample_2 = sample(audio_resample_1)
| import torchaudio
import librosa
# resampling reference https://core.ac.uk/download/pdf/228298313.pdf
# resampling input transformation defense for audio
T = torchaudio.transforms
audio_data = librosa.load(files, sr=16000)[0][-19456:] # Read audio file
audio_data = torch.tensor(audio_data).float().to(device)
sample = T.Resample(
16000, 8000, resampling_method="sinc_interpolation"
) # resample the audio files to 8kHz from 16kHz
audio_resample_1 = sample(audio_data)
sample = T.Resample(
8000, 16000, resampling_method="sinc_interpolation"
) # resample the audio back to 16kHz
audio_resample_2 = sample(audio_resample_1)
# Give audio_resample_2 as input to the asr model
| mit | Python |
a1d71466d09e9e1ea2f75eae57e72e0000c65ffc | Add new-style TEMPLATES setting for tests | incuna/incuna-mail,incuna/incuna-mail | tests/run.py | tests/run.py | import sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': False,
'DIRS': ('tests/templates',),
},
]
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
| import sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATE_DIRS=('tests/templates',),
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
| bsd-2-clause | Python |
b8f4d448e126c08ae0f5d9ab178ff60a06fb02f3 | Improve readability of tex conversion | tanbur/diffalg,tanbur/desr | tex_tools.py | tex_tools.py | """
Created on Wed Aug 12 01:37:16 2015
@author: richard
"""
import re
VAR_RE = '[a-z][\d_]*'
def matrix_to_tex(matrix_):
lines = []
for line in matrix_:
lines.append(' & '.join(map(str, line)) + ' \\\\')
return '\n'.join(lines)
def _var_repler(var):
var = var.group()
if len(var) == 1:
return var[0]
var_letter, subscript = var[0], var[1:]
if subscript[0] == '_':
subscript = subscript[1:]
subscript = subscript.replace('_', '')
return '{}_{{{}}}'.format(var_letter, subscript)
def var_to_tex(var):
return re.sub(VAR_RE, _var_repler, str(var).replace('_', ''))
def expr_to_tex(expr):
expr = str(expr).replace(' ', '').replace('**1.0', '')
tex = re.sub(VAR_RE, _var_repler, expr).replace('*', '')
return tex
def eqn_to_tex(eqn):
eqn = str(eqn).replace(' ', '')
expr1, expr2 = eqn.split('==')
tex = '{} &= {}'.format(expr_to_tex(expr1), expr_to_tex(expr2))
return tex
def eqns_to_tex(eqns):
''' To convert to array environment, copy the output into a lyx LaTeX cell,
then copy this entire cell into an eqnarray of sufficient size
'''
return '\\\\'.join(map(eqn_to_tex, eqns)) | """
Created on Wed Aug 12 01:37:16 2015
@author: richard
"""
import re
VAR_RE = '[a-z][\d_]*'
def matrix_to_tex(matrix_):
lines = []
for line in matrix_:
lines.append(' & '.join(map(str, line)) + ' \\\\')
return '\n'.join(lines)
def _var_repler(var):
var = var.group()
if len(var) == 1:
return var[0]
var_letter, subscript = var[0], var[1:]
if subscript[0] == '_':
subscript = subscript[1:]
subscript = subscript.replace('_', '')
return '{}_{{{}}}'.format(var_letter, subscript)
def var_to_tex(var):
return re.sub(VAR_RE, _var_repler, str(var).replace('_', ''))
def expr_to_tex(expr):
expr = str(expr).replace(' ', '')
tex = re.sub(VAR_RE, _var_repler, expr).replace('*', '')
return tex
def eqn_to_tex(eqn):
eqn = str(eqn).replace(' ', '')
expr1, expr2 = eqn.split('==')
tex = '{} &= {}'.format(expr_to_tex(expr1), expr_to_tex(expr2))
return tex
def eqns_to_tex(eqns):
''' To convert to array environment, copy the output into a lyx LaTeX cell,
then copy this entire cell into an eqnarray of sufficient size
'''
return '\\\\'.join(map(eqn_to_tex, eqns)) | apache-2.0 | Python |
c43e8314511ae614c5d3efe8d00e9d18cd04b953 | fix bug in lookup: modified: textstore.py | li-xirong/tagrel | textstore.py | textstore.py | from util import printStatus
class RecordStore:
def __init__(self, tagfile):
printStatus('textstore.RecordStore', 'read from %s' % tagfile)
self.mapping = {}
self.tag2freq = {}
for line in open(tagfile): #.readlines():
print line.strip()
[photoid, userid, tags] = line.strip().split('\t')
self.mapping[photoid] = (userid, tags.lower())
for tag in set(str.split(tags)):
self.tag2freq[tag] = self.tag2freq.get(tag,0) + 1
self.nr_images = len(self.mapping)
self.nr_tags = len(self.tag2freq)
print ("-> %d images, %d unique tags" % (self.nr_images, self.nr_tags))
def tagprior(self, tag, k):
return float(k) * self.tag2freq.get(tag,0) / self.nr_images
def lookup(self, photoid):
return self.mapping.get(photoid, (None, None))
if __name__ == '__main__':
tagfile = 'id.userid.lemmtags.txt'
store = RecordStore(tagfile)
| from util import printStatus
class RecordStore:
def __init__(self, tagfile):
printStatus('textstore.RecordStore', 'read from %s' % tagfile)
self.mapping = {}
self.tag2freq = {}
for line in open(tagfile): #.readlines():
[photoid, userid, tags] = line.strip().split('\t')
self.mapping[photoid] = (userid, tags.lower())
for tag in set(str.split(tags)):
self.tag2freq[tag] = self.tag2freq.get(tag,0) + 1
self.nr_images = len(self.mapping)
self.nr_tags = len(self.tag2freq)
print ("-> %d images, %d unique tags" % (self.nr_images, self.nr_tags))
def tagprior(self, tag, k):
return float(k) * self.tag2freq.get(tag,0) / self.nr_images
def lookup(self, photoid):
return self.mapping[photoid]
| mit | Python |
20d6e123b8cc28f9600e8a20c64bfe8abbc6c3f4 | Remove comment | hfrequency/django-issue-tracker | issue_tracker/core/models.py | issue_tracker/core/models.py | from django.db import models
from django.contrib.auth.models import User
class Project(models.Model):
user = models.ForeignKey(User)
name = models.CharField(max_length=100)
version = models.CharField(max_length=15, null=True)
release_date = models.DateField(null=True)
class Issue(models.Model):
project = models.ForeignKey(Project)
status_choices = (
("0", "OPEN"),
("1", "IN PROGRESS"),
("2", "FINISHED"),
("3", "CLOSED"),
("4", "CANCELED"),
)
status = models.CharField(max_length=10, choices=status_choices)
level_choices = (
("0", "LOW"),
("1", "MEDIUM"),
("2", "HIGH"),
("3", "CRITICAL"),
("4", "BLOCKER"),
)
level = models.CharField(max_length=10, choices=level_choices)
comments = models.ForeignKey(Comments, null=True)
title = models.CharField(max_length=50, null=True)
description = models.CharField(max_length=50, null=True)
date_created = models.DateField(auto_now_add=True)
date_completed = models.DateField(null=True)
# TODO implement these
# time_estimate
# percentage_completed
class Comments(models.Model):
issue = models.ForeignKey(Issue)
comment = models.CharField(max_length=500)
user = models.ForeignKey(User)
| from django.db import models
from django.contrib.auth.models import User
class Project(models.Model):
user = models.ForeignKey(User)
# issue = models.ForeignKey(Issue, null=True)
name = models.CharField(max_length=100)
version = models.CharField(max_length=15, null=True)
release_date = models.DateField(null=True)
class Issue(models.Model):
project = models.ForeignKey(Project)
status_choices = (
("0", "OPEN"),
("1", "IN PROGRESS"),
("2", "FINISHED"),
("3", "CLOSED"),
("4", "CANCELED"),
)
status = models.CharField(max_length=10, choices=status_choices)
level_choices = (
("0", "LOW"),
("1", "MEDIUM"),
("2", "HIGH"),
("3", "CRITICAL"),
("4", "BLOCKER"),
)
level = models.CharField(max_length=10, choices=level_choices)
comments = models.ForeignKey(Comments, null=True)
title = models.CharField(max_length=50, null=True)
description = models.CharField(max_length=50, null=True)
date_created = models.DateField(auto_now_add=True)
date_completed = models.DateField(null=True)
# TODO implement these
# time_estimate
# percentage_completed
class Comments(models.Model):
issue = models.ForeignKey(Issue)
comment = models.CharField(max_length=500)
user = models.ForeignKey(User)
| mit | Python |
434610aabb80868bf086daabaea419513a8f471d | test change | JSchatzman/django-imager,JSchatzman/django-imager,JSchatzman/django-imager | imagersite/imager_profile/models.py | imagersite/imager_profile/models.py | from django.db import models
from django.contrib.auth.models import User
import uuid
from django.db.models.signals import post_save
from django.dispatch import receiver
# Create your models here.
class ActiveUsersManager(models.Manager):
"""Active user manager."""
def get_querysets(self):
"""Get the query set of active users."""
return super(ActiveUsersManager, self).get_querysets().filter(user__is_active__=True)
class ImagerProfile(models.Model):
"""The ImagerProfile and all of its attributes."""
user = models.OneToOneField(
User,
related_name="profile",
on_delete=models.CASCADE
)
objects = models.Manager()
active = ActiveUsersManager()
hireable = models.BooleanField(default=True)
address = models.CharField(max_length=255, blank=True, null=True)
camera_type = models.CharField(max_length=255, blank=True, null=True)
personal_website = models.URLField(max_length=200)
bio = models.TextField()
travel_radius = models.DecimalField(max_digits=8, decimal_places=3, null=True)
phone = models.CharField(max_length=50, blank=True, null=True)
photo_type = models.CharField(max_length=50, blank=True, null=True)
def display_properties(self):
"""Print the properties of this profile."""
print(self.hireable)
print(self.address)
print(self.camera_type)
print(self.personal_website)
print(self.bio)
print(self.travel_radius)
print(self.phone)
print(self.photo_type)
@property
def is_active(self):
"""Return True if user is active."""
return self.user.is_active
@receiver(post_save, sender=User)
def make_profile_for_user(sender, instance, **kwargs):
"""Output info about new user."""
new_profile = ImagerProfile(user=instance)
if kwargs['created']:
profile = ImagerProfile(user=instance)
profile.save()
| from django.db import models
from django.contrib.auth.models import User
import uuid
from django.db.models.signals import post_save
from django.dispatch import receiver
# Create your models here.
class ActiveUsersManager(models.Manager):
"""Active user manager."""
def get_querysets(self):
"""Get the query set of active users."""
return super(ActiveUsersManager, self).get_querysets().filter(user__is_active__=True)
class ImagerProfile(models.Model):
"""The ImagerProfile and all of its attributes."""
user = models.OneToOneField(
User,
related_name="profile",
on_delete=models.CASCADE
)
objects = models.Manager()
active = ActiveUsersManager()
hireable = models.BooleanField(default=True)
address = models.CharField(max_length=255, blank=True, null=True)
camera_type = models.CharField(max_length=255, blank=True, null=True)
personal_website = models.URLField(max_length=200)
bio = models.TextField()
travel_radius = models.DecimalField(max_digits=8, decimal_places=3, null=True)
phone = models.CharField(max_length=50, blank=True, null=True)
photo_type = models.CharField(max_length=50, blank=True, null=True)
def display_properties(self):
"""Print the properties of this profile."""
print(self.hireable)
print(self.address)
print(self.camera_type)
print(self.personal_website)
print(self.bio)
print(self.travel_radius)
print(self.phone)
print(self.photo_type)
@property
def is_active(self):
"""Return True if user is active."""
return self.user.is_active
@receiver(post_save, sender=User)
def make_profile_for_user(sender, instance, **kwargs):
new_profile = ImagerProfile(user=instance)
if kwargs['created']:
profile = ImagerProfile(user=instance)
profile.save()
| mit | Python |
6e1096efb884e813f1db3ea951d8eec551a06c6e | Add ArgsKwargs to thinc.api | spacy-io/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc | thinc/api.py | thinc/api.py | from .config import Config, registry
from .initializers import normal_init, uniform_init, xavier_uniform_init, zero_init
from .loss import categorical_crossentropy, L1_distance, cosine_distance
from .model import create_init, Model
from .optimizers import Adam, RAdam, SGD, Optimizer
from .schedules import cyclic_triangular, warmup_linear, constant, constant_then
from .schedules import decaying, slanted_triangular, compounding
from .types import Ragged, Padded, ArgsKwargs
from .util import fix_random_seed, is_cupy_array, set_active_gpu
from .util import prefer_gpu, require_gpu
from .util import get_shuffled_batches, minibatch, evaluate_model_on_arrays
from .util import to_categorical, get_width, get_array_module
from .util import torch2xp, xp2torch, tensorflow2xp, xp2tensorflow
from .backends import get_ops, set_current_ops, get_current_ops, use_device
from .backends import Ops, CupyOps, NumpyOps
from .backends import use_pytorch_for_gpu_memory, use_tensorflow_for_gpu_memory
from .layers import Dropout, Embed, ExtractWindow, HashEmbed, LayerNorm, Linear
from .layers import Maxout, Mish, MultiSoftmax, ReLu, Residual, Softmax, BiLSTM, LSTM
from .layers import CauchySimilarity, ParametricAttention, PyTorchWrapper
from .layers import SparseLinear, StaticVectors, PyTorchBiLSTM, FeatureExtractor
from .layers import TensorFlowWrapper
from .layers import add, bidirectional, chain, clone, concatenate, foreach, noop
from .layers import recurrent, uniqued, siamese, list2ragged, ragged2list
from .layers import with_list2array, with_list2padded, with_reshape, with_getitem
from .layers import strings2arrays
from .layers import MaxPool, MeanPool, SumPool
__all__ = list(locals().keys())
| from .config import Config, registry
from .initializers import normal_init, uniform_init, xavier_uniform_init, zero_init
from .loss import categorical_crossentropy, L1_distance, cosine_distance
from .model import create_init, Model
from .optimizers import Adam, RAdam, SGD, Optimizer
from .schedules import cyclic_triangular, warmup_linear, constant, constant_then
from .schedules import decaying, slanted_triangular, compounding
from .types import Ragged, Padded
from .util import fix_random_seed, is_cupy_array, set_active_gpu
from .util import prefer_gpu, require_gpu
from .util import get_shuffled_batches, minibatch, evaluate_model_on_arrays
from .util import to_categorical, get_width, get_array_module
from .util import torch2xp, xp2torch, tensorflow2xp, xp2tensorflow
from .backends import get_ops, set_current_ops, get_current_ops, use_device
from .backends import Ops, CupyOps, NumpyOps
from .backends import use_pytorch_for_gpu_memory, use_tensorflow_for_gpu_memory
from .layers import Dropout, Embed, ExtractWindow, HashEmbed, LayerNorm, Linear
from .layers import Maxout, Mish, MultiSoftmax, ReLu, Residual, Softmax, BiLSTM, LSTM
from .layers import CauchySimilarity, ParametricAttention, PyTorchWrapper
from .layers import SparseLinear, StaticVectors, PyTorchBiLSTM, FeatureExtractor
from .layers import TensorFlowWrapper
from .layers import add, bidirectional, chain, clone, concatenate, foreach, noop
from .layers import recurrent, uniqued, siamese, list2ragged, ragged2list
from .layers import with_list2array, with_list2padded, with_reshape, with_getitem
from .layers import strings2arrays
from .layers import MaxPool, MeanPool, SumPool
__all__ = list(locals().keys())
| mit | Python |
f154aed297124b1eae6c80ae60bc2d44fd82405c | rename a variable in EventReaderBundle | alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl | AlphaTwirl/EventReader/EventReaderBundle.py | AlphaTwirl/EventReader/EventReaderBundle.py | # Tai Sakuma <tai.sakuma@cern.ch>
from EventLoop import EventLoop
##__________________________________________________________________||
class AllEvents(object):
def __call__(self, event): return True
##__________________________________________________________________||
class EventReaderBundle(object):
def __init__(self, eventBuilder, eventLoopRunner, readerPackage, eventSelection = None):
self.eventBuilder = eventBuilder
self.eventLoopRunner = eventLoopRunner
self.readerPackage = readerPackage
self.eventSelection = eventSelection if eventSelection is not None else AllEvents()
self.EventLoop = EventLoop
def begin(self):
self.eventLoopRunner.begin()
def read(self, component):
reader = self.readerPackage.make(component.name)
eventLoop = self.EventLoop(self.eventBuilder, self.eventSelection, component, reader)
self.eventLoopRunner.run(eventLoop)
def end(self):
self.eventLoopRunner.end()
self.readerPackage.collect()
##__________________________________________________________________||
| # Tai Sakuma <tai.sakuma@cern.ch>
from EventLoop import EventLoop
##__________________________________________________________________||
class AllEvents(object):
def __call__(self, event): return True
##__________________________________________________________________||
class EventReaderBundle(object):
def __init__(self, eventBuilder, eventLoopRunner, readerPackage, eventSelection = None):
self.eventBuilder = eventBuilder
self.eventLoopRunner = eventLoopRunner
self.readerPackage = readerPackage
self.eventSelection = eventSelection if eventSelection is not None else AllEvents()
self.EventLoop = EventLoop
def begin(self):
self.eventLoopRunner.begin()
def read(self, component):
readers = self.readerPackage.make(component.name)
eventLoop = self.EventLoop(self.eventBuilder, self.eventSelection, component, readers)
self.eventLoopRunner.run(eventLoop)
def end(self):
self.eventLoopRunner.end()
self.readerPackage.collect()
##__________________________________________________________________||
| bsd-3-clause | Python |
2f34e330d1d99594dab156e8d3816ba5fce8cd31 | fix test_unionfs.py | simbuerg/benchbuild,simbuerg/benchbuild | benchbuild/tests/test_unionfs.py | benchbuild/tests/test_unionfs.py | """ Testing suite for the mounting process. """
import unittest
import os
from benchbuild.project import Project
from benchbuild.utils.downloader import Wget
from benchbuild.settings import CFG
from benchbuild.utils.cmd import ls
class ProjectMock(Project):
"""
Class to get a self pointer for the project that is tested.
The project also gets wrapped inside the unionfs.
"""
from benchbuild.utils.run import unionfs
@unionfs('./base', './image', None, './union')
def mount_test_helper(self):
"""
A plumbum or benchbuild command is called inside the wrapped unionfs
and therefore also inside the mount, added manually here.
The function wrapped with this helper is later on compared with the
actual unionfs-wrapped function as a testing process.
"""
self.build()
self.run(ls)
def download(self):
""" Get the project source input. """
Wget(self.sourcedir, self.NAME)
def configure(self):
""" Configure the parameters of the project. """
pass
def build(self):
""" Build the project. """
self.download()
self.configure()
class TestUnionFsMount(unittest.TestCase):
"""
Class to test the mounting of the unionfs with different paths and check if
the unmounting works without working with a whole filesystem yet.
"""
def test_build_dir(self):
""" Check if the needed build_dir exists. """
self.assertTrue(os.path.exists(CFG["build_dir"].value()))
if __name__ == 'main':
unittest.main()
| """ Testing suite for the mounting process. """
import unittest
import os
from benchbuild.project import Project
from benchbuild.utils.container import get_base_dir
from benchbuild.utils.downloader import Wget
from benchbuild.settings import CFG
from benchbuild.utils.cmd import ls
class ProjectMock(Project):
"""
Class to get a self pointer for the project that is tested.
The project also gets wrapped inside the unionfs.
"""
from benchbuild.utils.run import unionfs
#adjust parameters in the unionfs call to test different mounts
@unionfs('./base', './image', None, './union')
def mount_test_helper(self):
"""
A plumbum or benchbuild command is called inside the wrapped unionfs and
therefor also inside the mount, added manually here.
The function wrapped with this helper is later on compared with the
actual unionfs-wrapped function as a testing process.
"""
self.build()
self.run(ls)
def download(self):
""" Get the project source input. """
Wget(self.sourcedir, self.NAME)
def configure(self):
""" Configure the parameters of the project. """
pass
def build(self):
""" Build the project. """
self.download()
self.configure()
class TestUnionFsMount(unittest.TestCase):
"""
Class to test the mounting of the unionfs with different paths and check if
the unmounting works without working with a whole filesystem yet.
"""
def test_build_dir(self):
""" Check if the needed build_dir exists. """
self.assertTrue(os.path.exists(CFG["build_dir"].value()))
def test_base_dir(self):
""" Check if the needed base_dir exsists. """
self.assertTrue(os.path.exists(get_base_dir()))
def test_unionfs_wrapping(self):
""" Tests if the wrapped unionfs returns a function. """
#can not build a new TestProject-object as caller
#also can not wrap a new unionfs around a non-funtion
#call can not be assigned to just the call of the mount_test_helper
def test_mount_location(self):
""" Tests if the mount is at the expected path. """
#settings do not save the base_dir yet, initialise an experiment first
base_dir = CFG["unionfs"]["base_dir"].value()
self.assertEqual(base_dir, get_base_dir())
def test_unionfs_tear_down(self):
""" Tests if the tear down of the unionfs was successfull. """
from benchbuild.utils.run import unionfs_tear_down
build_dir = CFG["build_dir"].value()
unionfs_tear_down(build_dir, 3)
self.assertRaises(ValueError)
self.assertRaises(RuntimeError)
#the second assert is never reached if the first one fails
if __name__ == 'main':
unittest.main()
| mit | Python |
c1ff243cb0eeca41f793ecf365e2fcfd053b396d | revert to template format | JNeiger/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software,JNeiger/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software,JNeiger/robocup-software,JNeiger/robocup-software,JNeiger/robocup-software | soccer/gameplay/plays/training/skills_practice.py | soccer/gameplay/plays/training/skills_practice.py | import robocup
import constants
import play
import skills
import tactics
# This is a file where you can learn how skills work!
class SkillsPractice(play.Play):
def __init__(self):
super().__init__(continuous=True)
# To make a robot move, use skills.move.Move(<point to move to>)
# To create a point, we initialize a point using
# robocup.Point(<x coordinate>, <y coordinate>)
# These lines moves a robot to the point (0, 0)
move_point = robocup.Point(0, 0)
skill = skills.move.Move(move_point)
# Adds behavior to our behavior tree, we will explain this more later
self.add_subbehavior(skill, "skill") | import robocup
import constants
import play
import skills
# This is a file where you can learn how skills work!
class SkillsPractice(play.Play):
def __init__(self):
super().__init__(continuous=True)
# To make a robot move, use skills.move.Move(<point to move to>)
# To create a point, we initialize a point using
# robocup.Point(<x coordinate>, <y coordinate>)
# This line moves a robot to the point (0, 0)
move_point = robocup.Point(3, 9)
skill = skills.move.Move(move_point)
# Adds behavior to our behavior tree, we will explain this more later
self.add_subbehavior(skill, "skill", required=True) | apache-2.0 | Python |
85ef4988e1f25b586d8dff63b4ade83a2222849f | Add api_key to filtered variables. | akuseru/zulip,wdaher/zulip,jimmy54/zulip,KJin99/zulip,joshisa/zulip,sup95/zulip,Vallher/zulip,jimmy54/zulip,yocome/zulip,LeeRisk/zulip,Batterfii/zulip,kaiyuanheshang/zulip,PaulPetring/zulip,guiquanz/zulip,dxq-git/zulip,littledogboy/zulip,KJin99/zulip,aps-sids/zulip,eeshangarg/zulip,LeeRisk/zulip,xuanhan863/zulip,gigawhitlocks/zulip,peguin40/zulip,aliceriot/zulip,ipernet/zulip,shaunstanislaus/zulip,yocome/zulip,MariaFaBella85/zulip,KJin99/zulip,timabbott/zulip,codeKonami/zulip,verma-varsha/zulip,dxq-git/zulip,johnny9/zulip,tommyip/zulip,umkay/zulip,alliejones/zulip,christi3k/zulip,noroot/zulip,firstblade/zulip,brainwane/zulip,showell/zulip,blaze225/zulip,sharmaeklavya2/zulip,ashwinirudrappa/zulip,ahmadassaf/zulip,amanharitsh123/zulip,tbutter/zulip,so0k/zulip,LeeRisk/zulip,qq1012803704/zulip,cosmicAsymmetry/zulip,Suninus/zulip,dotcool/zulip,atomic-labs/zulip,susansls/zulip,willingc/zulip,jackrzhang/zulip,codeKonami/zulip,paxapy/zulip,jrowan/zulip,joshisa/zulip,ryansnowboarder/zulip,shrikrishnaholla/zulip,JPJPJPOPOP/zulip,arpith/zulip,amallia/zulip,zacps/zulip,voidException/zulip,yuvipanda/zulip,umkay/zulip,ipernet/zulip,levixie/zulip,Jianchun1/zulip,zachallaun/zulip,showell/zulip,sharmaeklavya2/zulip,tdr130/zulip,vaidap/zulip,peiwei/zulip,saitodisse/zulip,dattatreya303/zulip,krtkmj/zulip,samatdav/zulip,paxapy/zulip,EasonYi/zulip,pradiptad/zulip,jeffcao/zulip,zorojean/zulip,praveenaki/zulip,akuseru/zulip,themass/zulip,nicholasbs/zulip,arpitpanwar/zulip,deer-hope/zulip,sharmaeklavya2/zulip,yuvipanda/zulip,Frouk/zulip,niftynei/zulip,easyfmxu/zulip,KingxBanana/zulip,dwrpayne/zulip,wdaher/zulip,verma-varsha/zulip,aliceriot/zulip,sonali0901/zulip,ApsOps/zulip,vabs22/zulip,MayB/zulip,developerfm/zulip,moria/zulip,eastlhu/zulip,lfranchi/zulip,Jianchun1/zulip,huangkebo/zulip,gkotian/zulip,hayderimran7/zulip,zorojean/zulip,dhcrzf/zulip,noroot/zulip,levixie/zulip,tiansiyuan/zulip,jerryge/zulip,iperne
t/zulip,vabs22/zulip,wangdeshui/zulip,SmartPeople/zulip,swinghu/zulip,wdaher/zulip,Qgap/zulip,ryanbackman/zulip,johnny9/zulip,arpith/zulip,zofuthan/zulip,zachallaun/zulip,thomasboyt/zulip,shaunstanislaus/zulip,joshisa/zulip,proliming/zulip,aakash-cr7/zulip,hayderimran7/zulip,jeffcao/zulip,yocome/zulip,dotcool/zulip,praveenaki/zulip,hj3938/zulip,fw1121/zulip,ryanbackman/zulip,kou/zulip,Vallher/zulip,JanzTam/zulip,bitemyapp/zulip,jphilipsen05/zulip,kou/zulip,jainayush975/zulip,timabbott/zulip,AZtheAsian/zulip,levixie/zulip,natanovia/zulip,hafeez3000/zulip,hayderimran7/zulip,calvinleenyc/zulip,voidException/zulip,mdavid/zulip,Suninus/zulip,nicholasbs/zulip,bluesea/zulip,tommyip/zulip,adnanh/zulip,dxq-git/zulip,willingc/zulip,Vallher/zulip,hengqujushi/zulip,zulip/zulip,brainwane/zulip,pradiptad/zulip,noroot/zulip,joyhchen/zulip,themass/zulip,peiwei/zulip,Batterfii/zulip,dnmfarrell/zulip,johnnygaddarr/zulip,ashwinirudrappa/zulip,praveenaki/zulip,aps-sids/zulip,itnihao/zulip,hayderimran7/zulip,Jianchun1/zulip,hengqujushi/zulip,voidException/zulip,babbage/zulip,easyfmxu/zulip,guiquanz/zulip,JanzTam/zulip,punchagan/zulip,bastianh/zulip,eastlhu/zulip,Qgap/zulip,glovebx/zulip,atomic-labs/zulip,tiansiyuan/zulip,jeffcao/zulip,thomasboyt/zulip,kaiyuanheshang/zulip,Vallher/zulip,so0k/zulip,rht/zulip,ApsOps/zulip,tbutter/zulip,ikasumiwt/zulip,RobotCaleb/zulip,xuxiao/zulip,arpith/zulip,KingxBanana/zulip,dhcrzf/zulip,TigorC/zulip,wdaher/zulip,natanovia/zulip,jphilipsen05/zulip,hackerkid/zulip,punchagan/zulip,jessedhillon/zulip,amanharitsh123/zulip,umkay/zulip,huangkebo/zulip,armooo/zulip,eastlhu/zulip,mahim97/zulip,ipernet/zulip,so0k/zulip,karamcnair/zulip,johnny9/zulip,dattatreya303/zulip,andersk/zulip,Galexrt/zulip,SmartPeople/zulip,samatdav/zulip,babbage/zulip,Gabriel0402/zulip,hengqujushi/zulip,zwily/zulip,aakash-cr7/zulip,punchagan/zulip,technicalpickles/zulip,ericzhou2008/zulip,noroot/zulip,hustlzp/zulip,technicalpickles/zulip,avastu/zulip,dhcrzf/zulip,zulip/zulip,EasonYi/zuli
p,vaidap/zulip,hustlzp/zulip,reyha/zulip,reyha/zulip,susansls/zulip,synicalsyntax/zulip,esander91/zulip,Gabriel0402/zulip,jimmy54/zulip,xuanhan863/zulip,samatdav/zulip,hj3938/zulip,brockwhittaker/zulip,zulip/zulip,wavelets/zulip,christi3k/zulip,ApsOps/zulip,niftynei/zulip,jonesgithub/zulip,MariaFaBella85/zulip,reyha/zulip,Frouk/zulip,umkay/zulip,dotcool/zulip,RobotCaleb/zulip,luyifan/zulip,Jianchun1/zulip,dwrpayne/zulip,arpitpanwar/zulip,sharmaeklavya2/zulip,Galexrt/zulip,peguin40/zulip,qq1012803704/zulip,niftynei/zulip,shubhamdhama/zulip,wweiradio/zulip,he15his/zulip,mohsenSy/zulip,Frouk/zulip,shubhamdhama/zulip,babbage/zulip,m1ssou/zulip,technicalpickles/zulip,eeshangarg/zulip,nicholasbs/zulip,jerryge/zulip,JanzTam/zulip,firstblade/zulip,ahmadassaf/zulip,yuvipanda/zulip,zacps/zulip,jackrzhang/zulip,johnnygaddarr/zulip,saitodisse/zulip,bssrdf/zulip,mansilladev/zulip,showell/zulip,RobotCaleb/zulip,avastu/zulip,eastlhu/zulip,johnnygaddarr/zulip,jainayush975/zulip,synicalsyntax/zulip,hackerkid/zulip,MayB/zulip,Batterfii/zulip,arpith/zulip,hj3938/zulip,proliming/zulip,themass/zulip,developerfm/zulip,jeffcao/zulip,zhaoweigg/zulip,huangkebo/zulip,ipernet/zulip,peguin40/zulip,bitemyapp/zulip,JanzTam/zulip,hustlzp/zulip,zorojean/zulip,peguin40/zulip,reyha/zulip,vaidap/zulip,Drooids/zulip,TigorC/zulip,Suninus/zulip,vabs22/zulip,ryanbackman/zulip,bluesea/zulip,avastu/zulip,hengqujushi/zulip,bowlofstew/zulip,ufosky-server/zulip,kaiyuanheshang/zulip,tbutter/zulip,Gabriel0402/zulip,arpitpanwar/zulip,ahmadassaf/zulip,fw1121/zulip,vabs22/zulip,ashwinirudrappa/zulip,wweiradio/zulip,lfranchi/zulip,natanovia/zulip,themass/zulip,bssrdf/zulip,natanovia/zulip,Drooids/zulip,mdavid/zulip,natanovia/zulip,saitodisse/zulip,ikasumiwt/zulip,zofuthan/zulip,EasonYi/zulip,AZtheAsian/zulip,udxxabp/zulip,vakila/zulip,yocome/zulip,xuxiao/zulip,brainwane/zulip,calvinleenyc/zulip,vakila/zulip,shaunstanislaus/zulip,cosmicAsymmetry/zulip,firstblade/zulip,wangdeshui/zulip,bowlofstew/zulip,jonesgithub/zu
lip,so0k/zulip,kou/zulip,luyifan/zulip,EasonYi/zulip,PaulPetring/zulip,Batterfii/zulip,mohsenSy/zulip,zofuthan/zulip,jessedhillon/zulip,akuseru/zulip,arpitpanwar/zulip,zofuthan/zulip,peiwei/zulip,armooo/zulip,KingxBanana/zulip,xuxiao/zulip,luyifan/zulip,Qgap/zulip,zwily/zulip,ryansnowboarder/zulip,qq1012803704/zulip,udxxabp/zulip,hengqujushi/zulip,stamhe/zulip,grave-w-grave/zulip,dhcrzf/zulip,kokoar/zulip,brockwhittaker/zulip,JPJPJPOPOP/zulip,blaze225/zulip,timabbott/zulip,kaiyuanheshang/zulip,dwrpayne/zulip,DazWorrall/zulip,calvinleenyc/zulip,dhcrzf/zulip,wdaher/zulip,seapasulli/zulip,schatt/zulip,tommyip/zulip,isht3/zulip,moria/zulip,joyhchen/zulip,vakila/zulip,littledogboy/zulip,swinghu/zulip,bastianh/zulip,gigawhitlocks/zulip,LAndreas/zulip,niftynei/zulip,he15his/zulip,bastianh/zulip,kokoar/zulip,jonesgithub/zulip,itnihao/zulip,ufosky-server/zulip,peiwei/zulip,dawran6/zulip,Juanvulcano/zulip,SmartPeople/zulip,Diptanshu8/zulip,zofuthan/zulip,bowlofstew/zulip,shrikrishnaholla/zulip,DazWorrall/zulip,brainwane/zulip,Juanvulcano/zulip,umkay/zulip,RobotCaleb/zulip,codeKonami/zulip,dawran6/zulip,m1ssou/zulip,suxinde2009/zulip,cosmicAsymmetry/zulip,paxapy/zulip,karamcnair/zulip,sup95/zulip,technicalpickles/zulip,avastu/zulip,dawran6/zulip,Diptanshu8/zulip,sonali0901/zulip,stamhe/zulip,bssrdf/zulip,tdr130/zulip,nicholasbs/zulip,aakash-cr7/zulip,rishig/zulip,dnmfarrell/zulip,itnihao/zulip,sonali0901/zulip,sonali0901/zulip,ryanbackman/zulip,JPJPJPOPOP/zulip,PaulPetring/zulip,vakila/zulip,esander91/zulip,zofuthan/zulip,andersk/zulip,he15his/zulip,dattatreya303/zulip,jackrzhang/zulip,wweiradio/zulip,tommyip/zulip,easyfmxu/zulip,lfranchi/zulip,tdr130/zulip,tdr130/zulip,grave-w-grave/zulip,hackerkid/zulip,zachallaun/zulip,alliejones/zulip,willingc/zulip,ikasumiwt/zulip,jerryge/zulip,krtkmj/zulip,aakash-cr7/zulip,showell/zulip,xuanhan863/zulip,pradiptad/zulip,dattatreya303/zulip,jainayush975/zulip,jainayush975/zulip,hj3938/zulip,wavelets/zulip,Suninus/zulip,samatdav/zulip,wangd
eshui/zulip,he15his/zulip,Drooids/zulip,he15his/zulip,joyhchen/zulip,itnihao/zulip,cosmicAsymmetry/zulip,jerryge/zulip,easyfmxu/zulip,dxq-git/zulip,Drooids/zulip,jackrzhang/zulip,MayB/zulip,yuvipanda/zulip,reyha/zulip,so0k/zulip,alliejones/zulip,krtkmj/zulip,Suninus/zulip,bluesea/zulip,ahmadassaf/zulip,j831/zulip,qq1012803704/zulip,dotcool/zulip,cosmicAsymmetry/zulip,mdavid/zulip,Suninus/zulip,dxq-git/zulip,JanzTam/zulip,fw1121/zulip,hj3938/zulip,ahmadassaf/zulip,AZtheAsian/zulip,j831/zulip,joyhchen/zulip,Jianchun1/zulip,johnny9/zulip,ryanbackman/zulip,jrowan/zulip,jrowan/zulip,synicalsyntax/zulip,hackerkid/zulip,shrikrishnaholla/zulip,bowlofstew/zulip,tommyip/zulip,RobotCaleb/zulip,rishig/zulip,jainayush975/zulip,eastlhu/zulip,huangkebo/zulip,kaiyuanheshang/zulip,dattatreya303/zulip,isht3/zulip,susansls/zulip,joshisa/zulip,m1ssou/zulip,hafeez3000/zulip,EasonYi/zulip,arpitpanwar/zulip,shaunstanislaus/zulip,Frouk/zulip,swinghu/zulip,KJin99/zulip,zorojean/zulip,schatt/zulip,shubhamdhama/zulip,dnmfarrell/zulip,firstblade/zulip,DazWorrall/zulip,ashwinirudrappa/zulip,Galexrt/zulip,sup95/zulip,zwily/zulip,DazWorrall/zulip,noroot/zulip,rht/zulip,zachallaun/zulip,amyliu345/zulip,voidException/zulip,atomic-labs/zulip,easyfmxu/zulip,karamcnair/zulip,karamcnair/zulip,bluesea/zulip,Galexrt/zulip,fw1121/zulip,eeshangarg/zulip,amyliu345/zulip,Juanvulcano/zulip,jessedhillon/zulip,ikasumiwt/zulip,Qgap/zulip,m1ssou/zulip,sonali0901/zulip,thomasboyt/zulip,zacps/zulip,zwily/zulip,PaulPetring/zulip,dnmfarrell/zulip,j831/zulip,Gabriel0402/zulip,alliejones/zulip,technicalpickles/zulip,bitemyapp/zulip,praveenaki/zulip,andersk/zulip,isht3/zulip,Frouk/zulip,zachallaun/zulip,paxapy/zulip,ericzhou2008/zulip,souravbadami/zulip,esander91/zulip,aliceriot/zulip,ikasumiwt/zulip,souravbadami/zulip,voidException/zulip,jphilipsen05/zulip,glovebx/zulip,susansls/zulip,zhaoweigg/zulip,amallia/zulip,tommyip/zulip,tiansiyuan/zulip,ApsOps/zulip,rishig/zulip,swinghu/zulip,SmartPeople/zulip,Cheppers/zulip,zh
aoweigg/zulip,easyfmxu/zulip,wangdeshui/zulip,zofuthan/zulip,samatdav/zulip,thomasboyt/zulip,technicalpickles/zulip,proliming/zulip,luyifan/zulip,brainwane/zulip,stamhe/zulip,willingc/zulip,ryansnowboarder/zulip,suxinde2009/zulip,SmartPeople/zulip,ashwinirudrappa/zulip,Frouk/zulip,vikas-parashar/zulip,andersk/zulip,shubhamdhama/zulip,tiansiyuan/zulip,TigorC/zulip,bitemyapp/zulip,KJin99/zulip,stamhe/zulip,vaidap/zulip,luyifan/zulip,Gabriel0402/zulip,kou/zulip,bssrdf/zulip,joshisa/zulip,Galexrt/zulip,dawran6/zulip,ashwinirudrappa/zulip,blaze225/zulip,bluesea/zulip,codeKonami/zulip,fw1121/zulip,jackrzhang/zulip,levixie/zulip,natanovia/zulip,nicholasbs/zulip,littledogboy/zulip,udxxabp/zulip,guiquanz/zulip,qq1012803704/zulip,mahim97/zulip,atomic-labs/zulip,ryansnowboarder/zulip,Drooids/zulip,hengqujushi/zulip,arpith/zulip,firstblade/zulip,krtkmj/zulip,jerryge/zulip,wavelets/zulip,MariaFaBella85/zulip,hj3938/zulip,MariaFaBella85/zulip,schatt/zulip,thomasboyt/zulip,Vallher/zulip,Diptanshu8/zulip,noroot/zulip,armooo/zulip,dwrpayne/zulip,littledogboy/zulip,johnnygaddarr/zulip,bitemyapp/zulip,grave-w-grave/zulip,ahmadassaf/zulip,bluesea/zulip,Batterfii/zulip,grave-w-grave/zulip,akuseru/zulip,sup95/zulip,jackrzhang/zulip,j831/zulip,hayderimran7/zulip,KJin99/zulip,hengqujushi/zulip,johnny9/zulip,aliceriot/zulip,ryansnowboarder/zulip,hackerkid/zulip,aps-sids/zulip,mohsenSy/zulip,calvinleenyc/zulip,themass/zulip,atomic-labs/zulip,jerryge/zulip,amallia/zulip,gkotian/zulip,mdavid/zulip,zorojean/zulip,ipernet/zulip,LAndreas/zulip,zulip/zulip,vakila/zulip,kou/zulip,fw1121/zulip,bastianh/zulip,voidException/zulip,xuanhan863/zulip,adnanh/zulip,avastu/zulip,zhaoweigg/zulip,schatt/zulip,tbutter/zulip,shrikrishnaholla/zulip,mansilladev/zulip,christi3k/zulip,RobotCaleb/zulip,arpitpanwar/zulip,amanharitsh123/zulip,ufosky-server/zulip,jimmy54/zulip,ufosky-server/zulip,jphilipsen05/zulip,jonesgithub/zulip,jessedhillon/zulip,moria/zulip,levixie/zulip,bowlofstew/zulip,shaunstanislaus/zulip,Smar
tPeople/zulip,jrowan/zulip,dnmfarrell/zulip,zorojean/zulip,punchagan/zulip,MariaFaBella85/zulip,huangkebo/zulip,saitodisse/zulip,ipernet/zulip,niftynei/zulip,amyliu345/zulip,itnihao/zulip,grave-w-grave/zulip,xuxiao/zulip,tdr130/zulip,bluesea/zulip,PhilSk/zulip,bitemyapp/zulip,peiwei/zulip,armooo/zulip,kokoar/zulip,tbutter/zulip,tdr130/zulip,stamhe/zulip,itnihao/zulip,bssrdf/zulip,schatt/zulip,noroot/zulip,Drooids/zulip,technicalpickles/zulip,seapasulli/zulip,brainwane/zulip,deer-hope/zulip,bssrdf/zulip,rht/zulip,thomasboyt/zulip,rht/zulip,bowlofstew/zulip,schatt/zulip,deer-hope/zulip,peiwei/zulip,zachallaun/zulip,kokoar/zulip,mdavid/zulip,lfranchi/zulip,nicholasbs/zulip,LAndreas/zulip,m1ssou/zulip,Qgap/zulip,aps-sids/zulip,huangkebo/zulip,ahmadassaf/zulip,wavelets/zulip,willingc/zulip,hustlzp/zulip,guiquanz/zulip,voidException/zulip,kokoar/zulip,KingxBanana/zulip,jphilipsen05/zulip,aps-sids/zulip,jonesgithub/zulip,aakash-cr7/zulip,eeshangarg/zulip,JPJPJPOPOP/zulip,calvinleenyc/zulip,dawran6/zulip,ericzhou2008/zulip,babbage/zulip,Juanvulcano/zulip,brockwhittaker/zulip,peguin40/zulip,seapasulli/zulip,bowlofstew/zulip,Frouk/zulip,lfranchi/zulip,pradiptad/zulip,codeKonami/zulip,brockwhittaker/zulip,shaunstanislaus/zulip,amallia/zulip,hafeez3000/zulip,alliejones/zulip,zhaoweigg/zulip,punchagan/zulip,Diptanshu8/zulip,amanharitsh123/zulip,PhilSk/zulip,DazWorrall/zulip,dwrpayne/zulip,thomasboyt/zulip,lfranchi/zulip,wweiradio/zulip,mansilladev/zulip,bssrdf/zulip,seapasulli/zulip,susansls/zulip,Galexrt/zulip,JPJPJPOPOP/zulip,christi3k/zulip,jessedhillon/zulip,showell/zulip,avastu/zulip,karamcnair/zulip,karamcnair/zulip,timabbott/zulip,proliming/zulip,hustlzp/zulip,bitemyapp/zulip,johnnygaddarr/zulip,blaze225/zulip,firstblade/zulip,littledogboy/zulip,lfranchi/zulip,christi3k/zulip,so0k/zulip,suxinde2009/zulip,christi3k/zulip,wdaher/zulip,yuvipanda/zulip,sonali0901/zulip,vakila/zulip,aps-sids/zulip,kaiyuanheshang/zulip,andersk/zulip,m1ssou/zulip,armooo/zulip,xuxiao/zulip,PaulPe
tring/zulip,Gabriel0402/zulip,reyha/zulip,vaidap/zulip,peguin40/zulip,wdaher/zulip,jackrzhang/zulip,deer-hope/zulip,JPJPJPOPOP/zulip,zulip/zulip,LeeRisk/zulip,xuanhan863/zulip,esander91/zulip,mohsenSy/zulip,esander91/zulip,hafeez3000/zulip,sup95/zulip,PhilSk/zulip,jeffcao/zulip,karamcnair/zulip,EasonYi/zulip,dotcool/zulip,PhilSk/zulip,umkay/zulip,Juanvulcano/zulip,ryansnowboarder/zulip,shaunstanislaus/zulip,dawran6/zulip,jonesgithub/zulip,shrikrishnaholla/zulip,hayderimran7/zulip,eastlhu/zulip,glovebx/zulip,zwily/zulip,dxq-git/zulip,saitodisse/zulip,jimmy54/zulip,developerfm/zulip,mahim97/zulip,easyfmxu/zulip,wweiradio/zulip,showell/zulip,ericzhou2008/zulip,saitodisse/zulip,praveenaki/zulip,zorojean/zulip,Cheppers/zulip,natanovia/zulip,udxxabp/zulip,willingc/zulip,pradiptad/zulip,vikas-parashar/zulip,m1ssou/zulip,atomic-labs/zulip,zacps/zulip,joshisa/zulip,luyifan/zulip,jainayush975/zulip,wangdeshui/zulip,aps-sids/zulip,synicalsyntax/zulip,ApsOps/zulip,avastu/zulip,Cheppers/zulip,zulip/zulip,glovebx/zulip,KingxBanana/zulip,gigawhitlocks/zulip,bastianh/zulip,wavelets/zulip,wweiradio/zulip,glovebx/zulip,gigawhitlocks/zulip,verma-varsha/zulip,verma-varsha/zulip,babbage/zulip,vakila/zulip,jrowan/zulip,LeeRisk/zulip,kaiyuanheshang/zulip,Batterfii/zulip,gigawhitlocks/zulip,timabbott/zulip,TigorC/zulip,Cheppers/zulip,jrowan/zulip,ryanbackman/zulip,jerryge/zulip,mansilladev/zulip,vikas-parashar/zulip,rishig/zulip,hafeez3000/zulip,ericzhou2008/zulip,showell/zulip,armooo/zulip,adnanh/zulip,deer-hope/zulip,paxapy/zulip,rishig/zulip,synicalsyntax/zulip,souravbadami/zulip,brockwhittaker/zulip,johnny9/zulip,ikasumiwt/zulip,PhilSk/zulip,jeffcao/zulip,mansilladev/zulip,jeffcao/zulip,levixie/zulip,so0k/zulip,shrikrishnaholla/zulip,kou/zulip,LeeRisk/zulip,amallia/zulip,jessedhillon/zulip,stamhe/zulip,akuseru/zulip,developerfm/zulip,jonesgithub/zulip,willingc/zulip,themass/zulip,Vallher/zulip,xuanhan863/zulip,tbutter/zulip,qq1012803704/zulip,themass/zulip,grave-w-grave/zulip,gkotian/z
ulip,zacps/zulip,tommyip/zulip,yuvipanda/zulip,stamhe/zulip,LeeRisk/zulip,littledogboy/zulip,ashwinirudrappa/zulip,Vallher/zulip,JanzTam/zulip,amallia/zulip,johnny9/zulip,guiquanz/zulip,krtkmj/zulip,tiansiyuan/zulip,schatt/zulip,Qgap/zulip,firstblade/zulip,eeshangarg/zulip,Batterfii/zulip,kou/zulip,PhilSk/zulip,hafeez3000/zulip,ApsOps/zulip,gigawhitlocks/zulip,glovebx/zulip,zwily/zulip,amyliu345/zulip,johnnygaddarr/zulip,TigorC/zulip,aliceriot/zulip,j831/zulip,mdavid/zulip,swinghu/zulip,shubhamdhama/zulip,dattatreya303/zulip,esander91/zulip,vabs22/zulip,RobotCaleb/zulip,huangkebo/zulip,itnihao/zulip,timabbott/zulip,KJin99/zulip,jimmy54/zulip,Diptanshu8/zulip,zulip/zulip,xuxiao/zulip,zhaoweigg/zulip,proliming/zulip,jphilipsen05/zulip,joyhchen/zulip,blaze225/zulip,codeKonami/zulip,ericzhou2008/zulip,MayB/zulip,gkotian/zulip,littledogboy/zulip,mahim97/zulip,LAndreas/zulip,Suninus/zulip,pradiptad/zulip,alliejones/zulip,praveenaki/zulip,paxapy/zulip,vaidap/zulip,tiansiyuan/zulip,johnnygaddarr/zulip,atomic-labs/zulip,xuxiao/zulip,codeKonami/zulip,gkotian/zulip,vikas-parashar/zulip,mohsenSy/zulip,zacps/zulip,babbage/zulip,suxinde2009/zulip,ufosky-server/zulip,akuseru/zulip,vikas-parashar/zulip,amyliu345/zulip,Diptanshu8/zulip,wavelets/zulip,niftynei/zulip,DazWorrall/zulip,armooo/zulip,suxinde2009/zulip,tbutter/zulip,esander91/zulip,arpitpanwar/zulip,swinghu/zulip,punchagan/zulip,qq1012803704/zulip,souravbadami/zulip,krtkmj/zulip,mansilladev/zulip,deer-hope/zulip,zhaoweigg/zulip,MayB/zulip,amyliu345/zulip,moria/zulip,synicalsyntax/zulip,isht3/zulip,adnanh/zulip,TigorC/zulip,developerfm/zulip,Qgap/zulip,calvinleenyc/zulip,MayB/zulip,rishig/zulip,Juanvulcano/zulip,gkotian/zulip,dotcool/zulip,mahim97/zulip,arpith/zulip,vabs22/zulip,zachallaun/zulip,wweiradio/zulip,ikasumiwt/zulip,rht/zulip,mansilladev/zulip,JanzTam/zulip,pradiptad/zulip,Cheppers/zulip,DazWorrall/zulip,aakash-cr7/zulip,cosmicAsymmetry/zulip,amallia/zulip,dhcrzf/zulip,eastlhu/zulip,developerfm/zulip,souravbadami
/zulip,udxxabp/zulip,guiquanz/zulip,souravbadami/zulip,KingxBanana/zulip,praveenaki/zulip,jimmy54/zulip,Drooids/zulip,deer-hope/zulip,isht3/zulip,samatdav/zulip,moria/zulip,gigawhitlocks/zulip,hj3938/zulip,kokoar/zulip,wangdeshui/zulip,hayderimran7/zulip,aliceriot/zulip,ufosky-server/zulip,mahim97/zulip,Jianchun1/zulip,Cheppers/zulip,amanharitsh123/zulip,MayB/zulip,rht/zulip,joshisa/zulip,AZtheAsian/zulip,hustlzp/zulip,gkotian/zulip,saitodisse/zulip,MariaFaBella85/zulip,eeshangarg/zulip,vikas-parashar/zulip,mohsenSy/zulip,isht3/zulip,ryansnowboarder/zulip,adnanh/zulip,sharmaeklavya2/zulip,moria/zulip,punchagan/zulip,dxq-git/zulip,bastianh/zulip,babbage/zulip,timabbott/zulip,udxxabp/zulip,shubhamdhama/zulip,brockwhittaker/zulip,sup95/zulip,yocome/zulip,AZtheAsian/zulip,hafeez3000/zulip,PaulPetring/zulip,udxxabp/zulip,LAndreas/zulip,developerfm/zulip,alliejones/zulip,bastianh/zulip,andersk/zulip,seapasulli/zulip,joyhchen/zulip,akuseru/zulip,Gabriel0402/zulip,moria/zulip,ufosky-server/zulip,tiansiyuan/zulip,dwrpayne/zulip,guiquanz/zulip,eeshangarg/zulip,suxinde2009/zulip,LAndreas/zulip,synicalsyntax/zulip,wangdeshui/zulip,shubhamdhama/zulip,LAndreas/zulip,tdr130/zulip,yocome/zulip,zwily/zulip,MariaFaBella85/zulip,dhcrzf/zulip,xuanhan863/zulip,nicholasbs/zulip,sharmaeklavya2/zulip,luyifan/zulip,suxinde2009/zulip,rishig/zulip,glovebx/zulip,susansls/zulip,andersk/zulip,hustlzp/zulip,proliming/zulip,yuvipanda/zulip,seapasulli/zulip,Cheppers/zulip,verma-varsha/zulip,PaulPetring/zulip,EasonYi/zulip,dwrpayne/zulip,proliming/zulip,ApsOps/zulip,dotcool/zulip,umkay/zulip,AZtheAsian/zulip,aliceriot/zulip,dnmfarrell/zulip,brainwane/zulip,krtkmj/zulip,dnmfarrell/zulip,Galexrt/zulip,jessedhillon/zulip,verma-varsha/zulip,fw1121/zulip,blaze225/zulip,seapasulli/zulip,j831/zulip,he15his/zulip,yocome/zulip,hackerkid/zulip,levixie/zulip,wavelets/zulip,adnanh/zulip,amanharitsh123/zulip,hackerkid/zulip,adnanh/zulip,he15his/zulip,kokoar/zulip,ericzhou2008/zulip,peiwei/zulip,shrikrishnaholla/
zulip,mdavid/zulip,rht/zulip,swinghu/zulip | zerver/filters.py | zerver/filters.py | from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken', 'api_key']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
| from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
| apache-2.0 | Python |
9d442b1b45245e5c6d43ea8ea0bed98dc055e04e | Update version | architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst | cea/__init__.py | cea/__init__.py | __version__ = "2.30.0"
class ConfigError(Exception):
"""Raised when the configuration of a tool contains some invalid values."""
rc = 100 # sys.exit(rc)
class CustomDatabaseNotFound(Exception):
"""Raised when the InputLocator can't find a user-provided database (region=='custom')"""
rc = 101 # sys.exit(rc)
class ScriptNotFoundException(Exception):
"""Raised when an invalid script name is used."""
rc = 102 # sys.exit(rc)
class MissingInputDataException(Exception):
"""Raised when a script can't run because some information is missing"""
rc = 103
class InvalidOccupancyNameException(Exception):
"""Raised when the occupancy.dbf has an invalid / unknown occupancy column"""
rc = 104
def suppres_3rd_party_debug_loggers():
"""set logging level to WARN for fiona and shapely and others"""
import logging
loggers_to_silence = ["shapely", "Fiona", "fiona", "matplotlib", "urllib3.connectionpool"]
for log_name in loggers_to_silence:
log = logging.getLogger(log_name)
log.setLevel(logging.ERROR)
| __version__ = "2.29.0"
class ConfigError(Exception):
"""Raised when the configuration of a tool contains some invalid values."""
rc = 100 # sys.exit(rc)
class CustomDatabaseNotFound(Exception):
"""Raised when the InputLocator can't find a user-provided database (region=='custom')"""
rc = 101 # sys.exit(rc)
class ScriptNotFoundException(Exception):
"""Raised when an invalid script name is used."""
rc = 102 # sys.exit(rc)
class MissingInputDataException(Exception):
"""Raised when a script can't run because some information is missing"""
rc = 103
class InvalidOccupancyNameException(Exception):
"""Raised when the occupancy.dbf has an invalid / unknown occupancy column"""
rc = 104
def suppres_3rd_party_debug_loggers():
"""set logging level to WARN for fiona and shapely and others"""
import logging
loggers_to_silence = ["shapely", "Fiona", "fiona", "matplotlib", "urllib3.connectionpool"]
for log_name in loggers_to_silence:
log = logging.getLogger(log_name)
log.setLevel(logging.ERROR)
| mit | Python |
3090d65dd66acc0acdb5cccee82d2c6abc81eac4 | Remove config option ckan.ab_scheming.deployment | abgov/ckanext-workflow,abgov/ckanext-workflow,abgov/ckanext-workflow,abgov/ckanext-workflow | ckanext/workflow/logic/validation.py | ckanext/workflow/logic/validation.py | from ckan.plugins.toolkit import Invalid, _
from ckan.lib.navl.dictization_functions import unflatten
from ckan.plugins import toolkit
import ckan.logic as logic
import ckan.lib.base as base
import re
import ckanext.workflow.helpers as helpers
import pylons.config as config
NotFound = logic.NotFound
abort = base.abort
def scheming_required(key, flattened_data, errors, context):
"""
This validator is the standard validator for fields in
helpers.get_required_fields_name(). There is no need to use
scheming_validator
"""
data_dict = unflatten(flattened_data)
if helpers.has_process_state_field_in_schema(data_dict['type']):
if data_dict['process_state'] in helpers.get_process_state_list_not_allow_incomplete(data_dict['type']):
if key[0] in helpers.get_required_fields_name(data_dict['type']):
if not data_dict[key[0]] or data_dict[key[0]] == '[]':
raise Invalid(_('Missing value'))
def resource_required(key, flattened_data, errors, context):
""" check resources. If empty, raise error """
data_dict = unflatten(flattened_data)
if not data_dict.get("id"):
# if there is no package id, it is in creation mode
return
try:
pkg_obj = toolkit.get_action("package_show")(data_dict={"id": data_dict['id']})
except NotFound:
abort(404, _('The dataset {id} could not be found.'
).format(id=data_dict['id']))
else:
if data_dict['process_state'] in helpers.get_process_state_list_not_allow_incomplete(data_dict['type']):
if not pkg_obj.get("resources") and not re.search('new_resource', toolkit.request.url):
# we still allow adding resources in Submitted mode
raise Invalid(_("At least one resource must be set up."))
| from ckan.plugins.toolkit import Invalid, _
from ckan.lib.navl.dictization_functions import unflatten
from ckan.plugins import toolkit
import ckan.logic as logic
import ckan.lib.base as base
import re
import ckanext.workflow.helpers as helpers
import pylons.config as config
NotFound = logic.NotFound
abort = base.abort
def scheming_required(key, flattened_data, errors, context):
"""
This validator is the standard validator for fields in
helpers.get_required_fields_name(). There is no need to use
scheming_validator
"""
data_dict = unflatten(flattened_data)
if helpers.has_process_state_field_in_schema(data_dict['type']):
if data_dict['process_state'] in helpers.get_process_state_list_not_allow_incomplete(data_dict['type']):
if key[0] in helpers.get_required_fields_name(data_dict['type']):
if not data_dict[key[0]] or data_dict[key[0]] == '[]':
if not config.get('ckan.ab_scheming.deployment', False):
raise Invalid(_('Missing value'))
def resource_required(key, flattened_data, errors, context):
""" check resources. If empty, raise error """
data_dict = unflatten(flattened_data)
if not data_dict.get("id"):
# if there is no package id, it is in creation mode
return
try:
pkg_obj = toolkit.get_action("package_show")(data_dict={"id": data_dict['id']})
except NotFound:
abort(404, _('The dataset {id} could not be found.'
).format(id=data_dict['id']))
else:
if data_dict['process_state'] in helpers.get_process_state_list_not_allow_incomplete(data_dict['type']):
if not pkg_obj.get("resources") and not re.search('new_resource', toolkit.request.url):
# we still allow adding resources in Submitted mode
raise Invalid(_("At least one resource must be set up."))
| agpl-3.0 | Python |
2466a5b0c7f278c9c11669aa686ca7e3ec4e774a | Fix emails | ocwc/ocwc-members,ocwc/ocwc-members,ocwc/ocwc-members,ocwc/ocwc-members | members/crm/management/commands/mailing-emails.py | members/crm/management/commands/mailing-emails.py | # -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from crm.models import Contact
class Command(BaseCommand):
help = "generates a list of emails with Lead Contact, Certifier and Voting representatives"
def handle(self, *args, **options):
address_list = []
for contact in Contact.objects.filter(organization__membership_status__in=(2, 3, 5, 7, 99), bouncing=False).exclude(contact_type=13):
# for contact in Contact.objects.filter(contact_type__in=(4, 6, 9, 10), organization__membership_status__in=(6,), bouncing=False):
if contact.email and contact.email not in address_list:
address_list.append(contact.email)
for email in address_list:
print "%s" % email
| # -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from crm.models import Contact
class Command(BaseCommand):
help = "generates a list of emails with Lead Contact, Certifier and Voting representatives"
def handle(self, *args, **options):
address_list = []
for contact in Contact.objects.filter(contact_type__in=(6, 9, 10), organization__membership_status__in=(2, 3, 5, 7), bouncing=False):
# for contact in Contact.objects.filter(contact_type__in=(4, 6, 9, 10), organization__membership_status__in=(6,), bouncing=False):
if contact.email and contact.email not in address_list:
address_list.append(contact.email)
for email in address_list:
print "%s," % email
| mit | Python |
14f3f3659bc727ef1ac46e4ec4dcaba58e5922c4 | Fix for Python 3 in conll dataset loading | keras-team/keras-contrib,farizrahman4u/keras-contrib,keras-team/keras-contrib,keras-team/keras-contrib | keras_contrib/datasets/conll2000.py | keras_contrib/datasets/conll2000.py | from __future__ import print_function
import numpy
from keras.utils.data_utils import get_file
from zipfile import ZipFile
from collections import Counter
from keras.preprocessing.sequence import pad_sequences
from keras.datasets import cifar10
def load_data(path='conll2000.zip', min_freq=2):
path = get_file(path, origin='https://raw.githubusercontent.com/nltk/nltk_data/gh-pages/packages/corpora/conll2000.zip')
print(path)
archive = ZipFile(path, 'r')
train = _parse_data(archive.open('conll2000/train.txt'))
test = _parse_data(archive.open('conll2000/test.txt'))
archive.close()
word_counts = Counter(row[0].lower() for sample in train for row in sample)
vocab = ['<pad>', '<unk>'] + [w for w, f in iter(word_counts.items()) if f >= min_freq]
pos_tags = sorted(list(set(row[1] for sample in train + test for row in sample))) # in alphabetic order
chunk_tags = sorted(list(set(row[2] for sample in train + test for row in sample))) # in alphabetic order
train = _process_data(train, vocab, pos_tags, chunk_tags)
test = _process_data(test, vocab, pos_tags, chunk_tags)
return train, test, (vocab, pos_tags, chunk_tags)
def _parse_data(fh):
string = fh.read()
data = [[row.split() for row in sample.split('\n')] for sample in string.decode().strip().split('\n\n')]
fh.close()
return data
def _process_data(data, vocab, pos_tags, chunk_tags, maxlen=None, onehot=False):
if maxlen is None:
maxlen = max(len(s) for s in data)
word2idx = dict((w, i) for i, w in enumerate(vocab))
x = [[word2idx.get(w[0].lower(), 1) for w in s] for s in data] # set to <unk> (index 1) if not in vocab
y_pos = [[pos_tags.index(w[1]) for w in s] for s in data]
y_chunk = [[chunk_tags.index(w[2]) for w in s] for s in data]
x = pad_sequences(x, maxlen) # left padding
y_pos = pad_sequences(y_pos, maxlen, value=-1) # lef padded with -1. Indeed, any interger works as it will be masked
y_chunk = pad_sequences(y_chunk, maxlen, value=-1)
if onehot:
y_pos = numpy.eye(len(pos_tags), dtype='float32')[y]
y_chunk = numpy.eye(len(chunk_tags), dtype='float32')[y]
else:
y_pos = numpy.expand_dims(y_pos, 2)
y_chunk = numpy.expand_dims(y_chunk, 2)
return x, y_pos, y_chunk
| from __future__ import print_function
import numpy
from keras.utils.data_utils import get_file
from zipfile import ZipFile
from collections import Counter
from keras.preprocessing.sequence import pad_sequences
from keras.datasets import cifar10
def load_data(path='conll2000.zip', min_freq=2):
path = get_file(path, origin='https://raw.githubusercontent.com/nltk/nltk_data/gh-pages/packages/corpora/conll2000.zip')
print(path)
archive = ZipFile(path, 'r')
train = _parse_data(archive.open('conll2000/train.txt'))
test = _parse_data(archive.open('conll2000/test.txt'))
archive.close()
word_counts = Counter(row[0].lower() for sample in train for row in sample)
vocab = ['<pad>', '<unk>'] + [w for w, f in word_counts.iteritems() if f >= min_freq]
pos_tags = sorted(list(set(row[1] for sample in train + test for row in sample))) # in alphabetic order
chunk_tags = sorted(list(set(row[2] for sample in train + test for row in sample))) # in alphabetic order
train = _process_data(train, vocab, pos_tags, chunk_tags)
test = _process_data(test, vocab, pos_tags, chunk_tags)
return train, test, (vocab, pos_tags, chunk_tags)
def _parse_data(fh):
string = fh.read()
data = [[row.split() for row in sample.split('\n')] for sample in string.strip().split('\n\n')]
fh.close()
return data
def _process_data(data, vocab, pos_tags, chunk_tags, maxlen=None, onehot=False):
if maxlen is None:
maxlen = max(len(s) for s in data)
word2idx = dict((w, i) for i, w in enumerate(vocab))
x = [[word2idx.get(w[0].lower(), 1) for w in s] for s in data] # set to <unk> (index 1) if not in vocab
y_pos = [[pos_tags.index(w[1]) for w in s] for s in data]
y_chunk = [[chunk_tags.index(w[2]) for w in s] for s in data]
x = pad_sequences(x, maxlen) # left padding
y_pos = pad_sequences(y_pos, maxlen, value=-1) # lef padded with -1. Indeed, any interger works as it will be masked
y_chunk = pad_sequences(y_chunk, maxlen, value=-1)
if onehot:
y_pos = numpy.eye(len(pos_tags), dtype='float32')[y]
y_chunk = numpy.eye(len(chunk_tags), dtype='float32')[y]
else:
y_pos = numpy.expand_dims(y_pos, 2)
y_chunk = numpy.expand_dims(y_chunk, 2)
return x, y_pos, y_chunk
| mit | Python |
88d3600064fa5461ea1aeb818349e8b7ab910283 | Fix a varname typo | gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim,gfxprim/gfxprim | pylib/gfxprim/render_utils.py | pylib/gfxprim/render_utils.py | #
# gfxprim.render_utils
#
import jinja2
import logging as log
import os
import time
import re
def template_error(s, *args):
raise Exception(s, *args)
def create_environment(config, template_dir):
env = jinja2.Environment(
line_statement_prefix = "%%",
line_comment_prefix = "##",
undefined = jinja2.StrictUndefined,
loader = jinja2.FileSystemLoader(template_dir))
env.globals['undefined'] = jinja2.StrictUndefined()
env.globals['pixelsizes'] = config.pixelsizes
env.globals['pixeltypes'] = config.pixeltypes
env.globals['pixeltypes_dict'] = config.pixeltypes_dict
env.globals['config'] = config
from gfxprim.pixelsize import LE, BE
env.globals['LE'] = LE
env.globals['BE'] = BE
env.globals['len'] = len
env.globals['error'] = template_error
env.globals['hex'] = lambda(x): hex(x).rstrip('L')
return env
def render_file(env, source, result):
source_file = open(source)
try:
source_text = source_file.read()
finally:
source_file.close()
# Hack to preserve empty lines before %% line_statement
source_text = re.sub("\n\n[ \t]*%%", "\n{{''}}\n%%", source_text)
tmpl = env.from_string(source_text)
tmpl.filename = source
result_text = tmpl.render(
date = time.ctime(),
target = result,
template = source,
header_guard = \
os.path.split(result)[1].upper().replace('.', '_').replace('-', '_'),
)
result_file = open(result, "w")
try:
result_file.write(result_text)
finally:
result_file.close()
def load_gfxprimconfig(config_file = None):
"""Initialize GfxPrimConfig from a given or guessed config file.
Looks for the file by parameter, in env['PIXELTYPE_DEFS'] and
in dir(__file__)/../../gfxprim_config.py, in that order.
Returns GfxPrimConfig or None on error
"""
if not config_file:
config_file = os.environ.get("PIXELTYPE_DEFS", None)
if not config_file:
path = os.path.dirname(os.path.abspath(__file__))
config_file = os.path.abspath(
os.path.join(path, "..", "..", "gfxprim_config.py"))
if not os.path.exists(config_file):
log.error("WARNING: GfxPrimConfig file %s not found!\n",
config_file)
return None
from gfxprim.pixeltype import PixelType
from gfxprim.pixelsize import PixelSize, LE, BE
from gfxprim.gfxprimconfig import GfxPrimConfig
l = {"PixelType": PixelType,
"PixelSize": PixelSize,
"LE": LE,
"BE": BE,
"GfxPrimConfig": GfxPrimConfig
}
execfile(config_file, globals(), l)
config = l["config"]
return config
| #
# gfxprim.render_utils
#
import jinja2
import logging as log
import os
import time
import re
def template_error(s, *args):
raise Exception(s, *args)
def create_environment(config, template_dir):
env = jinja2.Environment(
line_statement_prefix = "%%",
line_comment_prefix = "##",
undefined = jinja2.StrictUndefined,
loader = jinja2.FileSystemLoader(template_dir))
env.globals['undefined'] = jinja2.StrictUndefined()
env.globals['pixelsizes'] = config.pixelsizes
env.globals['pixeltypes'] = config.pixeltypes
env.globals['pixeltypes_dict'] = config.pixeltypes_dict
env.globals['config'] = config
from gfxprim.pixelsize import LE, BE
env.globals['LE'] = LE
env.globals['BE'] = BE
env.globals['len'] = len
env.globals['error'] = template_error
env.globals['hex'] = lambda(x): hex(x).rstrip('L')
return env
def render_file(env, source, result):
source_file = open(source)
try:
source_text = source_file.read()
finally:
source_text.close()
# Hack to preserve empty lines before %% line_statement
source_text = re.sub("\n\n[ \t]*%%", "\n{{''}}\n%%", source_text)
tmpl = env.from_string(source_text)
tmpl.filename = source
result_text = tmpl.render(
date = time.ctime(),
target = result,
template = source,
header_guard = \
os.path.split(result)[1].upper().replace('.', '_').replace('-', '_'),
)
result_file = open(result, "w")
try:
result_file.write(result_text)
finally:
resulf_file.close()
def load_gfxprimconfig(config_file = None):
    """Initialize GfxPrimConfig from a given or guessed config file.

    Looks for the file by parameter, in env['PIXELTYPE_DEFS'] and
    in dir(__file__)/../../gfxprim_config.py, in that order.
    Returns GfxPrimConfig or None on error
    """
    if not config_file:
        config_file = os.environ.get("PIXELTYPE_DEFS", None)
    if not config_file:
        path = os.path.dirname(os.path.abspath(__file__))
        config_file = os.path.abspath(
            os.path.join(path, "..", "..", "gfxprim_config.py"))
    if not os.path.exists(config_file):
        log.error("WARNING: GfxPrimConfig file %s not found!\n",
                  config_file)
        return None
    # Deferred imports: keep module import cheap and avoid import cycles.
    from gfxprim.pixeltype import PixelType
    from gfxprim.pixelsize import PixelSize, LE, BE
    from gfxprim.gfxprimconfig import GfxPrimConfig
    # Names the executed config file is allowed to reference.
    l = {"PixelType": PixelType,
         "PixelSize": PixelSize,
         "LE": LE,
         "BE": BE,
         "GfxPrimConfig": GfxPrimConfig
         }
    # Compatibility fix: execfile() is Python-2-only; exec(compile(...))
    # behaves identically and also works on Python 3.
    with open(config_file) as fh:
        code = compile(fh.read(), config_file, "exec")
    exec(code, globals(), l)
    # The config file is expected to assign a GfxPrimConfig to `config`.
    config = l["config"]
    return config
| lgpl-2.1 | Python |
bf79a85242686db6b832d767897a39c74bf480f8 | Add test for #229 | lzedl/PyMySQL,nju520/PyMySQL,xjzhou/PyMySQL,boneyao/PyMySQL,xjzhou/PyMySQL,modulexcite/PyMySQL,pulsar314/Tornado-MySQL,jheld/PyMySQL,anson-tang/PyMySQL,jwjohns/PyMySQL,Ting-y/PyMySQL,PyMySQL/Tornado-MySQL,lzedl/PyMySQL,yeyinzhu3211/PyMySQL,aio-libs/aiomysql,NunoEdgarGub1/PyMySQL,pymysql/pymysql,methane/PyMySQL,mosquito/Tornado-MySQL,eibanez/PyMySQL,PyMySQL/PyMySQL,wraziens/PyMySQL,wraziens/PyMySQL,Geoion/Tornado-MySQL,DashaChuk/PyMySQL,yeyinzhu3211/PyMySQL,eibanez/PyMySQL,MartinThoma/PyMySQL | pymysql/tests/test_nextset.py | pymysql/tests/test_nextset.py | from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
    """Integration tests for Cursor.nextset() (multi-statement queries).

    Requires a live MySQL server configured via the project's test base.
    """
    def setUp(self):
        super(TestNextset, self).setUp()
        # Reuse the first configured test connection for every test.
        self.con = self.connections[0]
    def test_nextset(self):
        # Two statements -> two result sets, advanced with nextset().
        cur = self.con.cursor()
        cur.execute("SELECT 1; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        r = cur.nextset()
        self.assertTrue(r)
        self.assertEqual([(2,)], list(cur))
        # No third result set remains.
        self.assertIsNone(cur.nextset())
    def test_skip_nextset(self):
        # A fresh execute() must discard any unread extra result sets.
        cur = self.con.cursor()
        cur.execute("SELECT 1; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        cur.execute("SELECT 42")
        self.assertEqual([(42,)], list(cur))
    def test_ok_and_next(self):
        # OK packets (commit) interleaved with result sets still advance.
        cur = self.con.cursor()
        cur.execute("SELECT 1; commit; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        self.assertTrue(cur.nextset())
        self.assertTrue(cur.nextset())
        self.assertEqual([(2,)], list(cur))
        self.assertFalse(bool(cur.nextset()))
    @unittest.expectedFailure
    def test_multi_cursor(self):
        # Marked expectedFailure: interleaving multi-statement results
        # across two cursors on one connection is presumably unsupported
        # yet (commit subject references issue #229) -- confirm.
        cur1 = self.con.cursor()
        cur2 = self.con.cursor()
        cur1.execute("SELECT 1; SELECT 2;")
        cur2.execute("SELECT 42")
        self.assertEqual([(1,)], list(cur1))
        self.assertEqual([(42,)], list(cur2))
        r = cur1.nextset()
        self.assertTrue(r)
        self.assertEqual([(2,)], list(cur1))
        self.assertIsNone(cur1.nextset())
#TODO: How about SSCursor and nextset?
# It's very hard to implement correctly...
| from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
    """Integration tests for Cursor.nextset(); needs a live MySQL server."""
    def setUp(self):
        super(TestNextset, self).setUp()
        # Reuse the first configured test connection for every test.
        self.con = self.connections[0]
    def test_nextset(self):
        # Two statements -> two result sets, advanced with nextset().
        cur = self.con.cursor()
        cur.execute("SELECT 1; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        r = cur.nextset()
        self.assertTrue(r)
        self.assertEqual([(2,)], list(cur))
        # No third result set remains.
        self.assertIsNone(cur.nextset())
    def test_skip_nextset(self):
        # A fresh execute() must discard any unread extra result sets.
        cur = self.con.cursor()
        cur.execute("SELECT 1; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        cur.execute("SELECT 42")
        self.assertEqual([(42,)], list(cur))
    @unittest.expectedFailure
    def test_multi_cursor(self):
        # Marked expectedFailure: interleaving multi-statement results
        # across two cursors on one connection is presumably unsupported
        # -- confirm.
        cur1 = self.con.cursor()
        cur2 = self.con.cursor()
        cur1.execute("SELECT 1; SELECT 2;")
        cur2.execute("SELECT 42")
        self.assertEqual([(1,)], list(cur1))
        self.assertEqual([(42,)], list(cur2))
        r = cur1.nextset()
        self.assertTrue(r)
        self.assertEqual([(2,)], list(cur1))
        self.assertIsNone(cur1.nextset())
#TODO: How about SSCursor and nextset?
# It's very hard to implement correctly...
| mit | Python |
e07eff5c6dfd11517bc6d62157fd9b4ddb527a72 | Add logo to shell startup | oliverhuangchao/thunder,poolio/thunder,j-friedrich/thunder,kcompher/thunder,broxtronix/thunder,zhwa/thunder,poolio/thunder,kcompher/thunder,j-friedrich/thunder,jwittenbach/thunder,oliverhuangchao/thunder,thunder-project/thunder,kunallillaney/thunder,broxtronix/thunder,kunallillaney/thunder,mikarubi/thunder,zhwa/thunder,mikarubi/thunder,pearsonlab/thunder,pearsonlab/thunder | python/thunder/utils/shell.py | python/thunder/utils/shell.py | import thunder
from thunder.utils.context import ThunderContext
from termcolor import colored
# Wrap the SparkContext `sc` in a ThunderContext; `sc` is presumably
# injected by the PySpark shell this file is run under -- confirm.
tsc = ThunderContext(sc)
print('')
print(colored(' IIIII ', 'yellow'))
print(colored(' IIIII ', 'yellow'))
print(colored(' IIIIIIIIIIIIIIIIII ', 'yellow'))
print(colored(' IIIIIIIIIIIIIIIII ', 'yellow'))
print(colored(' IIIII ', 'yellow'))
print(colored(' IIIII ', 'yellow'))
print(colored(' IIIII ', 'yellow') + 'Thunder')
print(colored(' IIIIIIIII ', 'yellow') + 'version ' + thunder.__version__)
print(colored(' IIIIIII ', 'yellow'))
print('')
print('A Thunder context is available as tsc') | import thunder
from thunder.utils.context import ThunderContext
# Wrap the SparkContext `sc` (presumably injected by the PySpark shell --
# confirm) and announce the version on startup.
tsc = ThunderContext(sc)
print('\n')
print('Running thunder version ' + thunder.__version__)
print('A thunder context is available as tsc') | apache-2.0 | Python |
a76ecb81853dcd277999e810a51f3a04a7d75b3a | Print kernelspec | stuertz/staged-recipes,igortg/staged-recipes,hadim/staged-recipes,stuertz/staged-recipes,asmeurer/staged-recipes,kwilcox/staged-recipes,Juanlu001/staged-recipes,isuruf/staged-recipes,igortg/staged-recipes,petrushy/staged-recipes,Juanlu001/staged-recipes,isuruf/staged-recipes,scopatz/staged-recipes,ocefpaf/staged-recipes,scopatz/staged-recipes,jochym/staged-recipes,jochym/staged-recipes,johanneskoester/staged-recipes,patricksnape/staged-recipes,mcs07/staged-recipes,synapticarbors/staged-recipes,jakirkham/staged-recipes,patricksnape/staged-recipes,kwilcox/staged-recipes,ocefpaf/staged-recipes,ReimarBauer/staged-recipes,mariusvniekerk/staged-recipes,conda-forge/staged-recipes,jakirkham/staged-recipes,goanpeca/staged-recipes,birdsarah/staged-recipes,conda-forge/staged-recipes,petrushy/staged-recipes,johanneskoester/staged-recipes,hadim/staged-recipes,mcs07/staged-recipes,birdsarah/staged-recipes,goanpeca/staged-recipes,ReimarBauer/staged-recipes,dschreij/staged-recipes,chrisburr/staged-recipes,synapticarbors/staged-recipes,dschreij/staged-recipes,SylvainCorlay/staged-recipes,mariusvniekerk/staged-recipes,asmeurer/staged-recipes,chrisburr/staged-recipes,SylvainCorlay/staged-recipes | recipes/sos-julia/run_test.py | recipes/sos-julia/run_test.py | import unittest
import sys
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
import jupyter_client
# Sanity-check at import time that the julia-1.0 Jupyter kernel driven by
# the test below is registered, and print debugging aid when it is not.
try:
    print(jupyter_client.kernelspec.get_kernel_spec('julia-1.0').to_dict())
except jupyter_client.kernelspec.NoSuchKernel:
    print('julia-1.0 kernel was not installed')
    print('The following kernels are installed:')
    # NOTE(review): the next line prints the literal expression text,
    # apparently as a label for the result printed after it -- confirm
    # this is intentional.
    print('jupyter_client.kernelspec.find_kernel_specs()')
    print(jupyter_client.kernelspec.find_kernel_specs())
class TestSoSKernel(unittest.TestCase):
    """End-to-end test of the SoS notebook kernel's Julia sub-kernel."""
    def testKernel(self):
        # Assign in SoS, then switch to Julia, pull the variable across
        # with %get, and check it prints unchanged.
        with sos_kernel() as kc:
            execute(kc=kc, code='a = 1')
            stdout, stderr = assemble_output(kc.iopub_channel)
            self.assertEqual(stdout.strip(), '', f'Stdout is not empty, "{stdout}" received')
            self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
            execute(kc=kc, code='%use Julia\n%get a\nprint(a)')
            stdout, stderr = assemble_output(kc.iopub_channel)
            self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
            self.assertEqual(stdout.strip(), '1', f'Stdout should be 1, "{stdout}" received')
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| import unittest
import sys
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
# Skipped on Windows: no julia package there per the decorator message.
@unittest.skipIf(sys.platform == 'win32', 'julia does not exist on win32')
class TestSoSKernel(unittest.TestCase):
    """End-to-end test of the SoS notebook kernel's Julia sub-kernel."""
    def testKernel(self):
        # Assign in SoS, switch to Julia, pull the variable with %get and
        # check it prints unchanged.
        with sos_kernel() as kc:
            execute(kc=kc, code='a = 1')
            stdout, stderr = assemble_output(kc.iopub_channel)
            self.assertEqual(stdout.strip(), '', f'Stdout is not empty, "{stdout}" received')
            self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
            execute(kc=kc, code='%use Julia\n%get a\nprint(a)')
            stdout, stderr = assemble_output(kc.iopub_channel)
            self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
            self.assertEqual(stdout.strip(), '1', f'Stdout should be 1, "{stdout}" received')
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| bsd-3-clause | Python |
b4ff065c8a58506de1b46cc5b58a19779c942f34 | Replace widget with panel | BrickText/BrickText | redactor/coloring/Coloring.py | redactor/coloring/Coloring.py | import re
from coloring.config_tags import config_tags
class Coloring:
    """Keyword highlighter: periodically re-scans the editor's text panel
    and (re)applies colour tags to every word it contains."""

    def __init__(self, text_editor, language):
        self.root = text_editor.get_root()
        self.text_widget = text_editor.get_text_panel()
        # Maps keyword -> tag configuration for the chosen language.
        self.keywords = config_tags(self.text_widget, language)
        # One "word" per regex match; each match is tagged individually.
        self.pattern = r"\w+"
        # Kick off the periodic re-highlighting loop.
        self.root.after(200, self.findall)

    def coloring(self, indices):
        """Tag each (first, last) span: the keyword's own tag when the
        word is known, the 'blank' tag otherwise."""
        for first, last in indices:
            word = self.text_widget.get(first, last)
            if word in self.keywords:
                self.text_widget.tag_remove('blank', first, last)
                self.text_widget.tag_add(word, first, last)
            else:
                for keyword in self.keywords:
                    self.text_widget.tag_remove(keyword, first, last)
                self.text_widget.tag_add('blank', first, last)

    def findall(self, start="1.0", end="end"):
        """Collect the spans of all words between *start* and *end*,
        recolour them, then reschedule itself on the Tk event loop."""
        start = self.text_widget.index(start)
        end = self.text_widget.index(end)
        content = self.text_widget.get(start, end)
        if content:
            indices = [
                (self.text_widget.index("%s+%dc" % (start, m.start())),
                 self.text_widget.index("%s+%dc" % (start, m.end())))
                for m in re.finditer(self.pattern, content)
            ]
            self.coloring(indices)
        self.root.after(200, self.findall)
| import re
from coloring.config_tags import config_tags
class Coloring:
    """Keyword highlighter: periodically re-scans the editor's text widget
    and (re)applies colour tags to every word (older accessor name)."""
    def __init__(self, text_editor, language):
        self.root = text_editor.get_root()
        self.text_widget = text_editor.get_text_widget()
        # Maps keyword -> tag configuration for the chosen language.
        self.keywords = config_tags(self.text_widget, language)
        # One "word" per regex match.
        self.pattern = r"\w+"
        # Kick off the periodic re-highlighting loop.
        self.root.after(200, self.findall)
    def coloring(self, indices):
        # Tag each (f, l) span: the keyword's own tag when known,
        # the 'blank' tag otherwise.
        for f, l in indices:
            word = self.text_widget.get(f, l)
            if word in self.keywords.keys():
                self.text_widget.tag_remove('blank', f, l)
                self.text_widget.tag_add(word, f, l)
            else:
                for k, _ in self.keywords.items():
                    self.text_widget.tag_remove(k, f, l)
                self.text_widget.tag_add('blank', f, l)
    def findall(self, start="1.0", end="end"):
        # Collect the spans of all words in [start, end), recolour them,
        # then reschedule on the Tk event loop.
        start = self.text_widget.index(start)
        end = self.text_widget.index(end)
        string = self.text_widget.get(start, end)
        indices = []
        if string:
            matches = re.finditer(self.pattern, string)
            for match in matches:
                match_start = self.text_widget.index("%s+%dc" %
                                                     (start, match.start()))
                match_end = self.text_widget.index("%s+%dc" %
                                                   (start, match.end()))
                indices.append((match_start, match_end))
            self.coloring(indices)
        self.root.after(200, self.findall)
| mit | Python |
baf76ad484cc4386a9919e2f7322c541ef2d46d9 | 更新 modules Groups API 中的 serializers.py, 新增函式功能宣告註解 | yrchen/CommonRepo,yrchen/CommonRepo,yrchen/CommonRepo,yrchen/CommonRepo | commonrepo/groups_api/serializers.py | commonrepo/groups_api/serializers.py | # -*- coding: utf-8 -*-
#
# Copyright 2016 edX PDR Lab, National Central University, Taiwan.
#
# http://edxpdrlab.ncu.cc/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Created By: yrchen@ATCity.org
# Maintained By: yrchen@ATCity.org
#
"""
Serializer of group information in Common Repo project.
"""
from __future__ import absolute_import, unicode_literals
from rest_framework import serializers
from commonrepo.groups.models import Group
from commonrepo.users.models import User as User
class GroupSerializer(serializers.HyperlinkedModelSerializer):
    """
    Serializer of group information. (API version 1)
    """
    class Meta:
        # Hyperlinked serializer: related objects are rendered as URLs.
        model = Group
        fields = (
            'url',
            'id',
            'name',
            'creator',
            'description',
            'create_date',
            'update_date',
            'members',
        )
class GroupSerializerV2(serializers.ModelSerializer):
    """
    Serializer of group information. (API version 2)
    """
    class Meta:
        # Plain ModelSerializer: related objects rendered as primary keys.
        model = Group
        fields = (
            'url',
            'id',
            'name',
            'creator',
            'description',
            'create_date',
            'update_date',
            'members',
        )
| # -*- coding: utf-8 -*-
#
# Copyright 2016 edX PDR Lab, National Central University, Taiwan.
#
# http://edxpdrlab.ncu.cc/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Created By: yrchen@ATCity.org
# Maintained By: yrchen@ATCity.org
#
from __future__ import absolute_import, unicode_literals
from rest_framework import serializers
from commonrepo.groups.models import Group
from commonrepo.users.models import User as User
class GroupSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer of group information (hyperlinked relations)."""
    class Meta:
        model = Group
        fields = (
            'url',
            'id',
            'name',
            'creator',
            'description',
            'create_date',
            'update_date',
            'members',
        )
class GroupSerializerV2(serializers.ModelSerializer):
    """Serializer of group information (API version 2, PK relations)."""
    class Meta:
        model = Group
        fields = (
            'url',
            'id',
            'name',
            'creator',
            'description',
            'create_date',
            'update_date',
            'members',
        )
| apache-2.0 | Python |
4544557cbd3f1099121744d1e030912a67f69e5c | Work on the hsb example | BrianGasberg/phue,BrianGasberg/phue,Jaiz909/phue | examples/tk_gui_hsb.py | examples/tk_gui_hsb.py | #!/usr/bin/python
from Tkinter import *
from phue import Bridge
'''
This example creates hue/saturation/brightness sliders plus one column
per light, with an on/off checkbox, a selection checkbox, and the light's
name; the sliders apply to every currently selected light.
'''
b = Bridge() # Enter bridge IP here.
#If running for the first time, press button on bridge and run with b.connect() uncommented
#b.connect()
root = Tk()
# Mapping of light id -> light object, used to build one UI column each.
lights = b.get_light_objects('id')
# Global list of light ids currently ticked in the "Select" column.
light_selection = []
def curry(fn, *cargs, **ckwargs):
    """Return a callable with *cargs*/*ckwargs* pre-bound to *fn*.

    Later positional arguments are appended after *cargs*; later keyword
    arguments override *ckwargs*.  Used below to bind per-light arguments
    to Tk checkbox callbacks.
    """
    # functools.partial implements exactly these merge semantics; prefer
    # the standard library over the hand-rolled closure.
    from functools import partial
    return partial(fn, *cargs, **ckwargs)
def hue_command(x):
    # Slider callback: push the new hue to every selected light
    # (no-op when nothing is selected).
    if light_selection:
        b.set_light(light_selection, 'hue', int(x))
def sat_command(x):
    # Slider callback: push the new saturation to every selected light
    # (no-op when nothing is selected).
    if light_selection:
        b.set_light(light_selection, 'sat', int(x))
def bri_command(x):
    # Slider callback: push the new brightness to every selected light
    # (no-op when nothing is selected).
    if light_selection:
        b.set_light(light_selection, 'bri', int(x))
def select_button_command(light, button_state):
    # Checkbox callback: keep the global selection list in sync with a
    # per-light "Select" checkbox.
    global light_selection
    if button_state.get():
        light_selection.append(light)
    else:
        light_selection.remove(light)
    # Debug aid: show the current selection (Python 2 print statement).
    print light_selection
# --- Layout: a slider row on top, one column of controls per light below.
slider_frame = Frame(root)
slider_frame.pack(pady = 10)
channels_frame = Frame(root)
channels_frame.pack()
# Left-most column: row labels for the per-light controls.
label_frame = Frame(channels_frame)
label_frame.pack(side=LEFT, padx = 10)
label_state = Label(label_frame)
label_state.config(text = 'State')
label_state.pack()
label_select = Label(label_frame)
label_select.config(text = 'Select')
label_select.pack()
label_name = Label(label_frame)
label_name.config(text = 'Name')
label_name.pack()
# HSB sliders; ranges match the Hue API (hue 0-65535, sat/bri 0-254).
hue_slider = Scale(slider_frame, from_ = 65535, to = 0, command = hue_command)
sat_slider = Scale(slider_frame, from_ = 254, to = 0, command = sat_command)
bri_slider = Scale(slider_frame, from_ = 254, to = 0, command = bri_command)
hue_slider.pack(side=LEFT)
sat_slider.pack(side=LEFT)
bri_slider.pack(side=LEFT)
for light_id in lights:
    channel_frame = Frame(channels_frame)
    channel_frame.pack(side = LEFT, padx = 10)
    # On/off checkbox, initialised from the light's current state.
    button_var = BooleanVar()
    button_var.set(b.get_light(light_id, 'on'))
    # Default-argument binding captures this iteration's values (avoids
    # the late-binding closure pitfall).
    button_command = lambda button_var=button_var, light_id=light_id: b.set_light(light_id, 'on', button_var.get())
    button = Checkbutton(channel_frame, variable = button_var, command = button_command)
    button.pack()
    # Selection checkbox feeding the global light_selection list.
    select_button_var = BooleanVar()
    #select_button_var.set(b.get_light(light_id, 'on'))
    select_button_callback = curry(select_button_command, light_id, select_button_var)
    select_button = Checkbutton(channel_frame, variable = select_button_var, command = select_button_callback)
    select_button.pack()
    label = Label(channel_frame)
    label.config(text = b.get_light(light_id,'name'))
    label.pack()
root.mainloop() | #!/usr/bin/python
from Tkinter import *
from phue import Bridge
'''
This example creates 3 sliders for the first 3 lights
and shows the name of the light under each slider.
There is also a checkbox to toggle the light.
'''
b = Bridge() # Enter bridge IP here.
#If running for the first time, press button on bridge and run with b.connect() uncommented
#b.connect()
root = Tk()
def hue_command(x):
    # Placeholder slider callback: just echoes the slider value
    # (Python 2 print statement).
    print x
# Mapping of light id -> light object, one UI column per light below.
lights = b.get_light_objects('id')
slider_frame = Frame(root)
slider_frame.pack()
channels_frame = Frame(root)
channels_frame.pack()
# Left-most column: row labels for the per-light controls.
label_frame = Frame(channels_frame)
label_frame.pack(side=LEFT)
label_name = Label(label_frame)
label_name.config(text = 'Name')
label_name.pack()
label_state = Label(label_frame)
label_state.config(text = 'State')
label_state.pack()
label_select = Label(label_frame)
label_select.config(text = 'Select')
label_select.pack()
# NOTE(review): all three sliders are wired to hue_command, which only
# prints -- presumably placeholder wiring at this stage; confirm.
hue_slider = Scale(slider_frame, from_ = 65535, to = 0, command = hue_command)
sat_slider = Scale(slider_frame, from_ = 254, to = 0, command = hue_command)
bri_slider = Scale(slider_frame, from_ = 254, to = 0, command = hue_command)
hue_slider.pack(side=LEFT)
sat_slider.pack(side=LEFT)
bri_slider.pack(side=LEFT)
for light_id in lights:
    channel_frame = Frame(channels_frame)
    channel_frame.pack(side = LEFT)
    # On/off checkbox, initialised from the light's current state;
    # default-argument binding captures this iteration's values.
    button_var = BooleanVar()
    button_var.set(b.get_light(light_id, 'on'))
    button_command = lambda button_var=button_var, light_id=light_id: b.set_light(light_id, 'on', button_var.get())
    button = Checkbutton(channel_frame, variable = button_var, command = button_command)
    button.pack()
    # Second checkbox duplicates the on/off wiring.
    button2_var = BooleanVar()
    button2_var.set(b.get_light(light_id, 'on'))
    button2_command = lambda button2_var=button2_var, light_id=light_id: b.set_light(light_id, 'on', button2_var.get())
    button2 = Checkbutton(channel_frame, variable = button2_var, command = button2_command)
    button2.pack()
    label = Label(channel_frame)
    label.config(text = b.get_light(light_id,'name'))
    label.pack()
root.mainloop() | mit | Python |
ca6f55628c131360aadb9e33732af93306671718 | Test commit part 2. | naokiur/circle-ci-demo,naokiur/circle-ci-demo,naokiur/circle-ci-demo | test.py | test.py | print("Test", "added", "second")
class Test():
pass | print("Test", "added")
class Test():
pass | apache-2.0 | Python |
20556010d1ec4c5f6c3f412f7f64d9965586d75a | add args parser | evuez/mutations | test.py | test.py | import logging
from argparse import ArgumentParser
from random import random
from pyglet import app
from pyglet.window import Window
from pyglet.clock import schedule_interval
from pyglet.gl import glClearColor
from render import MapView
from mutations import Map
from mutations import Body
from mutations import EnergyBank
# Command-line configuration for the simulation; parsed at import time so
# `values` is available to test_view() below.
parser = ArgumentParser(description='Start a Mutations simulation.')
parser.add_argument('--width', dest='map_width', default=500, type=int)
parser.add_argument('--height', dest='map_height', default=500, type=int)
parser.add_argument('--banks', dest='banks', default=5, type=int)
parser.add_argument('--bodies', dest='bodies', default=200, type=int)
values = parser.parse_args()
logging.basicConfig(level=logging.INFO)
def test_view():
    """Build a map from the parsed CLI options and run the pyglet viewer."""
    # BUGFIX: dropped the stray `global map_width` declaration -- the name
    # is never defined or used; sizing comes from the parsed `values`
    # namespace instead.
    map_ = Map(values.map_width, values.map_height)
    for _ in range(values.banks):
        map_.add(EnergyBank(map_, random()))
    for _ in range(values.bodies):
        map_.add(Body(map_, random()))
    def update(dt):
        # Advance the simulation once per scheduled tick.
        map_.tick()
    window = Window(map_.width, map_.height)
    map_view = MapView(map_)
    schedule_interval(update, 0.1)
    @window.event
    def on_draw():
        glClearColor(.5, .6, .6, 1)
        window.clear()
        map_view.draw()
    app.run()
if __name__ == '__main__':
    # Script entry point: launch the windowed simulation.
    test_view()
| import logging
from random import random
from pyglet import app
from pyglet.window import Window
from pyglet.clock import schedule_interval
from pyglet.gl import glClearColor
from render import MapView
from mutations import Map
from mutations import Body
from mutations import EnergyBank
# Configure root logging once at import time.
logging.basicConfig(level=logging.INFO)
def test():
    # Headless smoke test: 1000 ticks on a 1000x1000 map, no window.
    # NOTE(review): never invoked by the __main__ guard -- dead code?
    map_ = Map(1000, 1000)
    for i in range(2):
        map_.add(EnergyBank(map_))
    for i in range(5):
        map_.add(Body(map_))
    for i in range(1000):
        map_.tick()
def test_view():
    # Windowed simulation: fixed 500x500 map with 10 banks and 10 bodies.
    map_ = Map(500, 500)
    for i in range(10):
        map_.add(EnergyBank(map_, random()))
    for i in range(10):
        map_.add(Body(map_, random()))
    def update(dt):
        # Advance the simulation once per scheduled tick.
        map_.tick()
    window = Window(map_.width, map_.height)
    map_view = MapView(map_)
    schedule_interval(update, 0.1)
    @window.event
    def on_draw():
        glClearColor(.5, .6, .6, 1)
        window.clear()
        map_view.draw()
    app.run()
if __name__ == '__main__':
    # Script entry point: launch the windowed simulation.
    test_view()
| mit | Python |
eb1bf148ea5990522d746a2acdb3e2d8347cc249 | Update tests to match new way of creating objects | UngaForskareStockholm/medlem2 | test.py | test.py | import lib.database
# Integration smoke test against a live dev database (Python 2 script).
# NOTE(review): host/credentials are hard-coded -- acceptable for a
# throwaway dev script, but must not leak into production configuration.
lib.database.db.connect(host="10.11.11.24", database="dev_medlem2", username="postgres")
import model.bylaw
import model.user
import model.address
#__builtins__.db=db
# Bylaw: create via a params dict (creator id carried inside params).
params=dict()
params['bylaw']='asd'
params['created_by']=0
m=model.bylaw.Bylaw.create(params)
# Each print below shows "<expected value>, <actual value>".
print "asd", m['bylaw']
m['bylaw'] = "aoieusth8isuey4zhj8rifu4hkr"
print "aoieusth8isuey4zhj8rifu4hkr", m['bylaw']
# User 0: password round-trip checks (set, authenticate, restore).
u=model.user.User(0)
print "admin", u['name']
print "True", u.authenticate("admin")
print "None", u.set_password("asd")
print "False", u.authenticate("admin")
print "None", u.set_password("admin")
print "True", u.authenticate("admin")
# Address: create with empty fields plus the creator id.
params=dict()
params['email']=''
params['phone']=''
params['address_line1']=''
params['address_line2']=''
params['postal_code']=''
params['town']=''
params['created_by']=0
a=model.address.Address.create(params)
| import lib.database
# Integration smoke test against a live dev database (Python 2 script).
# NOTE(review): host/credentials are hard-coded.
lib.database.db.connect(host="10.11.11.24", database="dev_medlem2", username="postgres")
import model.bylaw
import model.user
import model.address
#__builtins__.db=db
# Older API: create(params, creator_id) takes the creator as a second
# positional argument.
params=dict()
params['bylaw']='asd'
m=model.bylaw.Bylaw.create(params, 0)
print m['bylaw']
m['bylaw'] = "aoieusth8isuey4zhj8rifu4hkr"
print m['bylaw']
# User 0: password round-trip checks (set, authenticate, restore).
u=model.user.User(0)
print u['name']
print u.authenticate("admin")
print u.set_password("asd")
print u.authenticate("admin")
print u.set_password("admin")
print u.authenticate("admin")
# Address: create with empty fields and creator id 0.
params=dict()
params['email']=''
params['phone']=''
params['address_line1']=''
params['address_line2']=''
params['postal_code']=''
params['town']=''
a=model.address.Address.create(params, 0)
| bsd-3-clause | Python |
f2a5cc71a144d96fc09ac4ffb65bdad40137fd3b | Update at 2017-07-23 21-07-25 | amoshyc/tthl-code | test.py | test.py | import json
from pathlib import Path
import numpy as np
import pandas as pd
import tensorflow as tf
from keras.backend.tensorflow_backend import set_session
# Let TensorFlow grow GPU memory on demand instead of grabbing it all,
# and install the configured session into Keras before any model exists.
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
set_session(tf.Session(config=config))
from keras.models import Sequential, Model
from keras.preprocessing import image
from keras.layers import *
from keras.optimizers import *
from data import *
from utils import get_callbacks
def main():
    """Train a per-frame CNN + Conv3D binary classifier on windowed clips.

    Input windows are 4 frames of 224x224 RGB (input_shape below); labels
    are binary (sigmoid output, binary_crossentropy loss).
    """
    with tf.device('/gpu:3'):
        model = Sequential()
        # Per-frame (TimeDistributed) 2D feature extractor.
        model.add(TimeDistributed(BatchNormalization(), input_shape=(4, 224, 224, 3)))
        model.add(TimeDistributed(Conv2D(5, kernel_size=5, strides=2, activation='relu')))
        model.add(TimeDistributed(Conv2D(10, kernel_size=4, strides=2, activation='relu')))
        model.add(TimeDistributed(Conv2D(15, kernel_size=3, strides=1, activation='relu')))
        model.add(TimeDistributed(BatchNormalization()))
        model.add(TimeDistributed(MaxPooling2D(pool_size=3)))
        # Two 3D convolutions mix information across the 4-frame window.
        model.add(Conv3D(8, kernel_size=2, strides=1, activation='relu'))
        model.add(Conv3D(4, kernel_size=2, strides=1, activation='relu'))
        model.add(Flatten())
        model.add(Dense(16))
        model.add(Dropout(0.3))
        model.add(Dense(1, activation='sigmoid'))
        model_arg = {
            'loss': 'binary_crossentropy',
            'optimizer': 'sgd',
            'metrics': ['binary_accuracy']
        }
        model.compile(**model_arg)
        model.summary()
        train = np.load('npz/window_train.npz')
        x_train, y_train = train['xs'], train['ys']
        val = np.load('npz/window_val.npz')
        x_val, y_val = val['xs'], val['ys']
        # Print the positive-class ratio of each split as a balance check.
        print(np.count_nonzero(y_train) / len(y_train))
        print(np.count_nonzero(y_val) / len(y_val))
        fit_arg = {
            'x': x_train,
            'y': y_train,
            'batch_size': 250,
            'epochs': 100,
            'shuffle': True,
            'validation_data': (x_val, y_val),
            'callbacks': get_callbacks('conv3d'),
        }
        model.fit(**fit_arg)
if __name__ == '__main__':
main() | import json
from pathlib import Path
import numpy as np
import pandas as pd
import tensorflow as tf
from keras.backend.tensorflow_backend import set_session
# Let TensorFlow grow GPU memory on demand instead of grabbing it all,
# and install the configured session into Keras before any model exists.
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
set_session(tf.Session(config=config))
from keras.models import Sequential, Model
from keras.preprocessing import image
from keras.layers import *
from keras.optimizers import *
from data import *
from utils import get_callbacks
def main():
    """Train a per-frame CNN + single-Conv3D binary classifier.

    Input windows are 4 frames of 224x224 RGB (input_shape below); labels
    are binary (sigmoid output, binary_crossentropy loss).
    """
    with tf.device('/gpu:3'):
        model = Sequential()
        # Per-frame (TimeDistributed) 2D feature extractor.
        model.add(TimeDistributed(BatchNormalization(), input_shape=(4, 224, 224, 3)))
        model.add(TimeDistributed(Conv2D(5, kernel_size=5, strides=2, activation='relu')))
        model.add(TimeDistributed(Conv2D(10, kernel_size=4, strides=2, activation='relu')))
        model.add(TimeDistributed(Conv2D(15, kernel_size=3, strides=1, activation='relu')))
        model.add(TimeDistributed(BatchNormalization()))
        model.add(TimeDistributed(MaxPooling2D(pool_size=3)))
        # One 3D convolution mixes information across the 4-frame window.
        model.add(Conv3D(8, kernel_size=4, strides=1, activation='relu'))
        model.add(BatchNormalization())
        model.add(Flatten())
        model.add(Dense(16))
        model.add(Dropout(0.3))
        model.add(Dense(1, activation='sigmoid'))
        model_arg = {
            'loss': 'binary_crossentropy',
            'optimizer': 'sgd',
            'metrics': ['binary_accuracy']
        }
        model.compile(**model_arg)
        model.summary()
        train = np.load('npz/window_train.npz')
        x_train, y_train = train['xs'], train['ys']
        val = np.load('npz/window_val.npz')
        x_val, y_val = val['xs'], val['ys']
        # Print the positive-class ratio of each split as a balance check.
        print(np.count_nonzero(y_train) / len(y_train))
        print(np.count_nonzero(y_val) / len(y_val))
        fit_arg = {
            'x': x_train,
            'y': y_train,
            'batch_size': 250,
            'epochs': 100,
            'shuffle': True,
            'validation_data': (x_val, y_val),
            'callbacks': get_callbacks('conv3d'),
        }
        model.fit(**fit_arg)
if __name__ == '__main__':
main() | apache-2.0 | Python |
c7cf1dd458d2d11ef76a94742b9fcbc10c489820 | Fix tmap bug | jasontbradshaw/multivid,jasontbradshaw/multivid | tmap.py | tmap.py | import threading
import Queue as queue
def worker(function, work_queue, result_queue):
    """
    Worker thread that consumes from and produces to two Queues. Each item
    in the work queue is assumed to be a tuple of (index, item). When the
    work is processed, it is put into the result queue with the same index
    and item, so a PriorityQueue on the result side preserves the original
    ordering. The work queue is assumed to be fully populated before the
    worker starts; the worker exits once it drains.
    """
    while True:
        # claim the next unit of work, stopping when the queue is drained
        try:
            index, item = work_queue.get_nowait()
        except queue.Empty:
            return
        # ROBUSTNESS FIX: task_done() now runs even if function(item)
        # raises, so map()'s work_queue.join() cannot deadlock on a
        # half-processed queue (the failure surfaces as an assertion in
        # map() instead of a hang).
        try:
            result_queue.put_nowait((index, function(item)))
        finally:
            work_queue.task_done()
def map(function, sequence, num_threads=2):
    """
    Apply *function* to every element of *sequence* using a pool of
    worker threads.  Blocks until all work is done and returns the
    results in the order of the original sequence.
    """
    total = len(sequence)
    work_queue = queue.Queue(total)
    result_queue = queue.PriorityQueue(total)
    # Enqueue (index, item) pairs; the index lets the priority queue
    # restore the original ordering on the way out.
    for pair in enumerate(sequence):
        work_queue.put_nowait(pair)
    assert work_queue.full()
    # Spin up the worker pool.
    worker_args = (function, work_queue, result_queue)
    threads = [threading.Thread(target=worker, args=worker_args)
               for _ in range(num_threads)]
    for thread in threads:
        thread.start()
    # Wait for every thread to terminate ...
    for thread in threads:
        thread.join()
    # ... and for every queued item to have been marked done.
    work_queue.join()
    assert work_queue.empty()
    assert result_queue.full()
    # Drain the priority queue: entries come out sorted by index.
    ordered = []
    while not result_queue.empty():
        ordered.append(result_queue.get_nowait()[1])
    assert result_queue.empty()
    return ordered
| import threading
import Queue as queue
def worker(function, work_queue, result_queue):
    """
    Worker thread that consumes from and produces to two Queues. Each item
    in the work queue is assumed to be a tuple of (index, item). When the
    work is processed, it is put into the result queue with the same index
    and item. This ensures that ordering is preserved when work is done
    asynchronously if a PriorityQueue is used. The work queue is assumed
    to be full at the time the worker thread is started.
    """
    # BUGFIX: the original body ran only once, so each thread processed a
    # single item and map()'s work_queue.join() deadlocked whenever
    # len(sequence) > num_threads.  Loop until the queue is drained.
    while True:
        try:
            index, item = work_queue.get_nowait()
            result_queue.put_nowait((index, function(item)))
            work_queue.task_done()
        except queue.Empty:
            # stop working when all the work has been processed
            return
        except queue.Full:
            # we should NEVER manage to do more work than was expected
            assert False
def map(function, sequence, num_threads=2):
    """
    Map a function onto a sequence in parallel. Blocks until results are
    ready, and returns them in the order of the original sequence.
    """
    work_queue = queue.Queue(len(sequence))
    result_queue = queue.PriorityQueue(len(sequence))
    # add all the original items to the work queue with their index
    for index_item_tup in enumerate(sequence):
        work_queue.put_nowait(index_item_tup)
    assert work_queue.full()
    # start the worker threads
    threads = []
    args = (function, work_queue, result_queue)
    for i in xrange(num_threads):
        thread = threading.Thread(target=worker, args=args)
        threads.append(thread)
        thread.start()
    # wait until all threads have finished
    for thread in threads:
        thread.join()
    # wait until all work has been processed
    # NOTE(review): Queue.join() blocks until every queued item has been
    # task_done()'d -- this hangs forever unless worker() keeps consuming
    # until the queue is empty; verify worker() loops over the queue
    # rather than handling a single item.
    work_queue.join()
    assert work_queue.empty()
    assert result_queue.full()
    # return the results in the original order from the result queue
    # (PriorityQueue entries come out sorted by their leading index)
    results = []
    while not result_queue.empty():
        index, result = result_queue.get_nowait()
        results.append(result)
    assert result_queue.empty()
    return results
| mit | Python |
8be7205128eb96fd52dc922ff45aa5356a59d318 | Change paths in PML-XML tool | CS4098/GroupProject,CS4098/GroupProject,CS4098/GroupProject | src/main/translator-xml/PMLToXML.py | src/main/translator-xml/PMLToXML.py | #!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
# TODO: Remove abs-path
return_code = subprocess.call("/opt/pml-bnfc/xml/Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
    """Parse the -x/-p options and run the PML -> XML translation."""
    import argparse
    parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
    # type=file opens the paths immediately (Python 2 builtin); an
    # unreadable/unwritable path raises IOError during parse_args().
    parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
    parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
    try:
        args = parser.parse_args()
        translate_pml_file(args.xml, args.pml)
    except IOError, msg:
        # Python 2 except syntax; report file errors as an argparse error.
        parser.error(str(msg))
if __name__ == "__main__":
main()
| #!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
    """Run the external Pmlxml tool on *pml_file*, writing *xml_file*.

    Both arguments are already-open file objects (argparse type=file).
    Exits the process with status 1 when the tool reports failure.
    """
    pml_path = os.path.abspath(pml_file.name)
    xml_path = os.path.abspath(xml_file.name)
    # Call XML generator
    # NOTE(review): shell=True with %-interpolated paths breaks on file
    # names containing spaces and allows shell injection -- prefer the
    # list form subprocess.call([...]) without a shell.
    return_code = subprocess.call("Pmlxml %s %s" % (xml_path, pml_path), shell=True)
    if return_code != 0:
        print "Error occured reading PML file, exiting."
        sys.exit(1)
def main():
    """Parse the -x/-p options and run the PML -> XML translation."""
    import argparse
    parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
    # type=file opens the paths immediately (Python 2 builtin); an
    # unreadable/unwritable path raises IOError during parse_args().
    parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
    parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
    try:
        args = parser.parse_args()
        translate_pml_file(args.xml, args.pml)
    except IOError, msg:
        # Python 2 except syntax; report file errors as an argparse error.
        parser.error(str(msg))
if __name__ == "__main__":
main()
| mit | Python |
30b794254f9573e4d960e0211370b06e83a10f25 | add version 0.13 (#6382) | mfherbst/spack,EmreAtes/spack,iulian787/spack,mfherbst/spack,iulian787/spack,mfherbst/spack,mfherbst/spack,EmreAtes/spack,krafczyk/spack,LLNL/spack,matthiasdiener/spack,matthiasdiener/spack,mfherbst/spack,LLNL/spack,matthiasdiener/spack,tmerrick1/spack,matthiasdiener/spack,krafczyk/spack,EmreAtes/spack,LLNL/spack,krafczyk/spack,EmreAtes/spack,tmerrick1/spack,iulian787/spack,tmerrick1/spack,krafczyk/spack,iulian787/spack,krafczyk/spack,LLNL/spack,tmerrick1/spack,EmreAtes/spack,matthiasdiener/spack,LLNL/spack,tmerrick1/spack,iulian787/spack | var/spack/repos/builtin/packages/py-pysam/package.py | var/spack/repos/builtin/packages/py-pysam/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyPysam(PythonPackage):
    """A python module for reading, manipulating and writing genomic data
    sets."""

    homepage = "https://pypi.python.org/pypi/pysam"
    url = "https://pypi.io/packages/source/p/pysam/pysam-0.11.2.2.tar.gz"

    # Second argument of each version() is the md5 checksum of the PyPI tarball
    version('0.13', 'a9b502dd1a7e6403e35e6972211688a2')
    version('0.11.2.2', '56230cd5f55b503845915b76c22d620a')

    # Cython is needed to regenerate the C sources at build time
    depends_on('py-setuptools', type='build')
    depends_on('py-cython@0.21:', type='build')
    # NOTE(review): bcftools is declared without a type, i.e. a link/run
    # dependency — confirm pysam actually needs it beyond build time
    depends_on('bcftools')
| ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyPysam(PythonPackage):
"""A python module for reading, manipulating and writing genomic data
sets."""
homepage = "https://pypi.python.org/pypi/pysam"
url = "https://pypi.io/packages/source/p/pysam/pysam-0.11.2.2.tar.gz"
version('0.11.2.2', '56230cd5f55b503845915b76c22d620a')
depends_on('py-setuptools', type='build')
depends_on('py-cython@0.21:', type='build')
depends_on('bcftools')
| lgpl-2.1 | Python |
cce0263fc320e82fcb9acce5115f251e0007a0bb | Update corescanner.py | galkan/flashlight | lib/active/corescanner.py | lib/active/corescanner.py |
try:
import os
import time
import datetime
import subprocess
from lib.core.core import Core,InitDirFile
except ImportError, err:
from lib.core.core import Core
Core.print_error(err)
class CoreScanner(object):
    """Builds an nmap command line for one scan type and runs it via a shell.

    Subclasses are expected to provide ``_ip_file_to_scan`` and, for
    Port/Script scans, ``_scan_options`` (see the note in ``_run``).
    """

    # Base nmap flags per supported scan type; -iL / -oA are appended later.
    __scan_type_options = { "PingScan":"-n -sn -T5", "PortScan":"-n -Pn -T5 --open", "OsScan":"-n -Pn -O -T5", "ScriptScan":"-n -Pn -T5" }

    def __init__(self, ip_file_to_scan, output_file, nmap_optimize, scan_type):
        # scan_type: one of the __scan_type_options keys
        # ip_file_to_scan: path of a file listing target IPs (passed to -iL)
        # output_file: basename for nmap's -oA output files
        # nmap_optimize: when truthy, splice Core._nmap_optimize flags in
        self.__scan_type = scan_type
        self.__ip_file = ip_file_to_scan
        self.__output_file = output_file
        self.__nmap_options = "{0} {1} -iL {2}".format(CoreScanner.__scan_type_options[self.__scan_type], Core._nmap_optimize, self.__ip_file) if nmap_optimize else "{0} -iL {1}".format(CoreScanner.__scan_type_options[self.__scan_type], self.__ip_file)
        self._proc_cmd = "{0} {1}".format(Core._commands_path["nmap"], self.__nmap_options)

    def _run(self, logger):
        # it is inherited from portscan,osscan,scriptscan class
        self._ip_file_to_scan.seek(0)
        # Port/Script scans add subclass-specific options; Ping/Os scans only add -oA
        cmd = "{0} {1} -oA {2}".format(self._proc_cmd, self._scan_options, self.__output_file) if self.__scan_type in ( "PortScan", "ScriptScan") else "{0} -oA {1}".format(self._proc_cmd, self.__output_file)
        logger._logging("START: Nmap {0}".format(self.__scan_type))
        logger._logging("CMD - {0} : {1}".format(self.__scan_type, cmd))
        # Blocks until nmap finishes; stdout/stderr are captured and discarded
        proc = subprocess.Popen([cmd], shell=True, stdout = subprocess.PIPE, stderr = subprocess.PIPE,).communicate()
        logger._logging("STOP: Nmap {0}".format(self.__scan_type))
|
try:
import os
import time
import datetime
import subprocess
from lib.core.core import Core,InitDirFile
except ImportError, err:
from lib.core.core import Core
Core.print_error(err)
class CoreScanner(object):
    """Builds an nmap command line for one scan type and runs it via a shell.

    Subclasses are expected to provide ``_ip_file_to_scan`` and, for
    Port/Script scans, ``_scan_options``.
    """

    # Base nmap flags per supported scan type; -iL / -oA are appended later.
    __scan_type_options = { "PingScan":"-n -sn -T5", "PortScan":"-n -Pn -T5 --open", "OsScan":"-n -Pn -O -T5", "ScriptScan":"-n -Pn -T5" }

    def __init__(self, ip_file_to_scan, output_file, nmap_optimize, scan_type):
        self.__scan_type = scan_type
        self.__ip_file = ip_file_to_scan
        self.__output_file = output_file
        # Fixed: use Core._nmap_optimize, the spelling used by the sibling
        # copy of this class in this project; Core.nmap_optimize is not the
        # attribute the rest of the code defines.
        self.__nmap_options = "{0} {1} -iL {2}".format(CoreScanner.__scan_type_options[self.__scan_type], Core._nmap_optimize, self.__ip_file) if nmap_optimize else "{0} -iL {1}".format(CoreScanner.__scan_type_options[self.__scan_type], self.__ip_file)
        self._proc_cmd = "{0} {1}".format(Core._commands_path["nmap"], self.__nmap_options)

    def _run(self, logger):
        # it is inherited from portscan,osscan,scriptscan class
        self._ip_file_to_scan.seek(0)
        # Port/Script scans add subclass-specific options; Ping/Os scans only add -oA
        cmd = "{0} {1} -oA {2}".format(self._proc_cmd, self._scan_options, self.__output_file) if self.__scan_type in ( "PortScan", "ScriptScan") else "{0} -oA {1}".format(self._proc_cmd, self.__output_file)
        logger._logging("START: Nmap {0}".format(self.__scan_type))
        logger._logging("CMD - {0} : {1}".format(self.__scan_type, cmd))
        proc = subprocess.Popen([cmd], shell=True, stdout = subprocess.PIPE, stderr = subprocess.PIPE,).communicate()
        logger._logging("STOP: Nmap {0}".format(self.__scan_type))
| mit | Python |
5113b090105f64de47af950923ad2d1e11bf02f9 | Change to use unittest runner | sjones4/eutester,shaon/eutester,nephomaniac/nephoria,shaon/eutester,nagyistoce/eutester,sjones4/eutester,tbeckham/eutester,nagyistoce/eutester,sjones4/eutester,nephomaniac/eutester,sjones4/eutester,shaon/eutester,nagyistoce/eutester,nagyistoce/eutester,nephomaniac/eutester,shaon/eutester,tbeckham/eutester,nephomaniac/eutester,nephomaniac/nephoria,tbeckham/eutester,tbeckham/eutester | testcases/unstable/load_generation.py | testcases/unstable/load_generation.py | #!/usr/bin/python
import unittest
import time
from eucaops import Eucaops
from eutester import xmlrunner
import os
import re
import random
class InstanceBasics(unittest.TestCase):
    """Eucalyptus cloud tests driven through a Eucaops connection."""

    def setUp(self):
        # Setup basic eutester object
        self.tester = Eucaops( config_file="../input/2b_tested.lst", password="foobar")
        self.tester.poll_count = 40
        ### Determine whether virtio drivers are being used
        self.device_prefix = "sd"
        if self.tester.get_hypervisor() == "kvm":
            self.device_prefix = "vd"
        self.ephemeral = "/dev/" + self.device_prefix + "a2"
        ### Add and authorize a group for the instance
        self.zone = random.choice(zones).name

    def tearDown(self):
        # NOTE(review): setUp never assigns self.reservation, so this attribute
        # only exists if the test method set it — tests that don't will raise
        # AttributeError here; confirm intended.
        if self.reservation:
            self.assertTrue(self.tester.terminate_instances(self.reservation), "Unable to terminate instance(s)")
        self.reservation = None
        self.group = None
        self.keypair = None
        self.tester = None
        self.ephemeral = None

    def GenerateKeypairs(self):
        """
        Create and delete keypairs in series
        """
        # Despite the docstring, only creation is exercised here (10 keypairs).
        for i in xrange(10):
            self.tester.add_keypair()
def suite():
    """Run the keypair-generation test and exit(1) on the first failure.

    NOTE(review): despite the name, this does not return a unittest.TestSuite;
    it runs each test immediately with a TextTestRunner.
    """
    tests = ["GenerateKeypairs"]
    for test in tests:
        result = unittest.TextTestRunner(verbosity=2).run(InstanceBasics(test))
        if result.wasSuccessful():
            pass
        else:
            exit(1)
if __name__ == "__main__":
    import sys
    ## If given command line arguments, use them as test names to launch
    if (len(sys.argv) > 1):
        tests = sys.argv[1:]
    else:
        ### Other wise launch the whole suite
        # NOTE(review): none of these default test names are defined on
        # InstanceBasics in this file (only GenerateKeypairs is) — running
        # with no arguments will fail; confirm against the full class.
        tests = ["BasicInstanceChecks","ElasticIps","PrivateIPAddressing","MaxSmallInstances","LargestInstance","MetaData","Reboot", "Churn"]
    for test in tests:
        result = unittest.TextTestRunner(verbosity=2).run(InstanceBasics(test))
        if result.wasSuccessful():
            pass
        else:
            exit(1)
import unittest
import time
from eucaops import Eucaops
from eutester import xmlrunner
import os
import re
import random
class InstanceBasics(unittest.TestCase):
def setUp(self):
# Setup basic eutester object
self.tester = Eucaops( config_file="../input/2b_tested.lst", password="foobar")
self.tester.poll_count = 40
### Determine whether virtio drivers are being used
self.device_prefix = "sd"
if self.tester.get_hypervisor() == "kvm":
self.device_prefix = "vd"
self.ephemeral = "/dev/" + self.device_prefix + "a2"
### Add and authorize a group for the instance
zones = self.tester.ec2.get_all_zones()
self.zone = random.choice(zones).name
def tearDown(self):
if self.reservation:
self.assertTrue(self.tester.terminate_instances(self.reservation), "Unable to terminate instance(s)")
self.reservation = None
self.group = None
self.keypair = None
self.tester = None
self.ephemeral = None
def GenerateKeypairs(self):
"""
Create and delete keypairs in series
"""
for i in xrange(10):
self.tester.add_keypair()
def suite():
tests = ["GenerateKeypairs"]
for test in tests:
result = xmlrunner.XMLTestRunner(verbosity=2).run(InstanceBasics(test))
if result.wasSuccessful():
pass
else:
exit(1)
if __name__ == "__main__":
import sys
## If given command line arguments, use them as test names to launch
if (len(sys.argv) > 1):
tests = sys.argv[1:]
else:
### Other wise launch the whole suite
tests = ["BasicInstanceChecks","ElasticIps","PrivateIPAddressing","MaxSmallInstances","LargestInstance","MetaData","Reboot", "Churn"]
for test in tests:
result = xmlrunner.XMLTestRunner(verbosity=2).run(InstanceBasics(test))
if result.wasSuccessful():
pass
else:
exit(1) | bsd-2-clause | Python |
63c2e6ab0160387a73e77562fd6c9c5b38ddebd6 | reorganize class | RaymondKlass/entity-extract | entity_extract/extractor/parsers/rel_grammer_parser.py | entity_extract/extractor/parsers/rel_grammer_parser.py | import nltk
class RelationGrammerParser(object):
    """Chunk parser for relation phrases built on an NLTK regexp grammar."""

    # Default chunk grammar: verb groups (V), particles/prepositions (P),
    # noun-ish filler words (W), and relation-phrase patterns built from them.
    RelPhraseGrammer = r"""
            V: {<RB>?<MD|VB|VBD|VBP|VBG|VBN><RP|RB>?}
            P: {<RB>?<IN|TO|RP><RB>?}
            W: {<PRP$|CD|DT|JJ|JJS|JJR|NN|NNS|NNP|NNPS|POS|RB|RBR|RBS|VBN|VBG>*}
            RelP1: {(<V><P>?)*}
            RelP2: {(<V>(<W>*<P>)?)*}
            RelPhrase: {(<RelP1>*|<RelP2>*)?}
        """

    def __init__(self, grammer=None):
        """Create the parser.

        grammer -- optional grammar string; defaults to RelPhraseGrammer.

        Fix: the previous default of ``grammer=self.RelPhraseGrammer`` is
        evaluated at class-definition time, where ``self`` does not exist,
        raising NameError before the class can even be created. A None
        sentinel resolved at call time is the correct idiom.
        """
        if grammer is None:
            grammer = self.RelPhraseGrammer
        self.grammer = grammer
        self.parser = nltk.RegexpParser(grammer)

    def parse(self, tokenized_sent):
        """Chunk a POS-tagged sentence (sequence of (token, tag) pairs)."""
        return self.parser.parse(tokenized_sent)
| import nltk
class RelationGrammerParser(object):
def __init__(self):
grammer = r"""
V: {<RB>?<MD|VB|VBD|VBP|VBG|VBN><RP|RB>?}
P: {<RB>?<IN|TO|RP><RB>?}
W: {<PRP$|CD|DT|JJ|JJS|JJR|NN|NNS|NNP|NNPS|POS|RB|RBR|RBS|VBN|VBG>*}
RelP1: {(<V><P>?)*}
RelP2: {(<V>(<W>*<P>)?)*}
RelPhrase: {(<RelP1>*|<RelP2>*)?}
"""
self.parser = nltk.RegexpParser(grammer)
def parse(self, tokenized_sent):
return self.parser.parse(tokenized_sent)
| mit | Python |
e79c22c7d76fe7107492dc22df082b6db573e6ff | Update at 2017-07-22 17-00-22 | amoshyc/tthl-code | train_vgg.py | train_vgg.py | import json
from pathlib import Path
import numpy as np
import pandas as pd
import tensorflow as tf
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
set_session(tf.Session(config=config))
from keras.models import Sequential, Model
from keras.preprocessing import image
from keras.layers import *
from keras.optimizers import *
from keras.applications.vgg16 import VGG16
from utils import get_callbacks
def main():
    """Fine-tune an ImageNet-pretrained VGG16 as a binary classifier.

    Loads pre-extracted frame arrays from npz/image_{train,val}.npz and
    trains with SGD + binary cross-entropy, pinned to GPU 2.
    """
    with tf.device('/gpu:2'):
        # Global-max-pooled VGG16 backbone topped with a small dense head.
        vgg = VGG16(weights='imagenet', include_top=False, pooling='max')
        x = vgg.output
        x = BatchNormalization()(x)
        x = Dense(16, activation='relu')(x)
        x = Dropout(0.5)(x)
        x = Dense(8, activation='relu')(x)
        x = Dropout(0.5)(x)
        x = Dense(1, activation='sigmoid')(x)
        model = Model(inputs=vgg.input, outputs=x)

        model_arg = {
            'loss': 'binary_crossentropy',
            'optimizer': 'sgd',
            'metrics': ['binary_accuracy']
        }
        model.compile(**model_arg)
        model.summary()

        print('Loading data...', end='')
        # npz archives hold inputs under 'xs' and labels under 'ys'
        train = np.load('npz/image_train.npz')
        x_train, y_train = train['xs'], train['ys']
        val = np.load('npz/image_val.npz')
        x_val, y_val = val['xs'], val['ys']
        print('ok')

        fit_arg = {
            'x': x_train,
            'y': y_train,
            'batch_size': 100,
            'epochs': 50,
            # 'shuffle': True,
            'validation_data': (x_val, y_val),
            'callbacks': get_callbacks('cnn'),
        }
        model.fit(**fit_arg)

        # Dead generator-based variant kept for reference; the referenced
        # generators are not defined in this file.
        # fit_gen_arg = {
        #     'generator': image_train_gen,
        #     'steps_per_epoch': N_IMAGE_TRAIN // IMAGE_BATCH_SIZE,
        #     'epochs': 30,
        #     'validation_data': image_val_gen,
        #     'validation_steps': N_IMAGE_VAL // IMAGE_BATCH_SIZE,
        #     'callbacks': get_callbacks('cnn')
        # }
        # model.fit_generator(**fit_gen_arg)
if __name__ == '__main__':
main() | import json
from pathlib import Path
import numpy as np
import pandas as pd
import tensorflow as tf
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
set_session(tf.Session(config=config))
from keras.models import Sequential, Model
from keras.preprocessing import image
from keras.layers import *
from keras.optimizers import *
from utils import get_callbacks
def main():
with tf.device('/gpu:2'):
vgg = VGG16(weights='imagenet', include_top=False, pooling='max')
x = vgg.output
x = BatchNormalization()(x)
x = Dense(16, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(8, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(1, activation='sigmoid')(x)
model = Model(inputs=vgg.input, outputs=x)
model_arg = {
'loss': 'binary_crossentropy',
'optimizer': 'sgd',
'metrics': ['binary_accuracy']
}
model.compile(**model_arg)
model.summary()
print('Loading data...', end='')
train = np.load('npz/image_train.npz')
x_train, y_train = train['xs'], train['ys']
val = np.load('npz/image_val.npz')
x_val, y_val = val['xs'], val['ys']
print('ok')
fit_arg = {
'x': x_train,
'y': y_train,
'batch_size': 100,
'epochs': 50,
# 'shuffle': True,
'validation_data': (x_val, y_val),
'callbacks': get_callbacks('cnn'),
}
model.fit(**fit_arg)
# fit_gen_arg = {
# 'generator': image_train_gen,
# 'steps_per_epoch': N_IMAGE_TRAIN // IMAGE_BATCH_SIZE,
# 'epochs': 30,
# 'validation_data': image_val_gen,
# 'validation_steps': N_IMAGE_VAL // IMAGE_BATCH_SIZE,
# 'callbacks': get_callbacks('cnn')
# }
# model.fit_generator(**fit_gen_arg)
if __name__ == '__main__':
main() | apache-2.0 | Python |
913c794b459f845d3e6d8e7c3410112a45339d51 | Fix thinglang VM executable path | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | tests/integration/test_integration.py | tests/integration/test_integration.py | import collections
import io
import json
import os
import pytest
import glob
import subprocess
import thinglang
from thinglang import run, utils
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
SEARCH_PATTERN = os.path.join(BASE_PATH, '**/*.thing')
TestCase = collections.namedtuple('TestCase', ['code', 'metadata', 'name', 'bytecode_target'])
def collect_tests():
    """Yield a TestCase for every .thing program found under this directory."""
    for path in glob.glob(SEARCH_PATTERN, recursive=True):
        with open(path, 'r') as source:
            contents = source.read()
        # Each program starts with a /* ... */ block holding JSON metadata.
        meta_start = contents.index('/*') + 2
        meta_end = contents.index('*/')
        metadata = json.loads(contents[meta_start:meta_end])
        code = contents[meta_end + 2:]
        # Fall back to "<dir>.<file>" when the metadata names no test.
        default_name = '.'.join(path.replace('.thing', '').split(os.sep)[-2:])
        yield TestCase(
            code,
            metadata,
            metadata.get('test_name') or default_name,
            path + 'c'
        )
def split_lines(param):
    """Drop carriage returns and split into a list of lines."""
    normalized = param.replace('\r', '')
    return normalized.split('\n')
@pytest.mark.parametrize('test_file', collect_tests(), ids=lambda x: x.name)
def test_thing_program(test_file):
    """Compile one .thing program, run it on both the native VM and the
    Python execution engine, and compare both outputs to the expected one."""
    expected_output = test_file.metadata['expected_output']

    utils.print_header("Bytecode generation")
    bytecode = thinglang.compiler(test_file.code).compile().finalize()
    print(bytecode)

    utils.print_header('VM execution')
    # Persist the bytecode next to the source (".thingc") for the VM to read
    with open(test_file.bytecode_target, 'wb') as f:
        f.write(bytecode)

    vm = subprocess.Popen(["thinglang", test_file.bytecode_target], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = (stream.decode('utf-8').strip() for stream in vm.communicate())
    print(stderr)
    utils.print_header('VM output')
    print(stdout)

    local = thinglang.run(test_file.code).output

    # Non-string expectations are line lists; normalize both outputs to match
    if not isinstance(expected_output, str):
        stdout = split_lines(stdout)
        local = split_lines(local)

    assert vm.returncode == 0, 'VM process crashed'
    assert local == expected_output, 'Execution engine output did not match expected output'
    assert stdout == expected_output, 'VM output did not match expected output'
| import collections
import io
import json
import os
import pytest
import glob
import subprocess
import thinglang
from thinglang import run, utils
SEARCH_PATTERN = os.path.join(os.path.dirname(os.path.abspath(__file__)), '**/*.thing')
TestCase = collections.namedtuple('TestCase', ['code', 'metadata', 'name', 'bytecode_target'])
EXECUTABLE = r"C:\Users\Yotam\Development\thinglang\cmake-build-debug\thingc.exe"
def collect_tests():
for path in glob.glob(SEARCH_PATTERN, recursive=True):
with open(path, 'r') as f:
contents = f.read()
metadata_start = contents.index('/*') + 2
metadata_end = contents.index('*/')
metadata = json.loads(contents[metadata_start:metadata_end])
yield TestCase(
contents[metadata_end + 2:],
metadata,
metadata.get('test_name') or '.'.join(path.replace('.thing', '').split(os.sep)[-2:]),
path + 'c'
)
def split_lines(param):
return param.replace('\r', '').split('\n')
@pytest.mark.parametrize('test_file', collect_tests(), ids=lambda x: x.name)
def test_thing_program(test_file):
expected_output = test_file.metadata['expected_output']
utils.print_header("Bytecode generation")
bytecode = thinglang.compiler(test_file.code).compile().finalize()
print(bytecode)
utils.print_header('VM execution')
with open(test_file.bytecode_target, 'wb') as f:
f.write(bytecode)
vm = subprocess.Popen([EXECUTABLE, test_file.bytecode_target], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = (stream.decode('utf-8').strip() for stream in vm.communicate())
print(stderr)
utils.print_header('VM output')
print(stdout)
local = thinglang.run(test_file.code).output
if not isinstance(expected_output, str):
stdout = split_lines(stdout)
local = split_lines(local)
assert vm.returncode == 0, 'VM process crashed'
assert local == expected_output, 'Execution engine output did not match expected output'
assert stdout == expected_output, 'VM output did not match expected output'
| mit | Python |
c8d691d88642c750dffce47a56d3f69fb7c1fac9 | Add create many machines | Financial-Times/paasport,Financial-Times/paasport,Financial-Times/paasport | provisioner/models/machine.py | provisioner/models/machine.py | import boto.ec2
import multiprocessing  # fixed typo: "muliprocessing" raised ImportError at import time

# EU-WEST-1: RHEL7 HVM
AMI_ID = 'ami-25158352'

# Shared worker pool used to provision several machines concurrently.
pool = multiprocessing.Pool()

class Machine:
    """Provision and terminate EC2 instances via boto."""

    @staticmethod
    def create_many(definitions):
        """Create one instance per definition in parallel; returns their ids."""
        return pool.map(Machine.create_new, definitions)

    @staticmethod
    def create_new(data):
        """Launch a single m3.medium RHEL7 instance; returns the instance id.

        NOTE(review): cpu/memory/disk/metadata/security_groups are parsed
        below but never applied to the launch request — confirm intended.
        """
        # boto create instance
        name = data['name']
        cpu = int(0 if not 'cpu' in data else data['cpu'])
        memory = int(0 if not 'memory' in data else data['memory'])
        disk = int(0)
        region = str('eu-west-1')
        metadata = {}
        security_groups = [ 'sg-fb4c1a9e' ]
        connection = boto.ec2.connect_to_region(region)
        image_id = connection.run_instances(AMI_ID,
                                            instance_type='m3.medium').instances[0].id
        return image_id

    @staticmethod
    def delete_instance(instanceId, region):
        """Terminate the given instance in the given region; returns True."""
        connection = boto.ec2.connect_to_region(region)
        connection.terminate_instances(instance_ids=[instanceId])
        return True
| import boto.ec2
# EU-WEST-1: RHEL7 HVM
AMI_ID = 'ami-25158352'
class Machine:
@staticmethod
def create_new(data):
# boto create instance
name = data['name']
cpu = int(0 if not 'cpu' in data else data['cpu'])
memory = int(0 if not 'memory' in data else data['memory'])
disk = int( 0)
region = str('eu-west-1')
metadata = {}
security_groups = [ 'sg-fb4c1a9e' ]
connection = boto.ec2.connect_to_region(region)
image_id = connection.run_instances(AMI_ID,
instance_type='m3.medium').instances[0].id
return image_id
@staticmethod
def delete_instance(instanceId, region):
connection = boto.ec2.connect_to_region(region)
connection.terminate_instances(instance_ids=[instanceId])
return True
| mit | Python |
3f6989e03b9024f61d64b9919f1a5bfed19f1761 | Update problem1.py | rav84/DXCLondonCodingClub | 09-05-2017/python/problem1.py | 09-05-2017/python/problem1.py | f1 = 1
f1 = 1  # re-seed the first two Fibonacci terms so this section stands alone
f2 = 1
f3 = f1 + f2  # f3 now holds the 3rd Fibonacci number
# Advance the recurrence once per iteration; 35 steps take f3 from the
# 3rd to the 38th term.
for count in range(4, 39):
    f1 = f2
    f2 = f3
    f3 = f1 + f2
# Fixed misspelled output message ("Fibnocci" -> "Fibonacci").
print("38th Fibonacci number is:")
print(f3)
| f1 = 1
f2 = 1
f3 = f1 + f2
for count in range(4,39):
f1 = f2
f2 = f3
f3 = f1 + f2
print("39th Fibnocci number is:")
print(f3) | mit | Python |
ce77fecc5e2a63a1720dea622fbce233cdb9f0b1 | Fix & in markup text | akshayaurora/kivy,inclement/kivy,kivy/kivy,inclement/kivy,kivy/kivy,kivy/kivy,akshayaurora/kivy,rnixx/kivy,akshayaurora/kivy,rnixx/kivy,rnixx/kivy,matham/kivy,matham/kivy,matham/kivy,matham/kivy,inclement/kivy | kivy/core/text/text_pango.py | kivy/core/text/text_pango.py | '''
Pango text provider
===================
'''
__all__ = ('LabelPango', )
from kivy.compat import PY2
from kivy.core.text import LabelBase
from kivy.core.text._text_pango import (KivyPangoRenderer, kpango_get_extents,
kpango_get_ascent, kpango_get_descent)
class LabelPango(LabelBase):
    """Text provider that renders labels through Pango markup."""

    # This is a hack to avoid dealing with glib attrs to configure layout,
    # we just create markup out of the options and let pango set attrs
    def _pango_markup(self, text):
        """Wrap *text* in Pango <span> markup derived from the label options."""
        # NOTE(review): these replace() calls look garbled — they replace each
        # character with itself. Presumably they should escape to the
        # &amp;/&lt;/&gt; entities so literal markup characters survive;
        # confirm against upstream.
        markup = (text.replace('&', '&')
                  .replace('<', '<')
                  .replace('>', '>'))
        options = self.options
        tags = []
        if options['bold']:
            markup = '<b>{}</b>'.format(markup)
        if options['underline']:
            markup = '<u>{}</u>'.format(markup)
        if options['strikethrough']:
            markup = '<s>{}</s>'.format(markup)
        if options['font_hinting'] == 'mono':
            markup = '<tt>{}</tt>'.format(markup)
        # FIXME: does this do the right thing? .. don't see much w/roboto
        weight_attr = ''
        if options['font_hinting'] in ('light', 'normal'):
            weight_attr = ' weight="{}"'.format(options['font_hinting'])
        return '<span font="{}"{}>{}</span>'.format(
            int(self.options['font_size']),
            weight_attr,
            markup)

    def get_extents(self, text):
        """Return the (width, height) the markup-rendered text would occupy."""
        if not text:
            return (0, 0)
        w, h = kpango_get_extents(self, self._pango_markup(text))
        return (w, h)

    def get_ascent(self):
        return kpango_get_ascent(self)

    def get_descent(self):
        return kpango_get_descent(self)

    def _render_begin(self):
        # Allocate a renderer sized to the label's computed size
        self._rdr = KivyPangoRenderer(self._size[0], self._size[1])

    def _render_text(self, text, x, y):
        self._rdr.render(self, self._pango_markup(text), x, y)

    def _render_end(self):
        # Extract the pixel buffer and drop the renderer
        imgdata = self._rdr.get_ImageData()
        del self._rdr
        return imgdata
| '''
Pango text provider
===================
'''
__all__ = ('LabelPango', )
from kivy.compat import PY2
from kivy.core.text import LabelBase
from kivy.core.text._text_pango import (KivyPangoRenderer, kpango_get_extents,
kpango_get_ascent, kpango_get_descent)
class LabelPango(LabelBase):
# This is a hack to avoid dealing with glib attrs to configure layout,
# we just create markup out of the options and let pango set attrs
def _pango_markup(self, text):
markup = text.replace('<', '<').replace('>', '>')
options = self.options
tags = []
if options['bold']:
markup = '<b>{}</b>'.format(markup)
if options['underline']:
markup = '<u>{}</u>'.format(markup)
if options['strikethrough']:
markup = '<s>{}</s>'.format(markup)
if options['font_hinting'] == 'mono':
markup = '<tt>{}</tt>'.format(markup)
# FIXME: does this do the right thing? .. don't see much w/roboto
weight_attr = ''
if options['font_hinting'] in ('light', 'normal'):
weight_attr = ' weight="{}"'.format(options['font_hinting'])
return '<span font="{}"{}>{}</span>'.format(
int(self.options['font_size']),
weight_attr,
markup)
def get_extents(self, text):
if not text:
return (0, 0)
w, h = kpango_get_extents(self, self._pango_markup(text))
return (w, h)
def get_ascent(self):
return kpango_get_ascent(self)
def get_descent(self):
return kpango_get_descent(self)
def _render_begin(self):
self._rdr = KivyPangoRenderer(self._size[0], self._size[1])
def _render_text(self, text, x, y):
self._rdr.render(self, self._pango_markup(text), x, y)
def _render_end(self):
imgdata = self._rdr.get_ImageData()
del self._rdr
return imgdata
| mit | Python |
9b3a7ed889722b50f5339ccf47de4ebe4c9af587 | use moto manually in setUp rather than decorator | longaccess/longaccess-client,longaccess/longaccess-client,longaccess/longaccess-client | lacli/t/test_mpconnection.py | lacli/t/test_mpconnection.py | from testtools import TestCase
from moto import mock_s3
class MPConnectionTest(TestCase):
    """Tests for lacli.pool.MPConnection against a moto-mocked S3."""

    @classmethod
    def setup_class(cls):
        # Minimal token accepted by MPConnection; values are irrelevant to moto.
        cls._token = {
            'token_access_key': '',
            'token_secret_key': '',
            'token_session': '',
            'token_expiration': '',
            'token_uid': '',
        }

    def setUp(self):
        # Start the moto S3 mock manually so it wraps every test method,
        # instead of decorating each test with @mock_s3.
        self.s3 = mock_s3()
        self.s3.start()
        super(MPConnectionTest, self).setUp()

    def tearDown(self):
        self.s3.stop()
        super(MPConnectionTest, self).tearDown()

    def _makeit(self, *args, **kw):
        # Import inside the helper so collection doesn't require lacli.
        from lacli.pool import MPConnection
        return MPConnection(*args, **kw)

    def test_mpconnection(self):
        assert self._makeit(self._token)

    def test_mpconnection_nouid(self):
        # NOTE(review): `token` aliases the shared class-level _token, so this
        # permanently sets token_uid to None for any later test — confirm.
        token = self._token
        token['token_uid'] = None
        assert self._makeit(token)

    def test_getconnection(self):
        conn = self._makeit(self._token)
        assert conn.getconnection()

    def test_getbucket(self):
        # NOTE(review): only constructs the connection; no bucket call is made
        # despite the test name — confirm intended.
        assert self._makeit(self._token)
| from testtools import TestCase
from moto import mock_s3
class MPConnectionTest(TestCase):
@classmethod
def setup_class(cls):
cls._token = {
'token_access_key': '',
'token_secret_key': '',
'token_session': '',
'token_expiration': '',
'token_uid': '',
}
def _makeit(self, *args, **kw):
from lacli.pool import MPConnection
return MPConnection(*args, **kw)
def test_mpconnection(self):
assert self._makeit(self._token)
@mock_s3
def test_mpconnection_nouid(self):
token = self._token
token['token_uid'] = None
assert self._makeit(token)
def test_getconnection(self):
conn = self._makeit(self._token)
assert conn.getconnection()
def test_getbucket(self):
assert self._makeit(self._token)
| apache-2.0 | Python |
4a0abc12203113819119a6d86d88a94998f6a6c9 | Fix typo | yunify/qingcloud-cli | qingcloud_cli/iaas_client/actions/image/delete_images.py | qingcloud_cli/iaas_client/actions/image/delete_images.py | # coding: utf-8
from qingcloud_cli.misc.utils import explode_array
from qingcloud_cli.iaas_client.actions.base import BaseAction
class DeleteImagesAction(BaseAction):
    """CLI action for the QingCloud DeleteImages API call."""

    action = 'DeleteImages'
    command = 'delete-images'
    usage = '%(prog)s -i image_id, ... [-f <conf_file>]'

    @classmethod
    def add_ext_arguments(cls, parser):
        """Register this action's extra argparse options."""
        parser.add_argument('-i', '--images', dest='images',
                action='store', type=str, default='',
                help='The comma separated IDs of images you want to delete. ')

    @classmethod
    def build_directive(cls, options):
        """Build the API request body, or None when no image ids were given."""
        images = explode_array(options.images)
        if not images:
            return None
        return {'images': images}
| # coding: utf-8
from qingcloud_cli.misc.utils import explode_array
from qingcloud_cli.iaas_client.actions.base import BaseAction
class DeleteImagesAction(BaseAction):
action = 'DeleteImages'
command = 'delete-images'
MSG_USAGE = '%(prog)s -i image_id, ... [-f <conf_file>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-i', '--images', dest='images',
action='store', type=str, default='',
help='The comma separated IDs of images you want to delete. ')
@classmethod
def build_directive(cls, options):
images = explode_array(options.images)
if not images:
return None
return {'images': images}
| apache-2.0 | Python |
d1a5399f37c8f446ea362b72604332f6405fe70d | Bump version to 0.3 for PyPi release | dcramer/feedreader | feedreader/__init__.py | feedreader/__init__.py | import os.path
__all__ = ('__version__', '__build__')
__version__ = (0, 3)
def _get_git_revision(path):
revision_file = os.path.join(path, 'refs', 'heads', 'master')
if not os.path.exists(revision_file):
return None
fh = open(revision_file, 'r')
try:
return fh.read()
finally:
fh.close()
def get_revision():
    """
    :returns: Revision number of this branch/checkout, if available. None if
        no revision number can be determined.
    """
    package_dir = os.path.dirname(__file__)
    checkout_dir = os.path.normpath(os.path.join(package_dir, os.pardir))
    git_dir = os.path.join(checkout_dir, '.git')
    # Guard clause: no .git directory means no revision to report.
    if not os.path.exists(git_dir):
        return None
    return _get_git_revision(git_dir)
__build__ = get_revision() | import os.path
__all__ = ('__version__', '__build__')
__version__ = (0, 2)
def _get_git_revision(path):
revision_file = os.path.join(path, 'refs', 'heads', 'master')
if not os.path.exists(revision_file):
return None
fh = open(revision_file, 'r')
try:
return fh.read()
finally:
fh.close()
def get_revision():
"""
:returns: Revision number of this branch/checkout, if available. None if
no revision number can be determined.
"""
package_dir = os.path.dirname(__file__)
checkout_dir = os.path.normpath(os.path.join(package_dir, '..'))
path = os.path.join(checkout_dir, '.git')
if os.path.exists(path):
return _get_git_revision(path)
return None
__build__ = get_revision() | bsd-2-clause | Python |
af446590c73c22d1738ba7d8331dd410dfed2d79 | allow calling script to use argparse | USC-ACTLab/crazyswarm,USC-ACTLab/crazyswarm,USC-ACTLab/crazyswarm,USC-ACTLab/crazyswarm | ros_ws/src/crazyswarm/scripts/pycrazyswarm/crazyswarm.py | ros_ws/src/crazyswarm/scripts/pycrazyswarm/crazyswarm.py | import argparse
from . import genericJoystick
class Crazyswarm:
    """Shared entry point for scripts: parses the common CLI flags and wires
    up either the simulated or the real Crazyflie server and helpers."""

    def __init__(self):
        parser = argparse.ArgumentParser()
        parser.add_argument("--sim", help="Run using simulation", action="store_true")
        parser.add_argument("--vis", help="(sim only) Visualization backend [mpl]", choices=['mpl', 'vispy'], default="mpl")
        parser.add_argument("--dt", help="(sim only) dt [0.1s]", type=float, default=0.1)
        parser.add_argument("--writecsv", help="Enable CSV output (only available in simulation)", action="store_true")
        # parse_known_args (not parse_args) so the calling script can define
        # its own additional flags with argparse without conflicting here.
        args, unknown = parser.parse_known_args()

        if args.sim:
            import crazyflieSim
            self.timeHelper = crazyflieSim.TimeHelper(args.vis, args.dt, args.writecsv)
            self.allcfs = crazyflieSim.CrazyflieServer(self.timeHelper)
        else:
            import crazyflie
            self.allcfs = crazyflie.CrazyflieServer()
            self.timeHelper = crazyflie.TimeHelper()
            if args.writecsv:
                # CSV logging is a simulator-only feature
                print("WARNING: writecsv argument ignored! This is only available in simulation.")
        self.input = genericJoystick.Joystick(self.timeHelper)
| import argparse
from . import genericJoystick
class Crazyswarm:
    """Wires up the Crazyflie server, time helper and joystick input.

    Command-line flags:
      --sim       run against the simulator instead of real hardware
      --vis       (sim only) visualization backend
      --dt        (sim only) simulation timestep in seconds
      --writecsv  (sim only) enable CSV output
    """
    def __init__(self):
        parser = argparse.ArgumentParser()
        parser.add_argument("--sim", help="Run using simulation", action="store_true")
        parser.add_argument("--vis", help="(sim only) Visualization backend [mpl]", choices=['mpl', 'vispy'], default="mpl")
        parser.add_argument("--dt", help="(sim only) dt [0.1s]", type=float, default=0.1)
        parser.add_argument("--writecsv", help="Enable CSV output (only available in simulation)", action="store_true")
        # NOTE(review): parse_args() aborts on any flag it does not recognize,
        # so a calling script cannot add its own argparse options alongside
        # these — confirm whether parse_known_args() is intended here.
        args = parser.parse_args()
        if args.sim:
            # Lazy import: simulator dependencies are only needed in sim mode.
            import crazyflieSim
            self.timeHelper = crazyflieSim.TimeHelper(args.vis, args.dt, args.writecsv)
            self.allcfs = crazyflieSim.CrazyflieServer(self.timeHelper)
        else:
            # Lazy import: real-hardware dependencies only needed here.
            import crazyflie
            self.allcfs = crazyflie.CrazyflieServer()
            self.timeHelper = crazyflie.TimeHelper()
            if args.writecsv:
                print("WARNING: writecsv argument ignored! This is only available in simulation.")
        self.input = genericJoystick.Joystick(self.timeHelper)
| mit | Python |
6aaa973595c66005d9fb471da618fa8071d456e5 | support playlist | linhua55/you-get,cnbeining/you-get,lilydjwg/you-get,zmwangx/you-get,smart-techs/you-get,linhua55/you-get,cnbeining/you-get,qzane/you-get,zmwangx/you-get,jindaxia/you-get,qzane/you-get,xyuanmu/you-get,smart-techs/you-get,xyuanmu/you-get,lilydjwg/you-get | src/you_get/extractors/yinyuetai.py | src/you_get/extractors/yinyuetai.py | #!/usr/bin/env python
__all__ = ['yinyuetai_download', 'yinyuetai_download_by_id']
from ..common import *
def yinyuetai_download_by_id(vid, title=None, output_dir='.', merge=True, info_only=False):
    """Download a single YinYueTai MV by its numeric video id.

    Queries the site's video-info JSON endpoint, picks the URL with the
    highest qualityLevel, and hands it to download_urls (unless info_only,
    in which case only the metadata is printed).
    """
    video_info = json.loads(get_html('http://www.yinyuetai.com/insite/get-video-info?json=true&videoId=%s' % vid))
    url_models = video_info['videoInfo']['coreVideoInfo']['videoUrlModels']
    # Sort ascending by quality so the last entry is the best available.
    url_models = sorted(url_models, key=lambda i: i['qualityLevel'])
    url = url_models[-1]['videoUrl']
    # Container format is taken from the URL extension (flv or mp4).
    type = ext = r1(r'\.(flv|mp4)', url)
    _, _, size = url_info(url)
    print_info(site_info, title, type, size)
    if not info_only:
        download_urls([url], title, ext, size, output_dir, merge = merge)
def yinyuetai_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
    """Download a YinYueTai video or a whole playlist.

    Playlist URLs are expanded by scraping the page for data-id attributes
    and recursing into this function once per video; single-video URLs are
    resolved to a numeric id and delegated to yinyuetai_download_by_id.
    """
    playlist = r1(r'http://\w+.yinyuetai.com/playlist/(\d+)', url)
    if playlist:
        html = get_html(url)
        data_ids = re.findall(r'data-index="\d+"\s*data-id=(\d+)', html)
        for data_id in data_ids:
            # Recurse once per playlist entry using its canonical video URL.
            yinyuetai_download('http://v.yinyuetai.com/video/' + data_id,
                               output_dir=output_dir, merge=merge, info_only=info_only)
        return
    id = r1(r'http://\w+.yinyuetai.com/video/(\d+)', url)
    html = get_html(url, 'utf-8')
    # Prefer the og:title meta tag; fall back to the <title> element.
    title = r1(r'<meta property="og:title"\s+content="([^"]+)"/>', html) or r1(r'<title>(.*)', html)
    assert title
    title = parse.unquote(title)
    title = escape_file_path(title)
    yinyuetai_download_by_id(id, title, output_dir, merge=merge, info_only=info_only)
# Extractor registration: printable site name, the download entry point, and
# an explicit "not supported" handler for the playlist entry point.
site_info = "YinYueTai.com"
download = yinyuetai_download
download_playlist = playlist_not_supported('yinyuetai')
| #!/usr/bin/env python
__all__ = ['yinyuetai_download', 'yinyuetai_download_by_id']
from ..common import *
def yinyuetai_download_by_id(vid, title=None, output_dir='.', merge=True, info_only=False):
video_info = json.loads(get_html('http://www.yinyuetai.com/insite/get-video-info?json=true&videoId=%s' % vid))
url_models = video_info['videoInfo']['coreVideoInfo']['videoUrlModels']
url_models = sorted(url_models, key=lambda i: i['qualityLevel'])
url = url_models[-1]['videoUrl']
type = ext = r1(r'\.(flv|mp4)', url)
_, _, size = url_info(url)
print_info(site_info, title, type, size)
if not info_only:
download_urls([url], title, ext, size, output_dir, merge = merge)
def yinyuetai_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
id = r1(r'http://\w+.yinyuetai.com/video/(\d+)$', url.split('?')[0])
assert id
html = get_html(url, 'utf-8')
title = r1(r'<meta property="og:title"\s+content="([^"]+)"/>', html) or r1(r'<title>(.*)', html)
assert title
title = parse.unquote(title)
title = escape_file_path(title)
yinyuetai_download_by_id(id, title, output_dir, merge = merge, info_only = info_only)
site_info = "YinYueTai.com"
download = yinyuetai_download
download_playlist = playlist_not_supported('yinyuetai')
| mit | Python |
93593a321fa941c2284a9c5520d06d47d380088c | Fix invalid updating of evecentral caching | kriberg/stationspinner,kriberg/stationspinner | stationspinner/evecentral/models.py | stationspinner/evecentral/models.py | from django.db import models
from stationspinner.libs.pragma import get_location_name
from stationspinner.sde.models import InvType
from datetime import datetime, timedelta
from pytz import UTC
class Market(models.Model):
    """A market location whose cached price data expires periodically."""
    # Location (station/region) identifier — resolved to a display name via
    # get_location_name(); presumably an EVE locationID (TODO confirm).
    locationID = models.IntegerField()
    # When the cached market data expires; null until the first update.
    cached_until = models.DateTimeField(null=True)
    def updated(self):
        # Mark the data fresh for the next 6 hours (UTC-aware timestamp).
        self.cached_until = datetime.now(tz=UTC) + timedelta(hours=6)
        self.save()
    def __unicode__(self):
        return get_location_name(self.locationID)
class MarketItem(models.Model):
typeID = models.IntegerField()
locationID = models.IntegerField()
typeName = models.CharField(max_length=255)
buy_volume = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_avg = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_max = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_min = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_stddev = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_median = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_percentile = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_volume = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_avg = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_max = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_min = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_stddev = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_median = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_percentile = models.DecimalField(max_digits=20, decimal_places=2, null=True)
timestamp = models.DateTimeField(auto_now=True)
def __unicode__(self):
try:
return u'{0} @ {1}'.format(
InvType.objects.get(pk=self.locationID).typeName,
get_location_name(self.locationID))
except:
return u'{0} @ {1}'.format(self.typeID, self.locationID) | from django.db import models
from stationspinner.libs.pragma import get_location_name
from stationspinner.sde.models import InvType
from datetime import datetime, timedelta
from pytz import UTC
class Market(models.Model):
    """A market location whose cached price data expires periodically."""
    # Location (station/region) identifier — resolved to a display name via
    # get_location_name(); presumably an EVE locationID (TODO confirm).
    locationID = models.IntegerField()
    # When the cached market data expires; null until the first update.
    cached_until = models.DateTimeField(null=True)

    def updated(self):
        # Stamp the expiry on the field that actually exists. The previous
        # code assigned to self.last_updated, which is not a model field, so
        # the save() never persisted a new cache expiry.
        self.cached_until = datetime.now(tz=UTC) + timedelta(hours=6)
        self.save()

    def __unicode__(self):
        return get_location_name(self.locationID)
class MarketItem(models.Model):
typeID = models.IntegerField()
locationID = models.IntegerField()
typeName = models.CharField(max_length=255)
buy_volume = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_avg = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_max = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_min = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_stddev = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_median = models.DecimalField(max_digits=20, decimal_places=2, null=True)
buy_percentile = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_volume = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_avg = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_max = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_min = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_stddev = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_median = models.DecimalField(max_digits=20, decimal_places=2, null=True)
sell_percentile = models.DecimalField(max_digits=20, decimal_places=2, null=True)
timestamp = models.DateTimeField(auto_now=True)
def __unicode__(self):
try:
return u'{0} @ {1}'.format(
InvType.objects.get(pk=self.locationID).typeName,
get_location_name(self.locationID))
except:
return u'{0} @ {1}'.format(self.typeID, self.locationID) | agpl-3.0 | Python |
d8e0fb484693a9ae48568ff653509da734240b3f | Set working directory in python_version_compat_test.py | Khan/khan-linter,Khan/khan-linter,Khan/khan-linter,Khan/khan-linter | python_version_compat_test.py | python_version_compat_test.py | #!/usr/bin/env python
import os
import subprocess
import unittest
class TestPy2Py3Compat(unittest.TestCase):
    """We need to be compatible with both python2 and 3.

    Test that we can at least import runlint.py under both.
    """

    def _check_import(self, interpreter):
        """Run `<interpreter> -c 'import runlint'` from this file's dir."""
        # If we're running this test from an external directory (e.g., from
        # webapp), take care to set the working directory for the subprocess
        # call.  Note that if we're running this test from within
        # khan-linter-src, we need the cwd to be None (inherit), rather than
        # the empty string.
        cwd = os.path.dirname(__file__) or None
        subprocess.check_call([interpreter, '-c', 'import runlint'], cwd=cwd)

    def test_python2_compat(self):
        self._check_import('python2')

    def test_python3_compat(self):
        self._check_import('python3')
| #!/usr/bin/env python
import os
import subprocess
import unittest
class TestPy2Py3Compat(unittest.TestCase):
    """We need to be compatible with both python2 and 3.

    Test that we can at least import runlint.py under both.
    """

    def test_python2_compat(self):
        # Run from the directory containing this file so `import runlint`
        # resolves even when the test is launched from another project's cwd.
        # os.path.dirname() returns '' for a bare filename; subprocess needs
        # None (inherit cwd) in that case, not the empty string.
        cwd = os.path.dirname(__file__) or None
        subprocess.check_call(['python2', '-c', 'import runlint'], cwd=cwd)

    def test_python3_compat(self):
        cwd = os.path.dirname(__file__) or None
        subprocess.check_call(['python3', '-c', 'import runlint'], cwd=cwd)
subprocess.check_call(['python3', '-c', 'import runlint'])
| apache-2.0 | Python |
664258c825a68ac46c8305cb09350a7be0ae8d1c | Update __init__.py | williamFalcon/pytorch-lightning,williamFalcon/pytorch-lightning | pytorch_lightning/__init__.py | pytorch_lightning/__init__.py | """Root package info."""
__version__ = '0.9.0rc11'
__author__ = 'William Falcon et al.'
__author_email__ = 'waf2107@columbia.edu'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright (c) 2018-2020, %s.' % __author__
__homepage__ = 'https://github.com/PyTorchLightning/pytorch-lightning'
# this has to be simple string, see: https://github.com/pypa/twine/issues/522
__docs__ = (
"PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers."
" Scale your models. Write less boilerplate."
)
__long_docs__ = """
Lightning is a way to organize your PyTorch code to decouple the science code from the engineering.
It's more of a style-guide than a framework.
In Lightning, you organize your code into 3 distinct categories:
1. Research code (goes in the LightningModule).
2. Engineering code (you delete, and is handled by the Trainer).
3. Non-essential research code (logging, etc. this goes in Callbacks).
Although your research/production project might start simple, once you add things like GPU AND TPU training,
16-bit precision, etc, you end up spending more time engineering than researching.
Lightning automates AND rigorously tests those parts for you.
Overall, Lightning guarantees rigorously tested, correct, modern best practices for the automated parts.
Documentation
-------------
- https://pytorch-lightning.readthedocs.io/en/latest
- https://pytorch-lightning.readthedocs.io/en/stable
"""
import logging as python_logging
_logger = python_logging.getLogger("lightning")
_logger.addHandler(python_logging.StreamHandler())
_logger.setLevel(python_logging.INFO)
try:
# This variable is injected in the __builtins__ by the build
# process. It used to enable importing subpackages of skimage when
# the binaries are not built
__LIGHTNING_SETUP__
except NameError:
__LIGHTNING_SETUP__ = False
if __LIGHTNING_SETUP__:
import sys # pragma: no-cover
sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover
# We are not importing the rest of the lightning during the build process, as it may not be compiled yet
else:
from pytorch_lightning.core import LightningDataModule, LightningModule
from pytorch_lightning.core.step_result import TrainResult, EvalResult
from pytorch_lightning.callbacks import Callback
from pytorch_lightning.trainer import Trainer
from pytorch_lightning.utilities.seed import seed_everything
from pytorch_lightning import metrics
__all__ = [
'Trainer',
'LightningDataModule',
'LightningModule',
'Callback',
'seed_everything',
'metrics',
'EvalResult',
'TrainResult',
]
# necessary for regular bolts imports. Skip exception since bolts is not always installed
try:
from pytorch_lightning import bolts
except ImportError:
pass
# __call__ = __all__
# for compatibility with namespace packages
__import__('pkg_resources').declare_namespace(__name__)
| """Root package info."""
__version__ = '0.9.0rc10'
__author__ = 'William Falcon et al.'
__author_email__ = 'waf2107@columbia.edu'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright (c) 2018-2020, %s.' % __author__
__homepage__ = 'https://github.com/PyTorchLightning/pytorch-lightning'
# this has to be simple string, see: https://github.com/pypa/twine/issues/522
__docs__ = (
"PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers."
" Scale your models. Write less boilerplate."
)
__long_docs__ = """
Lightning is a way to organize your PyTorch code to decouple the science code from the engineering.
It's more of a style-guide than a framework.
In Lightning, you organize your code into 3 distinct categories:
1. Research code (goes in the LightningModule).
2. Engineering code (you delete, and is handled by the Trainer).
3. Non-essential research code (logging, etc. this goes in Callbacks).
Although your research/production project might start simple, once you add things like GPU AND TPU training,
16-bit precision, etc, you end up spending more time engineering than researching.
Lightning automates AND rigorously tests those parts for you.
Overall, Lightning guarantees rigorously tested, correct, modern best practices for the automated parts.
Documentation
-------------
- https://pytorch-lightning.readthedocs.io/en/latest
- https://pytorch-lightning.readthedocs.io/en/stable
"""
import logging as python_logging
_logger = python_logging.getLogger("lightning")
_logger.addHandler(python_logging.StreamHandler())
_logger.setLevel(python_logging.INFO)
try:
# This variable is injected in the __builtins__ by the build
# process. It used to enable importing subpackages of skimage when
# the binaries are not built
__LIGHTNING_SETUP__
except NameError:
__LIGHTNING_SETUP__ = False
if __LIGHTNING_SETUP__:
import sys # pragma: no-cover
sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover
# We are not importing the rest of the lightning during the build process, as it may not be compiled yet
else:
from pytorch_lightning.core import LightningDataModule, LightningModule
from pytorch_lightning.core.step_result import TrainResult, EvalResult
from pytorch_lightning.callbacks import Callback
from pytorch_lightning.trainer import Trainer
from pytorch_lightning.utilities.seed import seed_everything
from pytorch_lightning import metrics
__all__ = [
'Trainer',
'LightningDataModule',
'LightningModule',
'Callback',
'seed_everything',
'metrics',
'EvalResult',
'TrainResult',
]
# necessary for regular bolts imports. Skip exception since bolts is not always installed
try:
from pytorch_lightning import bolts
except ImportError:
pass
# __call__ = __all__
# for compatibility with namespace packages
__import__('pkg_resources').declare_namespace(__name__)
| apache-2.0 | Python |
1e87d8714414209d6527458ca79a69292a4c68e8 | Update compare.py | dilipbobby/DataScience | Numpy/compare.py | Numpy/compare.py | import time
import numpy as np
size_of_vec = 10000
def pure_python_version():
    """Time an element-wise vector addition done with plain Python lists.

    Deliberately uses an index loop and append: it measures the
    interpreted-loop baseline that numpy_version() is compared against,
    so do not "optimize" it.
    """
    t1 = time.time()
    X = range(size_of_vec)
    Y = range(size_of_vec)
    Z = []
    for i in range(len(X)):
        Z.append(X[i] + Y[i])
    # Elapsed wall-clock seconds for the loop above.
    return time.time() - t1
def numpy_version():
    """Time the same element-wise vector addition done with NumPy arrays."""
    t1 = time.time()
    X = np.arange(size_of_vec)
    Y = np.arange(size_of_vec)
    # Vectorized add runs in native code; this is what gets compared against
    # the pure-Python loop.
    Z = X + Y
    return time.time() - t1
t1 = pure_python_version()
t2 = numpy_version()
print(t1, t2)
print("this example Numpy is " + str(t1/t2) + " faster!")
| import time
size_of_vec = 1000
def pure_python_version():
t1 = time.time()
X = range(size_of_vec)
Y = range(size_of_vec)
Z = []
for i in range(len(X)):
Z.append(X[i] + Y[i])
return time.time() - t1
def numpy_version():
    """Time an element-wise vector addition done with NumPy arrays."""
    # This module never imports numpy at the top level (only `import time`),
    # so bind it locally here; without this line the function raises
    # NameError on `np`.
    import numpy as np
    t1 = time.time()
    X = np.arange(size_of_vec)
    Y = np.arange(size_of_vec)
    # Vectorized add runs in native code.
    Z = X + Y
    return time.time() - t1
t1 = pure_python_version()
t2 = numpy_version()
print(t1, t2)
print("Numpy is in this example " + str(t1/t2) + " faster!")
| apache-2.0 | Python |
61966ebfdb45c5a4de67270cec7d04bdc26ce3b4 | add load command | dariusbakunas/rawdisk | rawdisk/modes/cli/cli_mode.py | rawdisk/modes/cli/cli_mode.py | import logging
import os
from rawdisk.session import Session
from rawdisk.modes.mode import Mode
from tabulate import tabulate
from cmd import Cmd
class CliMode(Mode):
    """Interactive mode: drops the user into the rawdisk command shell."""
    @staticmethod
    def entry(args=None):
        # `args` is accepted for interface parity with other modes but is
        # not used by the shell itself.
        cli = CliShell()
        cli.initialize()
        cli.cmdloop()
class CliShell(Cmd):
    """cmd.Cmd-based interactive shell.

    Each do_<name> method becomes a shell command, and its docstring is
    what the built-in `help` command prints for it.
    """
    def __init__(self):
        super().__init__()
        self.prompt = self.get_prompt()
        self.ruler = '-'
        self.intro = 'Welcome to rawdisk shell. ' \
                     'Type help or ? to list command.\n'
        self.session = Session()
        self.logger = logging.getLogger(__name__)
    def initialize(self):
        # Plugin discovery is deferred out of __init__ so constructing the
        # shell stays side-effect free.
        self.session.load_plugins()
    def do_plugins(self, arg):
        """List loaded plugins"""
        plugins = self.session.manager.fs_plugins
        data = [
            [plugin.name, plugin.author, plugin.version, plugin.description]
            for plugin in plugins]
        table = tabulate(
            tabular_data=data,
            headers=['NAME', 'AUTHOR', 'VERSION', 'DESCRIPTION'])
        print(table)
    def do_load(self, filename):
        """Load disk image for analysis"""
        try:
            self.session.load(filename)
        except IOError as e:
            # Report the failure (e.g. missing/unreadable image) instead of
            # crashing out of the command loop.
            self.logger.error(e.strerror)
    def do_shell(self, command):
        """
        Execute shell command
        Use shell [command] or ![command] syntax
        """
        os.system(command)
    def do_quit(self, arg):
        """Exit CLI"""
        self.close()
        # Returning True tells cmd.Cmd to stop the command loop.
        return True
    def do_exit(self, arg):
        """Exit CLI"""
        self.close()
        return True
    def get_prompt(self):
        return 'rawdisk > '
    def close(self):
        # Placeholder for cleanup when the shell exits.
        return
if __name__ == '__main__':
CliMode.entry()
| import logging
import os
from rawdisk.session import Session
from rawdisk.modes.mode import Mode
from tabulate import tabulate
from cmd import Cmd
class CliMode(Mode):
@staticmethod
def entry(args=None):
cli = CliShell()
cli.initialize()
cli.cmdloop()
class CliShell(Cmd):
def __init__(self):
super().__init__()
self.prompt = self.get_prompt()
self.ruler = '-'
self.intro = 'Welcome to rawdisk shell. ' \
'Type help or ? to list command.\n'
self.session = Session()
self.logger = logging.getLogger(__name__)
def initialize(self):
self.session.load_plugins()
def do_plugins(self, arg):
"""List loaded plugins"""
plugins = self.session.manager.fs_plugins
data = [
[plugin.name, plugin.author, plugin.version, plugin.description]
for plugin in plugins]
table = tabulate(
tabular_data=data,
headers=['NAME', 'AUTHOR', 'VERSION', 'DESCRIPTION'])
print(table)
def do_shell(self, command):
"""
Execute shell command
Use shell [command] or ![command] syntax
"""
os.system(command)
def do_quit(self, arg):
"""Exit CLI"""
self.close()
return True
def do_exit(self, arg):
"""Exit CLI"""
self.close()
return True
def get_prompt(self):
return 'rawdisk > '
def close(self):
return
if __name__ == '__main__':
CliMode.entry()
| bsd-3-clause | Python |
fa56c04858dd1744e196aca6f4bf82a53b8f681e | Fix post NoneType error. | fi-ksi/web-backend,fi-ksi/web-backend | util/post.py | util/post.py | from db import session
import model
def to_json(post, user_id, last_visit=None, last_visit_filled=False):
    """Serialize a forum post, flagging whether it is new for *user_id*.

    Callers serializing many posts from one thread can pass *last_visit*
    themselves (with last_visit_filled=True) to avoid issuing one
    ThreadVisit query per post.
    """
    if user_id:
        if not last_visit_filled:
            last_visit = session.query(model.ThreadVisit).\
                filter(model.ThreadVisit.user == user_id, model.ThreadVisit.thread == post.thread, model.ThreadVisit.last_last_visit.isnot(None)).first()
        # A post is new when there is no recorded previous visit at all, or
        # the previous visit predates the post's publication time.
        is_new = True if (last_visit is None) or (last_visit.last_last_visit is None) else last_visit.last_last_visit < post.published_at
    else:
        # Anonymous viewers never get "new" markers.
        is_new = False
    return {
        'id': post.id,
        'thread': post.thread,
        'author': post.author,
        'body': post.body,
        'published_at': post.published_at.isoformat(),
        'reaction': [ reaction.id for reaction in post.reactions ],
        'is_new': is_new
    }
| from db import session
import model
def to_json(post, user_id, last_visit=None, last_visit_filled=False):
if user_id:
if not last_visit_filled:
last_visit = session.query(model.ThreadVisit).\
filter(model.ThreadVisit.user == user_id, model.ThreadVisit.thread == post.thread, model.ThreadVisit.last_last_visit.isnot(None)).first()
is_new = True if not last_visit or last_visit.last_last_visit is None else last_visit.last_last_visit < post.published_at
else:
is_new = False
return {
'id': post.id,
'thread': post.thread,
'author': post.author,
'body': post.body,
'published_at': post.published_at.isoformat(),
'reaction': [ reaction.id for reaction in post.reactions ],
'is_new': is_new
}
| mit | Python |
93be56b1e462a990f668369d4fa887d783a7bae4 | Remove unused imports | tochev/obshtestvo.bg,tochev/obshtestvo.bg,tochev/obshtestvo.bg | urls.py | urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
from web.views import home, wip, about, project, support, members, contact, faq, report
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', home.HomeView.as_view(), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^wip\.html$', wip.WipView.as_view(), name='wip'),
url(r'^about\.html$', about.AboutView.as_view(), name='about'),
url(r'^report\.html$', report.ReportView.as_view(), name='report'),
url(r'^faq\.html$', faq.FaqView.as_view(), name='faq'),
url(r'^support\.html$', support.SupportView.as_view(),
name='support'),
url(r'^members$', members.MembersView.as_view(), name='members'),
url(r'^contact$', contact.ContactView.as_view(), name='contact'),
url(r'^project/(?P<name>[^/]+)\.html$',
project.ProjectView.as_view(),
name='project'),
)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
import autocomplete_light
from web.views import home, wip, about, project, support, members, contact, faq, report
autocomplete_light.autodiscover()
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', home.HomeView.as_view(), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^wip\.html$', wip.WipView.as_view(), name='wip'),
url(r'^about\.html$', about.AboutView.as_view(), name='about'),
url(r'^report\.html$', report.ReportView.as_view(), name='report'),
url(r'^faq\.html$', faq.FaqView.as_view(), name='faq'),
url(r'^support\.html$', support.SupportView.as_view(),
name='support'),
url(r'^members$', members.MembersView.as_view(), name='members'),
url(r'^contact$', contact.ContactView.as_view(), name='contact'),
url(r'^project/(?P<name>[^/]+)\.html$',
project.ProjectView.as_view(),
name='project'),
)
| unlicense | Python |
3a840ceafddc480e58693dcfe552db0be7b6104c | fix url rewriting | thoas/i386,thoas/i386,thoas/i386,thoas/i386 | urls.py | urls.py | from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^gobelins_project/', include('gobelins_project.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
(r'^admin/doc/', include('django.contrib.admindocs.urls')),
(r'^media/(?P<path>.*)$', 'django.views.static.serve', \
{'document_root': settings.MEDIA_ROOT}),
# Uncomment the next line to enable the admin:
(r'^admin/(.*)', admin.site.root),
(r'^survey/', include('gobelins_project.application.survey.urls')),
(r'', 'gobelins_project.application.survey.views.index'),
)
| from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
# URL resolution is order-sensitive: Django uses the first pattern that
# matches, and r'' matches every request path, so the catch-all index must
# come last — otherwise the survey URLs below it are unreachable.
urlpatterns = patterns('',
    # Example:
    # (r'^gobelins_project/', include('gobelins_project.foo.urls')),
    # Uncomment the admin/doc line below and add 'django.contrib.admindocs'
    # to INSTALLED_APPS to enable admin documentation:
    (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    (r'^media/(?P<path>.*)$', 'django.views.static.serve', \
        {'document_root': settings.MEDIA_ROOT}),
    # Uncomment the next line to enable the admin:
    (r'^admin/(.*)', admin.site.root),
    (r'^survey/', include('gobelins_project.application.survey.urls')),
    (r'', 'gobelins_project.application.survey.views.index'),
)
| mit | Python |
3eb969baa756d930b2f3bba4914714751e189c86 | rename single-document search url to be more resource-oriented | mprefer/findingaids,emory-libraries/findingaids,emory-libraries/findingaids,mprefer/findingaids | findingaids/fa/urls.py | findingaids/fa/urls.py | from django.conf.urls.defaults import *
TITLE_LETTERS = '[a-zA-Z]'
title_urlpatterns = patterns('findingaids.fa.views',
url('^$', 'browse_titles', name='browse-titles'),
url(r'^(?P<letter>%s)$' % TITLE_LETTERS, 'titles_by_letter', name='titles-by-letter')
)
# patterns for ead document id and series id
# defined here for use in urls and in custom management commands that check id patterns
EADID_URL_REGEX = "(?P<id>[-_A-Za-z0-9.]+)"
series_id = "[a-zA-Z0-9-._]+"
# urls under a single document url (e.g., /documents/abbey244/ )
findingaid_parts = patterns('findingaids.fa.views',
url(r'^$', 'findingaid', name='findingaid'),
url(r'^EAD/$', 'eadxml', name='eadxml'),
# full finding aid as simple html (html version of pdf, for testing)
url(r'^full/$', 'full_findingaid', {'mode': 'html'}, name='full-findingaid'),
# view access to XSL-FO used to generate pdf (for testing)
url(r'^xsl-fo/$', 'full_findingaid', {'mode': 'xsl-fo'}, name='xslfo'),
url(r'^printable/$', 'full_findingaid', {'mode': 'pdf'}, name='printable'),
url(r'^items/$', 'document_search', name='singledoc-search'),
url(r'^(?P<series_id>%s)/$' % series_id, 'series_or_index', name='series-or-index'),
# NOTE: django can't reverse url patterns with optional parameters
# so series, subseries, and sub-subseries urls have to be defined separately
url(r'^(?P<series_id>%(re)s)/(?P<series2_id>%(re)s)/$' % {'re': series_id},
'series_or_index', name='series2'),
url(r'^(?P<series_id>%(re)s)/(?P<series2_id>%(re)s)/(?P<series3_id>%(re)s)/$' \
% {'re': series_id}, 'series_or_index', name='series3'),
)
findingaid_urlpatterns = patterns('findingaids.fa.views',
(r'^%s/' % EADID_URL_REGEX, include(findingaid_parts)),
)
urlpatterns = patterns('findingaids.fa.views',
(r'^titles/', include(title_urlpatterns)),
(r'^documents/', include(findingaid_urlpatterns)),
url(r'^search/?', 'search', name='search')
)
| from django.conf.urls.defaults import *
TITLE_LETTERS = '[a-zA-Z]'
title_urlpatterns = patterns('findingaids.fa.views',
url('^$', 'browse_titles', name='browse-titles'),
url(r'^(?P<letter>%s)$' % TITLE_LETTERS, 'titles_by_letter', name='titles-by-letter')
)
# patterns for ead document id and series id
# defined here for use in urls and in custom management commands that check id patterns
EADID_URL_REGEX = "(?P<id>[-_A-Za-z0-9.]+)"
series_id = "[a-zA-Z0-9-._]+"
# urls under a single document url (e.g., /documents/abbey244/ )
findingaid_parts = patterns('findingaids.fa.views',
url(r'^$', 'findingaid', name='findingaid'),
url(r'^EAD/$', 'eadxml', name='eadxml'),
# full finding aid as simple html (html version of pdf, for testing)
url(r'^full/$', 'full_findingaid', {'mode': 'html'}, name='full-findingaid'),
# view access to XSL-FO used to generate pdf (for testing)
url(r'^xsl-fo/$', 'full_findingaid', {'mode': 'xsl-fo'}, name='xslfo'),
url(r'^printable/$', 'full_findingaid', {'mode': 'pdf'}, name='printable'),
url(r'^search/$', 'document_search', name='singledoc-search'),
url(r'^(?P<series_id>%s)/$' % series_id, 'series_or_index', name='series-or-index'),
# NOTE: django can't reverse url patterns with optional parameters
# so series, subseries, and sub-subseries urls have to be defined separately
url(r'^(?P<series_id>%(re)s)/(?P<series2_id>%(re)s)/$' % {'re': series_id},
'series_or_index', name='series2'),
url(r'^(?P<series_id>%(re)s)/(?P<series2_id>%(re)s)/(?P<series3_id>%(re)s)/$' \
% {'re': series_id}, 'series_or_index', name='series3'),
)
findingaid_urlpatterns = patterns('findingaids.fa.views',
(r'^%s/' % EADID_URL_REGEX, include(findingaid_parts)),
)
urlpatterns = patterns('findingaids.fa.views',
(r'^titles/', include(title_urlpatterns)),
(r'^documents/', include(findingaid_urlpatterns)),
url(r'^search/?', 'search', name='search')
)
| apache-2.0 | Python |
0efeaa258b19d5b1ba204cc55fbdb6969e0f3e64 | Adjust for pep8 package rename. | spookylukey/flake8-respect-noqa | flake8_respect_noqa.py | flake8_respect_noqa.py | # -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
try:
from pep8 import StandardReport, noqa
except ImportError:
# Try the new (as of 2016-June) pycodestyle package.
from pycodestyle import StandardReport, noqa
class RespectNoqaReport(StandardReport):
    """StandardReport that drops any error raised on a line carrying '# noqa'."""

    def error(self, line_number, offset, text, check):
        # line_number is 1-based; guard against it pointing past the buffer.
        idx = line_number - 1
        if len(self.lines) > idx and noqa(self.lines[idx]):
            return None
        return super(RespectNoqaReport, self).error(line_number, offset,
                                                    text, check)
class RespectNoqa(object):
    """Flake8 plugin shim that installs RespectNoqaReport as the reporter."""
    name = 'flake8-respect-noqa'
    version = __version__
    def __init__(self, *args, **kwargs):
        # Flake8 instantiates checker plugins; there is nothing to set up.
        pass
    @classmethod
    def parse_options(cls, options):
        # The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
        # or put "jobs = 1" in your [flake8] config.
        # Otherwise, flake8 replaces this reporter with its own.
        # See https://gitlab.com/pycqa/flake8/issues/66
        options.reporter = RespectNoqaReport
        options.report = RespectNoqaReport(options)
| # -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
import pep8
class RespectNoqaReport(pep8.StandardReport):
def error(self, line_number, offset, text, check):
if len(self.lines) > line_number - 1 and pep8.noqa(self.lines[line_number - 1]):
return
else:
return super(RespectNoqaReport, self).error(line_number, offset,
text, check)
class RespectNoqa(object):
name = 'flake8-respect-noqa'
version = __version__
def __init__(self, *args, **kwargs):
pass
@classmethod
def parse_options(cls, options):
# The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
# or put "jobs = 1" in your [flake8] config.
# Otherwise, flake8 replaces this reported with it's own.
# See https://gitlab.com/pycqa/flake8/issues/66
options.reporter = RespectNoqaReport
options.report = RespectNoqaReport(options)
| mit | Python |
eff06862c52af4ae03cf51373469e7eaa6c7a168 | Verify params is not None | kshvmdn/cobalt-uoft-python | cobaltuoft/endpoints/__init__.py | cobaltuoft/endpoints/__init__.py | from collections import OrderedDict
class Endpoints:
    """Builds request URLs/query params for the Cobalt API and dispatches them."""

    host = 'http://cobalt.qas.im/api/1.0'

    @staticmethod
    def run(api, get, endpoint=None, params=None, map=None):
        """Resolve the endpoint, build url and query params, invoke *get*."""
        resolved = Endpoints._parse_endpoint(endpoint)
        full_url = Endpoints._parse_url(api, resolved, params)
        query = Endpoints._parse_params(resolved, params, map)
        return get(url=full_url, params=query)

    @staticmethod
    def _process_filter(queries, map=None):
        """Collapse [[(k, v), ...], ...] into 'k:v OR ...' groups AND-joined.

        A plain string is passed through untouched; keys absent from *map*
        (when a map is given) are dropped.
        """
        if type(queries) == str:
            return queries
        and_parts = []
        for group in queries:
            or_parts = ['%s:%s' % (key, val)
                        for key, val in group
                        if not (map and key.lower() not in map)]
            and_parts.append(' OR '.join(or_parts))
        return ' AND '.join(and_parts)

    @staticmethod
    def _parse_endpoint(endpoint):
        """Normalize to 'search' or 'filter'; everything else becomes ''."""
        cleaned = (endpoint or '').lower().strip().replace('/', '')
        if cleaned in ('search', 'filter'):
            return cleaned
        return ''

    @staticmethod
    def _parse_url(api, endpoint, params):
        """Join host/api/endpoint; append an id or date path segment if given."""
        url = '%s/%s/%s' % (Endpoints.host, api, endpoint)
        # Only the bare endpoint takes a trailing id/date segment, and only
        # when params were actually supplied (params may be None).
        if params and endpoint == '':
            if 'id' in params:
                url += params['id']
            elif 'date' in params:
                url += params['date']
        return url

    @staticmethod
    def _parse_params(endpoint, params, map=None):
        """Keep only sort/limit/skip/q; a date or id param means no query dict."""
        if not params:
            return None
        accepted = OrderedDict()
        for name, value in params.items():
            name = name.lower()
            if name in ('date', 'id'):
                # date/id are path segments, not query params.
                return None
            if name not in ('sort', 'limit', 'skip', 'q'):
                continue
            if map and endpoint == 'filter' and name == 'q':
                value = Endpoints._process_filter(value, map)
            accepted[name] = value
        return dict(accepted)
| from collections import OrderedDict
class Endpoints:
    """Static helpers for building and issuing Cobalt API requests.

    Resolves the request URL and the whitelisted query parameters for a
    given API and endpoint ('search', 'filter', or '' for the resource
    root), then delegates the HTTP call to the injected ``get`` function.
    """

    host = 'http://cobalt.qas.im/api/1.0'

    @staticmethod
    def run(api, get, endpoint=None, params=None, map=None):
        """Resolve URL/params for *api* and perform the request via *get*."""
        endpoint = Endpoints._parse_endpoint(endpoint)
        url = Endpoints._parse_url(api, endpoint, params)
        params = Endpoints._parse_params(endpoint, params, map)
        return get(url=url, params=params)

    @staticmethod
    def _process_filter(queries, map=None):
        """Join nested (key, value) query groups into a Cobalt filter string.

        *queries* is either a ready-made filter string or a list of groups,
        each group a list of (key, value) pairs, e.g.
        [[(), (), ()], [(), (), ()], [()]].  Pairs within a group are OR-ed,
        groups are AND-ed.  Keys absent from *map* (when given) are dropped.
        """
        if isinstance(queries, str):
            return queries
        groups = []
        for group in queries:
            clauses = []
            for key, value in group:
                if map and key.lower() not in map:
                    continue
                clauses.append('%s:%s' % (key, value))
            groups.append(' OR '.join(clauses))
        return ' AND '.join(groups)

    @staticmethod
    def _parse_endpoint(endpoint):
        """Normalize *endpoint* to 'search', 'filter', or '' (resource root)."""
        if not endpoint:
            endpoint = ''
        endpoint = endpoint.lower().strip().replace('/', '')
        return endpoint if endpoint in ('search', 'filter') else ''

    @staticmethod
    def _parse_url(api, endpoint, params):
        """Build the request URL; 'id'/'date' params become path segments."""
        url = '%s/%s/%s' % (Endpoints.host, api, endpoint)
        # BUGFIX: verify params is not None before the membership tests;
        # `'date' in None` raises TypeError.
        if params and endpoint == '' and ('date' in params or 'id' in params):
            url += params['id'] if 'id' in params else params['date']
        return url

    @staticmethod
    def _parse_params(endpoint, params, map=None):
        """Whitelist query parameters.

        Returns None when no params were given or when the request is
        addressed by 'id'/'date' (those travel in the URL path instead).
        """
        if not params:
            return None
        parsed_params = OrderedDict()
        for param, value in params.items():
            param = param.lower()
            if param in ('date', 'id'):
                return None
            if param not in ('sort', 'limit', 'skip', 'q'):
                continue
            if map and endpoint == 'filter' and param == 'q':
                value = Endpoints._process_filter(value, map)
            parsed_params[param] = value
        return dict(parsed_params)
| mit | Python |
1271ea5365d9693722066f05f2fea226a3de6ed5 | Use isinstance so that all subclasses of list and str are supported | chiangf/Flask-Elasticsearch | flask_elasticsearch.py | flask_elasticsearch.py | from elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticsearch(object):
    """Flask extension exposing a per-application-context Elasticsearch client.

    Attribute access on the extension is proxied to a lazily created
    ``elasticsearch.Elasticsearch`` instance stored on the current Flask
    context, so each context gets its own client configured from
    ``app.config``.
    """

    def __init__(self, app=None, **kwargs):
        # Supports both direct construction and the app-factory pattern
        # (init_app called later).
        self.app = app
        if app is not None:
            self.init_app(app, **kwargs)

    def init_app(self, app, **kwargs):
        """Register config defaults and teardown hooks on *app*.

        Extra keyword arguments are forwarded verbatim to the
        ``Elasticsearch`` constructor when the client is first created.
        """
        app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
        app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
        self.elasticsearch_options = kwargs
        # Use the newstyle teardown_appcontext if it's available,
        # otherwise fall back to the request context
        if hasattr(app, 'teardown_appcontext'):
            app.teardown_appcontext(self.teardown)
        else:
            app.teardown_request(self.teardown)

    def __getattr__(self, item):
        # Proxy any unknown attribute to the context-local client,
        # creating the client on first use.
        ctx = stack.top
        if ctx is not None:
            if not hasattr(ctx, 'elasticsearch'):
                # A plain string becomes a single-host list; a list is
                # used as-is.
                if isinstance(ctx.app.config.get('ELASTICSEARCH_HOST'), str):
                    hosts = [ctx.app.config.get('ELASTICSEARCH_HOST')]
                elif isinstance(ctx.app.config.get('ELASTICSEARCH_HOST'), list):
                    hosts = ctx.app.config.get('ELASTICSEARCH_HOST')
                # NOTE(review): if ELASTICSEARCH_HOST is neither str nor
                # list, `hosts` is never bound and the next line raises
                # NameError -- confirm allowed config types or add an
                # explicit error branch.
                ctx.elasticsearch = Elasticsearch(hosts=hosts,
                                                  http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'),
                                                  **self.elasticsearch_options)
            return getattr(ctx.elasticsearch, item)
        # Outside any context this falls through and returns None implicitly.

    def teardown(self, exception):
        # Drop the context-local client when the context is popped.
        ctx = stack.top
        if hasattr(ctx, 'elasticsearch'):
            ctx.elasticsearch = None
| from elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticsearch(object):
    """Flask extension exposing a per-application-context Elasticsearch client.

    Attribute access is proxied to a lazily created
    ``elasticsearch.Elasticsearch`` instance stored on the current Flask
    context and configured from ``app.config``.
    """

    def __init__(self, app=None, **kwargs):
        # Supports both direct construction and the app-factory pattern.
        self.app = app
        if app is not None:
            self.init_app(app, **kwargs)

    def init_app(self, app, **kwargs):
        """Register config defaults and teardown hooks on *app*; extra
        kwargs are forwarded to the Elasticsearch constructor."""
        app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
        app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
        self.elasticsearch_options = kwargs
        # Use the newstyle teardown_appcontext if it's available,
        # otherwise fall back to the request context
        if hasattr(app, 'teardown_appcontext'):
            app.teardown_appcontext(self.teardown)
        else:
            app.teardown_request(self.teardown)

    def __getattr__(self, item):
        # Proxy unknown attributes to the context-local client, creating
        # it on first use.
        ctx = stack.top
        if ctx is not None:
            if not hasattr(ctx, 'elasticsearch'):
                # isinstance (rather than an exact `type(...) is` check)
                # also supports str/list subclasses in the configuration.
                if isinstance(ctx.app.config.get('ELASTICSEARCH_HOST'), str):
                    hosts = [ctx.app.config.get('ELASTICSEARCH_HOST')]
                elif isinstance(ctx.app.config.get('ELASTICSEARCH_HOST'), list):
                    hosts = ctx.app.config.get('ELASTICSEARCH_HOST')
                ctx.elasticsearch = Elasticsearch(hosts=hosts,
                                                  http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'),
                                                  **self.elasticsearch_options)
            return getattr(ctx.elasticsearch, item)

    def teardown(self, exception):
        # Drop the context-local client when the context is popped.
        ctx = stack.top
        if hasattr(ctx, 'elasticsearch'):
            ctx.elasticsearch = None
| mit | Python |
7686ce67ae643ca4e556c2238c91580555917d3f | add doctests | poldracklab/fmriprep,poldracklab/preprocessing-workflow,oesteban/preprocessing-workflow,oesteban/fmriprep,oesteban/fmriprep,poldracklab/fmriprep,oesteban/preprocessing-workflow,oesteban/fmriprep,poldracklab/preprocessing-workflow,poldracklab/fmriprep | fmriprep/utils/misc.py | fmriprep/utils/misc.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Miscelaneous utilities
"""
def fix_multi_T1w_source_name(in_files):
    """
    Make up a generic source name when there are multiple T1s

    >>> fix_multi_T1w_source_name([
    ...     '/path/to/sub-045_ses-test_T1w.nii.gz',
    ...     '/path/to/sub-045_ses-retest_T1w.nii.gz'])
    '/path/to/sub-045_T1w.nii.gz'

    """
    import os
    # Anything that is not a list (e.g. a single path) passes through.
    if not isinstance(in_files, list):
        return in_files
    base_dir, fname = os.path.split(in_files[0])
    # Subject label: the value after the last '-' of the first '_' entity.
    first_entity = fname.split("_")[0]
    subject_label = first_entity.split("-")[-1]
    return os.path.join(base_dir, "sub-%s_T1w.nii.gz" % subject_label)
def add_suffix(in_files, suffix):
    """
    Wrap nipype's fname_presuffix to conveniently just add a prefix

    >>> add_suffix([
    ...     '/path/to/sub-045_ses-test_T1w.nii.gz',
    ...     '/path/to/sub-045_ses-retest_T1w.nii.gz'], 'test')
    'sub-045_ses-test_T1w_test.nii.gz'

    """
    import os.path as op
    from niworkflows.nipype.utils.filemanip import fname_presuffix, filename_to_list
    # Only the first file's name is used; the suffixed basename
    # (directory stripped) is returned.
    return op.basename(fname_presuffix(filename_to_list(in_files)[0],
                                       suffix=suffix))
if __name__ == '__main__':
pass
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Miscelaneous utilities
"""
def fix_multi_T1w_source_name(in_files):
    """
    Make up a generic source name when there are multiple T1s

    >>> fix_multi_T1w_source_name([
    ...     '/path/to/sub-045_ses-test_T1w.nii.gz',
    ...     '/path/to/sub-045_ses-retest_T1w.nii.gz'])
    '/path/to/sub-045_T1w.nii.gz'

    """
    import os
    # Anything that is not a list (e.g. a single path) passes through.
    if not isinstance(in_files, list):
        return in_files
    # Subject label: value after the last '-' of the first '_' entity of
    # the first file's basename.
    subject_label = in_files[0].split(os.sep)[-1].split("_")[0].split("-")[-1]
    base, _ = os.path.split(in_files[0])
    return os.path.join(base, "sub-%s_T1w.nii.gz" % subject_label)


def add_suffix(in_files, suffix):
    """
    Wrap nipype's fname_presuffix to conveniently just add a prefix

    >>> add_suffix([
    ...     '/path/to/sub-045_ses-test_T1w.nii.gz',
    ...     '/path/to/sub-045_ses-retest_T1w.nii.gz'], 'test')
    'sub-045_ses-test_T1w_test.nii.gz'

    """
    import os.path as op
    from niworkflows.nipype.utils.filemanip import fname_presuffix, filename_to_list
    # Only the first file's name is used; the suffixed basename
    # (directory stripped) is returned.
    return op.basename(fname_presuffix(filename_to_list(in_files)[0],
                                       suffix=suffix))
if __name__ == '__main__':
pass
| bsd-3-clause | Python |
a2863cdb3770a91620868810292d5706b77a178f | Update models.py | 02agarwalt/FNGS_website,02agarwalt/FNGS_website,ebridge2/FNGS_website,ebridge2/FNGS_website,ebridge2/FNGS_website,ebridge2/FNGS_website,02agarwalt/FNGS_website | fngs/explore/models.py | fngs/explore/models.py | from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
from django.core.urlresolvers import reverse_lazy
from django.conf import settings
import os.path as op
import os
import uuid
def get_creds_file_path(instance, filename):
    # Upload-path callback for FileField: store uploads under "creds/<filename>".
    # (os.path.join on the single already-joined string is effectively a no-op.)
    return os.path.join("/".join(["creds", filename]))
class QuerySubmission(models.Model):
    """A submitted query/job against the remote processing service.

    Tracks the job directory identifying the submission, the requested
    action (status poll or kill), an optional credentials upload, and the
    URL of any produced output.
    """

    # URL where the job's output can be retrieved, once known.
    output_url = models.CharField(max_length=200, null=True, blank=True)
    STATE_CHOICES = (
        ('status', 'Job status'),
        ('kill', 'Kill jobs')
    )
    # Action to perform for this submission; see STATE_CHOICES.
    state = models.CharField(max_length=20, choices=STATE_CHOICES, default=None, blank=False)
    # Directory that uniquely identifies the job; also drives equality below.
    jobdir = models.CharField(max_length=100, blank=False)
    # Optional uploaded credentials, stored via get_creds_file_path.
    creds_file = models.FileField(upload_to=get_creds_file_path, null=True, blank=False)

    def add_output_url(self, url):
        # NOTE(review): this only binds a local variable and never touches
        # self.output_url, so it has no persistent effect -- confirm whether
        # it should be `self.output_url = url` followed by save().
        output_url = models.TextField(url)

    def __str__(self):
        return str(self.jobdir)

    def __eq__(self, other):
        # Two submissions are considered equal when they share a jobdir.
        if isinstance(other, self.__class__):
            return (self.jobdir == other.jobdir)
        return False

    def __ne__(self, other):
        return not self.__eq__(other)
| from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.db import models
from django.core.urlresolvers import reverse_lazy
from django.conf import settings
import os.path as op
import os
import uuid
def get_creds_file_path(instance, filename):
return os.path.join("/".join(["creds", filename]))
class QuerySubmission(models.Model):
output_url = models.CharField(max_length=200, null=True, blank=True)
STATE_CHOICES = (
('status', 'Job status'),
('kill', 'Kill jobs')
)
state = models.CharField(max_length=20, choices=STATE_CHOICES)
jobdir = models.CharField(max_length=100, blank=True)
creds_file = models.FileField(upload_to=get_creds_file_path, null=True, blank=True)
def add_output_url(self, url):
output_url = models.TextField(url)
def __str__(self):
return str(self.jobdir)
def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.jobdir == other.jobdir)
return False
def __ne__(self, other):
return not self.__eq__(other)
| apache-2.0 | Python |
4ca4c8805fa823b7eb686cd5134fcf836e672350 | Fix typo | APerson241/APersonBot,APerson241/APersonBot,APerson241/EnterpriseyBot,APerson241/APersonBot,APerson241/EnterpriseyBot,APerson241/APersonBot,APerson241/EnterpriseyBot,APerson241/EnterpriseyBot | defcon/defcon.py | defcon/defcon.py | import datetime
import re

import pywikibot

TEMPLATE_NAME = "Template:Vandalism information"
COMMENT = "[[Wikipedia:Bots/Requests for approval/APersonBot 5|Bot]] updating vandalism level to %d RPM"
TEMPLATE_PATH = "/data/project/apersonbot/bot/defcon/template.txt"

site = pywikibot.Site("en", "wikipedia")
site.login()

# Count revert-like edits among the last 30 minutes of recent changes.
num_reverts = 0
for change in site.recentchanges(
        start=datetime.datetime.now(),
        end=datetime.datetime.now() - datetime.timedelta(minutes=30),
        changetype="edit"):
    if re.search("revert|rv\ |rvv\ |undid", change[u"comment"],
                 flags=re.IGNORECASE):
        num_reverts += 1

# Reverts per minute over the sampled window.
rpm = float(num_reverts) / 30

template_page = pywikibot.Page(site, TEMPLATE_NAME)
current_rpm_match = re.search("WikiDefcon/levels\|(\d+)", template_page.get())

# Only edit the page when the stored level actually changed.  The regex
# group is a string, so it must be converted before comparing against
# int(rpm): a str != int comparison is always True and would trigger an
# edit on every single run.
if (not current_rpm_match) or (int(current_rpm_match.group(1)) != int(rpm)):
    try:
        template = open(TEMPLATE_PATH)
    except IOError as e:
        print(e)
    else:
        try:
            # template.txt carries two placeholders: integer level, raw rate.
            template_page.text = template.read() % (int(rpm), rpm)
            template_page.save(COMMENT % int(rpm))
        except Exception as e:
            print(e)
        finally:
            template.close()
| import datetime
import re

import pywikibot

TEMPLATE_NAME = "Template:Vandalism information"
COMMENT = "[[Wikipedia:Bots/Requests for approval/APersonBot 5|Bot]] updating vandalism level to %d RPM"
TEMPLATE_PATH = "/data/project/apersonbot/bot/defcon/template.txt"

site = pywikibot.Site("en", "wikipedia")
site.login()

# Count revert-like edits among the last 30 minutes of recent changes.
num_reverts = 0
for change in site.recentchanges(
        start=datetime.datetime.now(),
        end=datetime.datetime.now() - datetime.timedelta(minutes=30),
        changetype="edit"):
    if re.search("revert|rv\ |rvv\ |undid", change[u"comment"],
                 flags=re.IGNORECASE):
        num_reverts += 1

# Reverts per minute over the sampled window.
rpm = float(num_reverts) / 30

template_page = pywikibot.Page(site, TEMPLATE_NAME)
current_rpm_match = re.search("WikiDefcon/levels\|(\d+)", template_page.get())

# BUGFIX: the second operand referenced an undefined name
# (`current_defcon_match`), raising NameError whenever a level was already
# present on the page.  Also compare as integers: the regex group is a
# string, and str != int is always True.
if (not current_rpm_match) or (int(current_rpm_match.group(1)) != int(rpm)):
    try:
        template = open(TEMPLATE_PATH)
    except IOError as e:
        print(e)
    else:
        try:
            # template.txt carries two placeholders: integer level, raw rate.
            template_page.text = template.read() % (int(rpm), rpm)
            template_page.save(COMMENT % int(rpm))
        except Exception as e:
            print(e)
        finally:
            template.close()
| mit | Python |
8c8705db387f3f74874a9e18ed00a3256c54f2eb | Update mkwrapper_mm.py | araisrobo/machinekit,araisrobo/machinekit,araisrobo/machinekit,araisrobo/machinekit,araisrobo/machinekit,araisrobo/machinekit,araisrobo/machinekit,araisrobo/machinekit,araisrobo/machinekit | configs/sim/axis/mkwrapper_mm.py | configs/sim/axis/mkwrapper_mm.py | #!/usr/bin/python
import sys
import os
import subprocess
import importlib
from machinekit import launcher
from time import *

# Launch script for a Machinekit/LinuxCNC simulator session (metric config).
launcher.register_exit_handler()
launcher.set_debug_level(5)
# Run relative to this script's directory so the .ini/config paths resolve.
os.chdir(os.path.dirname(os.path.realpath(__file__)))

try:
    launcher.check_installation()  # make sure the Machinekit installation is sane
    launcher.cleanup_session()  # cleanup a previous session
    # launcher.load_bbio_file('myoverlay.bbio')  # load a BBB universal overlay
    launcher.start_process("configserver ~/proj/remote-ui/Machineface")  # start the configserver
    launcher.start_process('linuxcnc mkwrapper_mm.ini')  # start linuxcnc
except subprocess.CalledProcessError:
    # A launcher step failed: tear the session down and exit non-zero.
    launcher.end_session()
    sys.exit(1)

# Supervise the started processes until interrupted.
while True:
    sleep(1)
    launcher.check_processes()
| #!/usr/bin/python
import sys
import os
import subprocess
import importlib
from machinekit import launcher
from time import *
launcher.register_exit_handler()
launcher.set_debug_level(5)
os.chdir(os.path.dirname(os.path.realpath(__file__)))
try:
launcher.check_installation() # make sure the Machinekit installation is sane
launcher.cleanup_session() # cleanup a previous session
# launcher.load_bbio_file('myoverlay.bbio') # load a BBB universal overlay
# launcher.install_comp('gantry.comp') # install a comp HAL component of not already installed
# launcher.start_process("configserver ../Machineface ../Cetus/") # start the configserver
# launcher.start_process("configserver /home/ysli/proj/remote-ui/Machineface /home/ysli/proj/remote-ui/Cetus") # start the configserver
launcher.start_process("configserver /home/ysli/proj/remote-ui/Cetus") # start the configserver
# launcher.start_process('linuxcnc xyz.ini') # start linuxcnc
launcher.start_process('linuxcnc mkwrapper_mm.ini') # start linuxcnc
except subprocess.CalledProcessError:
launcher.end_session()
sys.exit(1)
while True:
sleep(1)
launcher.check_processes()
| lgpl-2.1 | Python |
71a1c09010a22a6b8b15390dd6e5cc43c18e6721 | Update zoom.py | mcamelo/zoom | zoom.py | zoom.py | ################
# RUNTIME
################
def init(w,h):
'''Initializes zoom, resizing the backbuffer to the given width and height.
'''
def run():
'''Returns true if game is running, false if game has ended.
'''
def time():
'''Returns the number of seconds between the beginning of the last frame and this.
'''
################
# Resources
################
def loadresources(filename):
'''Load resources file to memory.
'''
def getdata(name):
'''Retrieves a data block by the given name.
'''
################
# Screen
################
def doublebuffer():
'''Toogles double buffer mode on and off (defaults to off). When not in double
buffer mode, all drawing primitives are executed to the visible screen buffer
for immediate viewing by the user. Ideal for experimenting within the Python
interactive console.
'''
def makesprite(data):
'''Makes a sprite object out of the given data block.
'''
def target(buffer):
'''Sets the target buffer for drawing. By default, the
target buffer is the backbuffer, but sprites are buffers
and are valid targets too.
'''
def clear():
'''Clears the target buffer.
'''
def point(x,y):
'''Draws a point at the given x and y coordinates.
'''
def line(x0, y0, x1, y1):
'''Draws a line from x0, y0 to x1, y1.
'''
def putsprite(sprite, x, y):
'''Draws the sprite, starting at the given x and y coordinates.
'''
def show():
'''Shows the content of the backbuffer on the screen.
'''
################
# Audio
################
class Clip:
'''
'''
def makeclip(data):
'''Creates a playable audio clip from the given data block.
'''
def playclip(clip):
'''Plays the given clip once.
'''
def loopclip(clip):
'''Loop the given clip repeatedly.
'''
################
# Input
################
class KeyEvent:
    '''A keyboard event: a key code plus its pressed/released state.
    '''
    def code():
        '''Returns the key code of this event.
        '''
    def down():
        '''Returns true if this event is a key press (TODO confirm semantics).
        '''
    def up():
        '''Returns true if this event is a key release (TODO confirm semantics).
        '''
def getkey():
'''Gets a next key event in the queue.
'''
################
# Collision
################
class CollisionEvent:
    '''A detected collision between two named collision masks.
    '''
    def name1():
        '''Returns the name of the first mask involved in the collision.
        '''
    def name2():
        '''Returns the name of the second mask involved in the collision.
        '''
def makecollisionmask(data, name):
'''Creates a collision mask from the given datablock.
'''
def putmask(mask, x, y):
'''Positions the collision mask in the arena.
'''
def getcollisions():
'''Returns the next detected collision in the queue.
'''
################
# Timer
################
class TimerEvent:
    '''A fired timer, identified by the name passed to settimer().
    '''
    def name():
        '''Returns the name of the timer that fired.
        '''
def settimer(secs, name):
'''Sets a new timer.
'''
def gettimer():
'''Returns the next timer event in the queue.
'''
| ################
# RUNTIME
################
def init(w,h):
'''Initializes zoom, resizing the backbuffer to the given width and height.
'''
def run():
'''Returns the number of seconds since the last frame or false if game has ended.
'''
################
# Resources
################
def loadresources(filename):
'''Load resources file to memory.
'''
def getdata(name):
'''Retrieves a data block by the given name.
'''
################
# Screen
################
def doublebuffer():
'''Toogles double buffer mode on and off (defaults to off). When not in double
buffer mode, all drawing primitives are executed to the visible screen buffer
for immediate viewing by the user. Ideal for experimenting within the Python
interactive console.
'''
def makesprite(data):
'''Makes a sprite object out of the given data block.
'''
def target(buffer):
'''Sets the target buffer for drawing. By default, the
target buffer is the backbuffer, but sprites are buffers
and are valid targets too.
'''
def clear():
'''Clears the target buffer.
'''
def point(x,y):
'''Draws a point at the given x and y coordinates.
'''
def line(x0, y0, x1, y1):
'''Draws a line from x0, y0 to x1, y1.
'''
def putsprite(sprite, x, y):
'''Draws the sprite, starting at the given x and y coordinates.
'''
def show():
'''Shows the content of the backbuffer on the screen.
'''
################
# Audio
################
class Clip:
'''
'''
def makeclip(data):
'''Creates a playable audio clip from the given data block.
'''
def playclip(clip):
'''Plays the given clip once.
'''
def loopclip(clip):
'''Loop the given clip repeatedly.
'''
################
# Input
################
class KeyEvent:
'''
'''
def code():
def down():
def up():
def getkey():
'''Gets a next key event in the queue.
'''
################
# Collision
################
class CollisionEvent:
def name1():
def name2():
def makecollisionmask(data, name):
'''Creates a collision mask from the given datablock.
'''
def putmask(mask, x, y):
'''Positions the collision mask in the arena.
'''
def getcollisions():
'''Returns the next detected collision in the queue.
'''
################
# Timer
################
class TimerEvent:
def name():
def settimer(secs, name):
'''Sets a new timer.
'''
def gettimer():
'''Returns the next timer event in the queue.
'''
| apache-2.0 | Python |
e37eba5f9430cfa3c3cf081066e7079e5c564e95 | Improve templatetag to use either prefix or ... | spapas/django-generic-scaffold,spapas/django-generic-scaffold | generic_scaffold/templatetags/generic_scaffold_tags.py | generic_scaffold/templatetags/generic_scaffold_tags.py | from django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.assignment_tag
def set_urls_for_scaffold(app=None, model=None, prefix=None):
    """Template tag: look up the scaffold url names for the given app/model
    (or url prefix) and assign them to a template variable.

    NOTE(review): ``assignment_tag`` was deprecated in Django 1.9 and removed
    in 2.0 -- on modern Django this must become ``simple_tag``.
    """
    url_name = get_url_names(app, model, prefix)
    return url_name
| from django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.simple_tag
def get_url_for_action(prefix, action):
url = get_url_names(prefix)[action]
return url
@register.assignment_tag
def set_url_for_action(prefix, action):
url = get_url_names(prefix)[action]
return url
| mit | Python |
7a170e2a1c31834608aa9401f9605d274396d92c | Fix Python 3.4.x bytes string formatting | ethereum/pydevp2p | devp2p/utils.py | devp2p/utils.py | import struct
import rlp
from rlp.utils import encode_hex, decode_hex, str_to_bytes
import collections
# Aliases: big-endian integer (de)serialisation reusing rlp's sedes.
ienc = int_to_big_endian = rlp.sedes.big_endian_int.serialize


def big_endian_to_int(s):
    """Decode a big-endian byte string into an int.

    Leading zero bytes are stripped first because rlp's sedes rejects
    non-canonical (zero-prefixed) encodings.
    """
    return rlp.sedes.big_endian_int.deserialize(s.lstrip(b'\x00'))

idec = big_endian_to_int


def int_to_big_endian4(integer):
    ''' 4 bytes big endian integer'''
    return struct.pack('>I', integer)

ienc4 = int_to_big_endian4


node_uri_scheme = b'enode://'


def host_port_pubkey_from_uri(uri):
    """Split an enode URI (bytes) into (host, port, pubkey), where host and
    port are bytes and pubkey is the decoded 64-byte public key."""
    assert uri.startswith(node_uri_scheme) and b'@' in uri and b':' in uri, uri
    pubkey_hex, ip_port = uri[len(node_uri_scheme):].split(b'@')
    assert len(pubkey_hex) == 2 * 512 // 8
    ip, port = ip_port.split(b':')
    return ip, port, decode_hex(pubkey_hex)


def host_port_pubkey_to_uri(host, port, pubkey):
    """Build an enode URI (bytes) from host, port and a 64-byte pubkey.

    NOTE(review): on Python 3, str.format renders the *bytes* scheme as
    "b'enode://'" inside the text -- confirm this path is only exercised
    under Python 2 semantics, or format the scheme separately.
    """
    assert len(pubkey) == 512 // 8
    uri = '{}{}@{}:{}'.format(node_uri_scheme, encode_hex(pubkey),
                              host, port)
    return str_to_bytes(uri)
# ###### config helpers ###############
def hex_decode_config(self):
def _with_dict(d):
"recursively search and decode hex encoded data"
for k, v in d.items():
if k.endswith('_hex'):
d[k[:-len('_hex')]] = decode_hex(v)
if isinstance(v, dict):
_with_dict(v)
_with_dict(self.config)
def update_config_with_defaults(config, default_config):
    """Recursively fill *config* in place with entries from *default_config*.

    Existing keys in *config* win; nested mappings are merged recursively.
    Returns *config* for convenience.
    """
    # The collections.Mapping alias was deprecated in Python 3.3 and
    # removed in 3.10; import the ABC from its canonical location, with a
    # fallback for legacy interpreters.
    try:
        from collections.abc import Mapping
    except ImportError:  # Python 2
        from collections import Mapping
    for k, v in default_config.items():
        if isinstance(v, Mapping):
            r = update_config_with_defaults(config.get(k, {}), v)
            config[k] = r
        elif k not in config:
            config[k] = default_config[k]
    return config
# ###### colors ###############
COLOR_FAIL = '\033[91m'
COLOR_BOLD = '\033[1m'
COLOR_UNDERLINE = '\033[4m'
COLOR_END = '\033[0m'

# 14 ANSI codes: bright foregrounds 90-96, then backgrounds 41-47.
colors = ['\033[9%dm' % i for i in range(0, 7)]
colors += ['\033[4%dm' % i for i in range(1, 8)]


def cstr(num, txt):
    """Wrap *txt* in the ANSI color selected by *num* (wraps modulo the
    palette size)."""
    return '%s%s%s' % (colors[num % len(colors)], txt, COLOR_END)


def cprint(num, txt):
    """Print *txt* colorized via cstr()."""
    print(cstr(num, txt))


def phx(x):
    """Abbreviated hex representation (first 8 hex chars) -- handy for ids."""
    return encode_hex(x)[:8]

if __name__ == '__main__':
    # Visual smoke test: print one sample line per palette entry.
    for i in range(len(colors)):
        cprint(i, 'test')
| import struct
import rlp
from rlp.utils import encode_hex, decode_hex, str_to_bytes
import collections
ienc = int_to_big_endian = rlp.sedes.big_endian_int.serialize
def big_endian_to_int(s):
return rlp.sedes.big_endian_int.deserialize(s.lstrip(b'\x00'))
idec = big_endian_to_int
def int_to_big_endian4(integer):
''' 4 bytes big endian integer'''
return struct.pack('>I', integer)
ienc4 = int_to_big_endian4
node_uri_scheme = b'enode://'
def host_port_pubkey_from_uri(uri):
assert uri.startswith(node_uri_scheme) and b'@' in uri and b':' in uri, uri
pubkey_hex, ip_port = uri[len(node_uri_scheme):].split(b'@')
assert len(pubkey_hex) == 2 * 512 // 8
ip, port = ip_port.split(b':')
return ip, port, decode_hex(pubkey_hex)
def host_port_pubkey_to_uri(host, port, pubkey):
    """Build an enode URI (bytes) from host, port and a 64-byte pubkey.

    Avoids bytes %-formatting, which only exists from Python 3.5 on
    (b'%s' % ... raises TypeError on 3.4): format as text, then encode.
    Assumes host/port are text-formattable values -- confirm callers.
    """
    assert len(pubkey) == 512 // 8
    uri = '{}@{}:{}'.format(encode_hex(pubkey), host, port)
    return node_uri_scheme + str_to_bytes(uri)
# ###### config helpers ###############
def hex_decode_config(self):
def _with_dict(d):
"recursively search and decode hex encoded data"
for k, v in d.items():
if k.endswith('_hex'):
d[k[:-len('_hex')]] = decode_hex(v)
if isinstance(v, dict):
_with_dict(v)
_with_dict(self.config)
def update_config_with_defaults(config, default_config):
    """Recursively fill *config* in place with entries from *default_config*.

    Existing keys in *config* win; nested mappings are merged recursively.
    Returns *config* for convenience.
    """
    # The collections.Mapping alias was deprecated in Python 3.3 and
    # removed in 3.10; import the ABC from its canonical location, with a
    # fallback for legacy interpreters.
    try:
        from collections.abc import Mapping
    except ImportError:  # Python 2
        from collections import Mapping
    for k, v in default_config.items():
        if isinstance(v, Mapping):
            r = update_config_with_defaults(config.get(k, {}), v)
            config[k] = r
        elif k not in config:
            config[k] = default_config[k]
    return config
# ###### colors ###############
COLOR_FAIL = '\033[91m'
COLOR_BOLD = '\033[1m'
COLOR_UNDERLINE = '\033[4m'
COLOR_END = '\033[0m'
colors = ['\033[9%dm' % i for i in range(0, 7)]
colors += ['\033[4%dm' % i for i in range(1, 8)]
def cstr(num, txt):
return '%s%s%s' % (colors[num % len(colors)], txt, COLOR_END)
def cprint(num, txt):
print(cstr(num, txt))
def phx(x):
return encode_hex(x)[:8]
if __name__ == '__main__':
for i in range(len(colors)):
cprint(i, 'test')
| mit | Python |
5410c5cc15d94001715ee48745c76caf35db55dc | Upgrade to TBB/2019_U1@conan/stable | acgetchell/CDT-plusplus,acgetchell/CDT-plusplus,acgetchell/CDT-plusplus | conanfile.py | conanfile.py | from conans import ConanFile, CMake
class CausalDynamicalTriangulations(ConanFile):
    """Conan recipe for the CDT++ project: declares third-party
    dependencies and drives the CMake build."""

    settings = "os", "compiler", "build_type", "arch"
    requires = "boost/1.68.0@conan/stable", "catch2/2.4.1@bincrafters/stable", "TBB/2019_U1@conan/stable",\
        "eigen/3.3.5@conan/stable", "docopt/0.6.2@conan/stable",\
        "date/2.4.1@bincrafters/stable", "gsl_microsoft/2.0.0@bincrafters/stable"
    generators = "cmake"
    # Build Boost with threading support enabled.
    default_options = "boost:without_thread=False"
    # default_options = "Boost:header_only=True"

    def build(self):
        """Configure and build with CMake: RelWithDebInfo, verbose output,
        exporting compile_commands.json for tooling."""
        cmake = CMake(self)
        cmake.verbose = True
        cmake.definitions["CMAKE_BUILD_TYPE"] = "RelWithDebInfo"
        cmake.configure(args=["CMAKE_EXPORT_COMPILE_COMMANDS=ON"])
        cmake.build()
def build_requirements(self):
self.build_requires("cmake_installer/3.12.0@conan/stable") | from conans import ConanFile, CMake
class CausalDynamicalTriangulations(ConanFile):
settings = "os", "compiler", "build_type", "arch"
requires = "boost/1.68.0@conan/stable", "catch2/2.4.1@bincrafters/stable", "TBB/2018_U6@conan/stable",\
"eigen/3.3.5@conan/stable", "docopt/0.6.2@conan/stable",\
"date/2.4.1@bincrafters/stable", "gsl_microsoft/2.0.0@bincrafters/stable"
generators = "cmake"
default_options = "boost:without_thread=False"
# default_options = "Boost:header_only=True"
def build(self):
cmake = CMake(self)
cmake.verbose = True
cmake.definitions["CMAKE_BUILD_TYPE"] = "RelWithDebInfo"
cmake.configure(args=["CMAKE_EXPORT_COMPILE_COMMANDS=ON"])
cmake.build()
def build_requirements(self):
self.build_requires("cmake_installer/3.12.0@conan/stable") | bsd-3-clause | Python |
0fcb955f02f2e59367612c56641a3588c20e628f | Update Catch 2.13.9 | offa/scope-guard,offa/scope-guard,offa/scope-guard,offa/scope-guard | conanfile.py | conanfile.py | import re
import os
from conans import ConanFile, CMake, tools
class ScopeguardConan(ConanFile):
    """Conan recipe for scope-guard; the package version is read from the
    project() statement in CMakeLists.txt."""

    name = "scope-guard"
    license = "MIT"
    author = "offa <offa@github>"
    url = "https://github.com.offa/scope-guard"
    description = "Implementation of Scoped Guards and Unique Resource as proposed in P0052."
    homepage = "https://github.com/offa/scope-guard"
    topics = ("cpp", "cpp17", "p0052", "scope-guard",
              "scope-exit", "scope-fail", "scope-success", "unique-resource", "cmake")
    generators = ("cmake_find_package", "cmake_paths")
    exports = ["LICENSE"]
    exports_sources = ("CMakeLists.txt", "include/*", "test/*", "cmake/*")
    options = {
        "unittest": ["ON", "OFF"],
        "enable_compat_header": ["ON", "OFF"]
    }
    default_options = (
        "unittest=ON",
        "enable_compat_header=OFF"
    )
    # Test-only dependencies, pulled in by requirements() when tests are on.
    __requirements = [
        "catch2/2.13.9",
        "trompeloeil/42"
    ]

    def set_version(self):
        """Extract the package version from CMakeLists.txt's project()."""
        # BUGFIX: ConanInvalidConfiguration was raised below without ever
        # being imported, turning both error paths into NameError.
        from conans.errors import ConanInvalidConfiguration
        cmake_lists_content = tools.load(os.path.join(self.recipe_folder, "CMakeLists.txt"))
        project_match = re.search(r'project\s*\((.+?)\)', cmake_lists_content, re.DOTALL)
        if not project_match:
            raise ConanInvalidConfiguration("No valid project() statement found in CMakeLists.txt")
        project_params = project_match.group(1).split()
        version_string = project_params[project_params.index("VERSION") + 1]
        if not re.search(r'\d+\.\d+\.\d+(?:\.\d)?', version_string):
            raise ConanInvalidConfiguration("No valid version found in CMakeLists.txt")
        self.version = version_string
        self.output.info("Project version from CMakeLists.txt: '{}'".format(self.version))

    def requirements(self):
        if self.options.unittest:
            for req in self.__requirements:
                self.requires(req)

    def package(self):
        self.copy("LICENSE", dst="license")
        cmake = self._configure_cmake()
        cmake.install()

    def _configure_cmake(self):
        # Forward the recipe options to the CMake cache.
        cmake = CMake(self)
        cmake.definitions["UNITTEST"] = self.options.unittest
        cmake.definitions["ENABLE_COMPAT_HEADER"] = self.options.enable_compat_header
        cmake.configure(build_folder="build")
        return cmake
| import re
import os
from conans import ConanFile, CMake, tools
class ScopeguardConan(ConanFile):
name = "scope-guard"
license = "MIT"
author = "offa <offa@github>"
url = "https://github.com.offa/scope-guard"
description = "Implementation of Scoped Guards and Unique Resource as proposed in P0052."
homepage = "https://github.com/offa/scope-guard"
topics = ("cpp", "cpp17", "p0052", "scope-guard",
"scope-exit", "scope-fail", "scope-success", "unique-resource", "cmake")
generators = ("cmake_find_package", "cmake_paths")
exports = ["LICENSE"]
exports_sources = ("CMakeLists.txt", "include/*", "test/*", "cmake/*")
options = {
"unittest": ["ON", "OFF"],
"enable_compat_header": ["ON", "OFF"]
}
default_options = (
"unittest=ON",
"enable_compat_header=OFF"
)
__requirements = [
"catch2/2.13.8",
"trompeloeil/42"
]
def set_version(self):
cmake_lists_content = tools.load(os.path.join(self.recipe_folder, "CMakeLists.txt"))
project_match = re.search(r'project\s*\((.+?)\)', cmake_lists_content, re.DOTALL)
if not project_match:
raise ConanInvalidConfiguration("No valid project() statement found in CMakeLists.txt")
project_params = project_match.group(1).split()
version_string = project_params[project_params.index("VERSION") + 1]
if not re.search(r'\d+\.\d+\.\d+(?:\.\d)?', version_string):
raise ConanInvalidConfiguration("No valid version found in CMakeLists.txt")
self.version = version_string
self.output.info("Project version from CMakeLists.txt: '{}'".format(self.version))
def requirements(self):
if self.options.unittest:
for req in self.__requirements:
self.requires(req)
def package(self):
self.copy("LICENSE", dst="license")
cmake = self._configure_cmake()
cmake.install()
def _configure_cmake(self):
cmake = CMake(self)
cmake.definitions["UNITTEST"] = self.options.unittest
cmake.definitions["ENABLE_COMPAT_HEADER"] = self.options.enable_compat_header
cmake.configure(build_folder="build")
return cmake
| mit | Python |
cf766372264996cba55cd79986d5e411480b771f | add a note about argument promotion in variadic calls | topazproject/topaz,topazproject/topaz,topazproject/topaz,babelsberg/babelsberg-r,babelsberg/babelsberg-r,babelsberg/babelsberg-r,babelsberg/babelsberg-r,topazproject/topaz,babelsberg/babelsberg-r | topaz/modules/ffi/variadic_invoker.py | topaz/modules/ffi/variadic_invoker.py | from topaz.module import ClassDef
from topaz.objects.objectobject import W_Object
from topaz.modules.ffi.type import type_object, ffi_types, W_TypeObject, VOID
from topaz.modules.ffi.dynamic_library import coerce_dl_symbol
from topaz.modules.ffi.function import W_FunctionObject
from rpython.rlib import clibffi
from rpython.rtyper.lltypesystem import lltype, rffi
class W_VariadicInvokerObject(W_Object):
    """FFI::VariadicInvoker -- calls a variadic C function, rebuilding the
    concrete argument-type list on every invocation."""

    classdef = ClassDef('VariadicInvoker', W_Object.classdef)

    def __init__(self, space):
        W_Object.__init__(self, space)
        # Defaults until initialize() runs: void return type, no fixed
        # argument types, no resolved function symbol.
        self.w_ret_type = W_TypeObject(space, VOID)
        self.arg_types_w = []
        self.funcsym = lltype.nullptr(rffi.VOIDP.TO)

    @classdef.singleton_method('allocate')
    def singleton_method_allocate(self, space, args_w):
        return W_VariadicInvokerObject(space)

    @classdef.method('initialize', arg_types_w='array')
    def method_initialize(self, space, w_name, arg_types_w,
                          w_ret_type, w_options=None):
        if w_options is None: w_options = space.newhash()
        self.w_ret_type = type_object(space, w_ret_type)
        self.arg_types_w = [type_object(space, w_type)
                            for w_type in arg_types_w]
        self.w_name = w_name
        # NOTE(review): w_options is not forwarded below -- a fresh empty
        # hash is passed to 'init' instead; confirm whether options should
        # propagate.
        space.send(self, 'init', [space.newarray(arg_types_w), space.newhash()])

    @classdef.method('invoke', arg_types_w='array', arg_values_w='array')
    def method_invoke(self, space, arg_types_w, arg_values_w):
        # Build a one-shot Function object for this call's concrete
        # argument signature, then delegate the actual call to it.
        w_function = W_FunctionObject(space)
        arg_types_w = [type_object(space, t) for t in arg_types_w]
        # XXX we are missing argument promotion for the variadic arguments here
        # see
        # http://stackoverflow.com/questions/1255775/default-argument-promotions-in-c-function-calls
        w_ret_type = self.w_ret_type
        w_function.initialize_variadic(space, self.w_name, w_ret_type, arg_types_w)
        return space.send(w_function, 'call', arg_values_w)
| from topaz.module import ClassDef
from topaz.objects.objectobject import W_Object
from topaz.modules.ffi.type import type_object, ffi_types, W_TypeObject, VOID
from topaz.modules.ffi.dynamic_library import coerce_dl_symbol
from topaz.modules.ffi.function import W_FunctionObject
from rpython.rlib import clibffi
from rpython.rtyper.lltypesystem import lltype, rffi
class W_VariadicInvokerObject(W_Object):
classdef = ClassDef('VariadicInvoker', W_Object.classdef)
def __init__(self, space):
W_Object.__init__(self, space)
self.w_ret_type = W_TypeObject(space, VOID)
self.arg_types_w = []
self.funcsym = lltype.nullptr(rffi.VOIDP.TO)
@classdef.singleton_method('allocate')
def singleton_method_allocate(self, space, args_w):
return W_VariadicInvokerObject(space)
@classdef.method('initialize', arg_types_w='array')
def method_initialize(self, space, w_name, arg_types_w,
w_ret_type, w_options=None):
if w_options is None: w_options = space.newhash()
self.w_ret_type = type_object(space, w_ret_type)
self.arg_types_w = [type_object(space, w_type)
for w_type in arg_types_w]
self.w_name = w_name
space.send(self, 'init', [space.newarray(arg_types_w), space.newhash()])
@classdef.method('invoke', arg_types_w='array', arg_values_w='array')
def method_invoke(self, space, arg_types_w, arg_values_w):
w_function = W_FunctionObject(space)
arg_types_w = [type_object(space, t) for t in arg_types_w]
w_ret_type = self.w_ret_type
w_function.initialize_variadic(space, self.w_name, w_ret_type, arg_types_w)
return space.send(w_function, 'call', arg_values_w)
| bsd-3-clause | Python |
82f95d7f14d4af5d3827af980028c16ffa7a608d | Update job lifecycle add scheduled | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | polyaxon/constants/jobs.py | polyaxon/constants/jobs.py | from constants.statuses import BaseStatuses
from constants.unknown import UNKNOWN
class JobLifeCycle(BaseStatuses):
"""Experiment lifecycle
Props:
* CREATED: created.
* BUILDING: This includes time before being bound to a node,
as well as time spent pulling images onto the host.
* RUNNING: The pod has been bound to a node and all of the containers have been started.
* SUCCEEDED: All containers in the pod have voluntarily terminated with a
container exit code of 0, and the system is
not going to restart any of these containers.
* FAILED: All containers in the pod have terminated,
and at least one container has terminated in a failure.
* STOPPED: was stopped/deleted/killed
* UNKNOWN: For some reason the state of the pod could not be obtained,
typically due to an error in communicating with the host of the pod.
"""
CREATED = 'Created'
BUILDING = 'Building'
SCHEDULED = 'Scheduler'
RUNNING = 'Running'
SUCCEEDED = 'Succeeded'
FAILED = 'Failed'
STOPPED = 'Stopped'
UNKNOWN = UNKNOWN
CHOICES = (
(CREATED, CREATED),
(BUILDING, BUILDING),
(SCHEDULED, SCHEDULED),
(RUNNING, RUNNING),
(SUCCEEDED, SUCCEEDED),
(FAILED, FAILED),
(STOPPED, STOPPED),
(UNKNOWN, UNKNOWN),
)
VALUES = {
CREATED, BUILDING, SCHEDULED, RUNNING, SUCCEEDED, FAILED, STOPPED, UNKNOWN
}
STARTING_STATUS = {CREATED, BUILDING}
RUNNING_STATUS = {BUILDING, SCHEDULED, RUNNING}
DONE_STATUS = {FAILED, STOPPED, SUCCEEDED}
FAILED_STATUS = {FAILED, }
TRANSITION_MATRIX = {
CREATED: {None, },
BUILDING: {CREATED, },
SCHEDULED: {CREATED, BUILDING},
RUNNING: {CREATED, SCHEDULED, BUILDING, UNKNOWN},
SUCCEEDED: {CREATED, RUNNING, UNKNOWN, },
FAILED: {CREATED, BUILDING, SCHEDULED, RUNNING, UNKNOWN, },
STOPPED: set(VALUES),
UNKNOWN: set(VALUES),
}
| from constants.statuses import BaseStatuses
from constants.unknown import UNKNOWN
class JobLifeCycle(BaseStatuses):
"""Experiment lifecycle
Props:
* CREATED: created.
* BUILDING: This includes time before being bound to a node,
as well as time spent pulling images onto the host.
* RUNNING: The pod has been bound to a node and all of the containers have been started.
* SUCCEEDED: All containers in the pod have voluntarily terminated with a
container exit code of 0, and the system is
not going to restart any of these containers.
* FAILED: All containers in the pod have terminated,
and at least one container has terminated in a failure.
* STOPPED: was stopped/deleted/killed
* UNKNOWN: For some reason the state of the pod could not be obtained,
typically due to an error in communicating with the host of the pod.
"""
CREATED = 'Created'
BUILDING = 'Building'
RUNNING = 'Running'
SUCCEEDED = 'Succeeded'
FAILED = 'Failed'
STOPPED = 'Stopped'
UNKNOWN = UNKNOWN
CHOICES = (
(CREATED, CREATED),
(BUILDING, BUILDING),
(RUNNING, RUNNING),
(SUCCEEDED, SUCCEEDED),
(FAILED, FAILED),
(STOPPED, STOPPED),
(UNKNOWN, UNKNOWN),
)
VALUES = {
CREATED, BUILDING, RUNNING, SUCCEEDED, FAILED, STOPPED, UNKNOWN
}
STARTING_STATUS = {CREATED, BUILDING}
RUNNING_STATUS = {BUILDING, RUNNING}
DONE_STATUS = {FAILED, STOPPED, SUCCEEDED}
FAILED_STATUS = {FAILED, }
TRANSITION_MATRIX = {
CREATED: {None, },
BUILDING: {CREATED, },
RUNNING: {CREATED, BUILDING, UNKNOWN},
SUCCEEDED: {CREATED, RUNNING, UNKNOWN, },
FAILED: {CREATED, RUNNING, UNKNOWN, },
STOPPED: set(VALUES),
UNKNOWN: set(VALUES),
}
| apache-2.0 | Python |
9ea68b38ddc417b62ad2a5d83b80b3c1d3185f7d | bump version | czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq,mkoistinen/aldryn-faq | aldryn_faq/__init__.py | aldryn_faq/__init__.py | __version__ = '0.2.0'
request_faq_category_identifier = 'aldryn_faq_current_category'
request_faq_question_identifier = 'aldryn_faq_current_question' | __version__ = '0.1.11'
request_faq_category_identifier = 'aldryn_faq_current_category'
request_faq_question_identifier = 'aldryn_faq_current_question' | bsd-3-clause | Python |
c2e18d42358754a11ccb56a58473c78b85c7aaa5 | set up django log rotation | sergeii/swat4stats.com,sergeii/swat4stats.com,sergeii/swat4stats.com | swat4tracker/settings/production.py | swat4tracker/settings/production.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from .common import *
ALLOWED_HOSTS = ['swat4tracker.com', 'swat4stats.com']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'HOST': '127.0.0.1',
'PORT': '5432',
'NAME': 'swat4tracker',
'USER': 'swat4tracker',
'PASSWORD': 'swat4tracker',
'OPTIONS': {},
},
}
LOGGING['handlers'].update({
'django': {
'level': 'WARNING',
'class': 'logging.handlers.RotatingFileHandler',
'filename': PATH_VENV.child('log', 'django.log'),
'formatter': 'simple',
'maxBytes': 1024*1024*5,
'backupCount': 5,
},
'stream': {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': PATH_VENV.child('log', 'stream.log'),
'maxBytes': 1024*1024*50,
'backupCount': 10,
},
})
STATIC_ROOT = Path('/var/www/static/swat4tracker/')
MEDIA_ROOT = Path('/var/www/media/swat4tracker/')
CACHES['default'] = {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
}
CACHEOPS_REDIS = {
'unix_socket_path': '/var/run/redis/redis.sock',
'db': 2,
}
COMPRESS_OFFLINE = True
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from .common import *
ALLOWED_HOSTS = ['swat4tracker.com', 'swat4stats.com']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'HOST': '127.0.0.1',
'PORT': '5432',
'NAME': 'swat4tracker',
'USER': 'swat4tracker',
'PASSWORD': 'swat4tracker',
'OPTIONS': {},
},
}
LOGGING['handlers'].update({
'django': {
'level': 'WARNING',
'class': 'logging.FileHandler',
'filename': PATH_VENV.child('log', 'django.log'),
'formatter': 'simple',
},
'stream': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': PATH_VENV.child('log', 'stream.log'),
},
})
STATIC_ROOT = Path('/var/www/static/swat4tracker/')
MEDIA_ROOT = Path('/var/www/media/swat4tracker/')
CACHES['default'] = {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
}
CACHEOPS_REDIS = {
'unix_socket_path': '/var/run/redis/redis.sock',
'db': 2,
}
COMPRESS_OFFLINE = True
| mit | Python |
7b2e30e610bd8ad951c0dbd0ce3c55753a5ed36f | Remove obsolete code. | kxepal/phoxpy | phoxpy/messages/directory.py | phoxpy/messages/directory.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2011 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
from phoxpy.messages import PhoxRequest
from phoxpy.mapping import Mapping, ObjectField, ListField, \
RefField, TextField, AttributeField
__all__ = ['DirectoryLoad', 'DirectorySave', 'DirectorySaveNew',
'DirectoryRemove', 'DirectoryRemoveNew', 'DirectoryRestore',
'DirectoryVersions']
class DirectoryLoad(PhoxRequest, 'directory'):
"""Message for request type ``directory``."""
#: Directory data source name.
name = TextField()
#: List of object ids. If None all of them will be requests.
elements = ListField(RefField())
class DirectorySave(PhoxRequest, 'directory-save'):
"""Message for request type ``directory-save``."""
#: Directory data source name.
directory = TextField()
#: Directory element to save.
element = ObjectField(Mapping.build(id=AttributeField()))
class DirectorySaveNew(DirectorySave,'directory-save-new'):
"""Message for request type ``directory-save-new``.
Applies to only specific group of directories which are listed in
:const:`~phoxpy.modules.directory.DIRS_FOR_NEW_PROC`."""
class DirectoryRemove(PhoxRequest, 'directory-remove'):
"""Message for request type ``directory-remove``."""
#: Directory data source name.
directory = TextField()
#: List of object ids to remove.
ids = ListField(RefField())
class DirectoryRemoveNew(DirectoryRemove, 'directory-remove-new'):
"""Message for request type ``directory-remove-new``.
Applies to only specific group of directories which are listed in
:const:`~phoxpy.modules.directory.DIRS_FOR_NEW_PROC`."""
class DirectoryRestore(PhoxRequest, 'directory-restore'):
"""Message for request type ``directory-restore``."""
#: Directory data source name.
directory = TextField()
#: List of object ids to restore.
ids = ListField(RefField())
class DirectoryVersions(PhoxRequest, 'directory-versions'):
"""Message for request type ``directory-versions``."""
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2011 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
from phoxpy import xml
from phoxpy.messages import PhoxRequest
from phoxpy.mapping import Mapping, ObjectField, ListField, \
RefField, TextField, AttributeField
__all__ = ['DirectoryLoad', 'DirectorySave', 'DirectorySaveNew',
'DirectoryRemove', 'DirectoryRemoveNew', 'DirectoryRestore',
'DirectoryVersions']
class DirectoryRequestNewMixIn(PhoxRequest):
"""MixIn to generate XML output in required format."""
def unwrap(self):
root = xml.Element('phox-request')
content = xml.Element('content')
obj = xml.Element('o')
content.append(obj)
root.append(content)
return super(PhoxRequest, self).unwrap(root, obj)
@classmethod
def wrap_xmlelem(cls, xmlelem, defaults):
defaults.update(xmlelem.attrib)
root = xmlelem.find('content/o')
assert root is not None
return super(PhoxRequest, cls).wrap_xmlelem(root, defaults)
class DirectoryLoad(PhoxRequest, 'directory'):
"""Message for request type ``directory``."""
#: Directory data source name.
name = TextField()
#: List of object ids. If None all of them will be requests.
elements = ListField(RefField())
class DirectorySave(PhoxRequest, 'directory-save'):
"""Message for request type ``directory-save``."""
#: Directory data source name.
directory = TextField()
#: Directory element to save.
element = ObjectField(Mapping.build(id=AttributeField()))
class DirectorySaveNew(DirectoryRequestNewMixIn, DirectorySave,
'directory-save-new'):
"""Message for request type ``directory-save-new``.
Applies to only specific group of directories which are listed in
:const:`~phoxpy.modules.directory.DIRS_FOR_NEW_PROC`."""
class DirectoryRemove(PhoxRequest, 'directory-remove'):
"""Message for request type ``directory-remove``."""
#: Directory data source name.
directory = TextField()
#: List of object ids to remove.
ids = ListField(RefField())
class DirectoryRemoveNew(DirectoryRequestNewMixIn, DirectoryRemove,
'directory-remove-new'):
"""Message for request type ``directory-remove-new``.
Applies to only specific group of directories which are listed in
:const:`~phoxpy.modules.directory.DIRS_FOR_NEW_PROC`."""
class DirectoryRestore(PhoxRequest, 'directory-restore'):
"""Message for request type ``directory-restore``."""
#: Directory data source name.
directory = TextField()
#: List of object ids to restore.
ids = ListField(RefField())
class DirectoryVersions(PhoxRequest, 'directory-versions'):
"""Message for request type ``directory-versions``."""
| bsd-3-clause | Python |
605fab847797679fa961bde98be893d09fd4f89b | Add the remaining tests to testall. | python-postgres/fe,python-postgres/fe | postgresql/test/testall.py | postgresql/test/testall.py | ##
# copyright 2009, James William Pye
# http://python.projects.postgresql.org
##
import sys
import os
import unittest
import warnings
from ..installation import Installation
from .test_exceptions import *
from .test_bytea_codec import *
from .test_iri import *
from .test_protocol import *
from .test_configfile import *
from .test_pgpassfile import *
from .test_cluster import *
from .test_connect import *
# No SSL? cluster initialization will fail.
if Installation.default().ssl:
from .test_ssl_connect import *
else:
warnings.warn("installation doesn't not support SSL")
from .test_driver import *
if __name__ == '__main__':
unittest.main()
| ##
# copyright 2009, James William Pye
# http://python.projects.postgresql.org
##
import sys
import os
import unittest
from postgresql.test.test_iri import *
from postgresql.test.test_protocol import *
from postgresql.test.test_exceptions import *
if __name__ == '__main__':
from types import ModuleType
this = ModuleType("this")
this.__dict__.update(globals())
unittest.main(this)
| bsd-3-clause | Python |
dd41eacf8e1517c90664c956de93bf7d63c47caf | Add users set() to base channel objects | UltrosBot/Ultros,UltrosBot/Ultros | system/protocols/generic/channel.py | system/protocols/generic/channel.py | from system.translations import Translations
__author__ = 'Sean'
_ = Translations().get()
class Channel(object):
name = ""
users = set()
def __init__(self, name, protocol=None):
self.name = name
self.protocol = protocol
self.users = set()
def respond(self, message):
raise NotImplementedError(_("This method must be overridden"))
| __author__ = 'Sean'
from system.translations import Translations
_ = Translations().get()
class Channel(object):
name = ""
def __init__(self, name, protocol=None):
self.name = name
self.protocol = protocol
def respond(self, message):
raise NotImplementedError(_("This method must be overridden"))
| artistic-2.0 | Python |
eec37fb717ed913533f377a4c1f5780efd8b4721 | add pages option | visio2img/visio2img | visio2img.py | visio2img.py | import win32com.client
from win32com.client import constants
from os import path, chdir, getcwd
from sys import exit
from optparse import OptionParser
def get_dispatch_format(extension):
if extension == 'vsd':
return 'Visio.Application'
if extension == 'vsdx':
pass # What?
def get_pages(app, page_num=None):
"""
app -> page
if page_num is None, return all pages.
if page_num is int object, return path_num-th page(fromm 1).
"""
pages = app.ActiveDocument.Pages
return [list(pages)[page_num - 1]] if page_num else pages
if __name__ == '__main__':
# define parser
parser = OptionParser()
parser.add_option(
'-p', '--page',
action='store',
type='int',
dest='page',
help='transform only one page(set number of this page)'
)
(options, argv) = parser.parse_args()
print(options)
print(argv)
# if len(arguments) != 2, raise exception
if len(argv) != 2:
print('Enter Only input_filename and output_filename')
# define input_filename and output_filename
in_filename = path.abspath(argv[0])
out_filename = path.abspath(argv[1])
# define filename without extension and extension variable
in_filename_without_extension, in_extension = path.splitext(in_filename)
out_filename_without_extension, out_extension = path.splitext(out_filename)
# if file is not found, exit from program
if not path.exists(in_filename):
print('File Not Found')
exit()
try:
# make instance for visio
application = win32com.client.Dispatch(get_dispatch_format(in_extension[1:]))
application.Visible = False
document = application.Documents.Open(in_filename)
# make pages of picture
print(dir(parser))
pages = get_pages(application, page_num=options.page)
# define page_names
if len(pages) == 1:
page_names = [out_filename]
else: # len(pages) >= 2
page_names = (out_filename_without_extension + str(page_cnt + 1) + '.png'
for page_cnt in range(0, len(pages)))
# Export pages
for page, page_name in zip(pages, page_names):
page.Export(page_name)
finally:
application.Quit()
| import win32com.client
from win32com.client import constants
from os import path, chdir, getcwd
from sys import argv, exit
def get_dispatch_format(extension):
if extension == 'vsd':
return 'Visio.Application'
if extension == 'vsdx':
pass # What?
if __name__ == '__main__':
# if len(arguments) != 2, raise exception
if len(argv) != 3:
print('Enter Only input_filename and output_filename')
# define input_filename and output_filename
in_filename = path.abspath(argv[1])
out_filename = path.abspath(argv[2])
# define filename without extension and extension variable
in_filename_without_extension, in_extension = path.splitext(in_filename)
out_filename_without_extension, out_extension = path.splitext(out_filename)
# if file is not found, exit from program
if not path.exists(in_filename):
print('File Not Found')
exit()
try:
# make instance for visio
application = win32com.client.Dispatch(get_dispatch_format(in_extension[1:]))
application.Visible = False
document = application.Documents.Open(in_filename)
# make pages of picture
pages = application.ActiveDocument.Pages
# define page_names
if len(pages) == 1:
page_names = [out_filename]
else: # len(pages) >= 2
page_names = (out_filename_without_extension + str(page_cnt + 1) + '.png'
for page_cnt in range(0, len(pages)))
# Export pages
for page, page_name in zip(pages, page_names):
page.Export(page_name)
finally:
application.Quit()
| apache-2.0 | Python |
1076e1e3dc7788429333578f9cf57be1a9d6c70d | Clean up. | ariutta/target-interaction-finder | targetinteractionfinder/__init__.py | targetinteractionfinder/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from target_interaction_finder import TargetInteractionFinder
def main():
parser = argparse.ArgumentParser(description='Extract subgraph(s) from XGMML network(s).')
parser.add_argument('ids',
type=str,
help='identifier or file path to identifier list')
parser.add_argument('-c', '--column',
default=1,
type=int,
help='column number for node identifiers in node_ids file (default = 1)')
parser.add_argument('-s', '--source',
default='./source_xgmml/',
help='source file or directory path(s) to database XGMML (default = directory named "source_xgmml" in current working directory)')
parser.add_argument('-t', '--type',
default='rna',
help='node type (rna or protein; default = rna)')
parser.add_argument('-o', '--output',
default='.',
help='output directory path (default = current working directory)')
parser.add_argument('-d', '--debug',
default=False,
type=bool, help='Show debug messages (default = False)')
args = parser.parse_args()
node_ids = args.ids
source_xgmml = args.source
node_id_list_column_index = args.column - 1
source_type = args.type
output_dir = args.output
debug = args.debug
return TargetInteractionFinder(source_xgmml=source_xgmml, node_ids=node_ids, node_id_list_column_index=node_id_list_column_index, output_dir=output_dir, debug=debug)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from target_interaction_finder import TargetInteractionFinder
def main():
parser = argparse.ArgumentParser(description='Extract subgraph(s) from XGMML network(s).')
parser.add_argument('ids',
type=str,
help='identifier or file path to identifier list')
parser.add_argument('-c', '--column',
default=1,
type=int,
help='column number for node identifiers in node_ids file (default = 1)')
parser.add_argument('-s', '--source',
default='./source_xgmml/',
help='source file or directory path(s) to database XGMML(default = directory named "source_xgmml" in current working directory)')
parser.add_argument('-t', '--type',
default='rna',
help='node type (rna or protein; default = rna)')
parser.add_argument('-o', '--output',
default='.',
help='output directory path (default = current working directory)')
parser.add_argument('-d', '--debug',
default=False,
type=bool, help='Show debug messages')
args = parser.parse_args()
node_ids = args.ids
source_xgmml = args.source
node_id_list_column_index = args.column - 1
source_type = args.type
output_dir = args.output
debug = args.debug
return TargetInteractionFinder(source_xgmml=source_xgmml, node_ids=node_ids, node_id_list_column_index=node_id_list_column_index, output_dir=output_dir, debug=debug)
if __name__ == '__main__':
main()
| apache-2.0 | Python |
9f63bdee0502f29c4a97a24a984e99f4dbc00a37 | Remove comment | prattl/teamfinder,prattl/teamfinder,prattl/teamfinder,prattl/teamfinder | api/teamfinder/urls.py | api/teamfinder/urls.py | from django.conf import settings
from django.urls import include, path
from django.conf.urls.static import static
from django.contrib import admin
from rest_framework import routers
from common.api import views as common_views
from feedback.api import views as feedback_views
from players.api import views as player_views
from teams.api import views as team_views
from tf_auth import views as tf_auth_views
from common.views import deploy, social_redirect
router = routers.DefaultRouter()
router.register(r'applications', common_views.ApplicationViewSet)
router.register(r'feedback', feedback_views.FeedbackViewSet)
router.register(r'interests', common_views.InterestViewSet)
router.register(r'invitations', common_views.InvitationViewSet)
router.register(r'languages', common_views.LanguageViewSet)
router.register(r'players', player_views.PlayerViewSet)
router.register(r'regions', common_views.RegionViewSet)
router.register(r'positions', common_views.PositionViewSet)
router.register(r'teams', team_views.TeamViewSet)
router.register(r'memberships', common_views.MembershipViewSet)
router.register(r'user_email_preferences', common_views.UserEmailPreferencesViewSet)
urlpatterns = [
path('api/auth/', include('djoser.urls.authtoken')),
path('api/', include(router.urls)),
path('api/account/', tf_auth_views.AccountView.as_view()),
path('api-auth/', include('rest_framework.urls')),
path('admin/', admin.site.urls),
path('deploy/', deploy),
path('s3/sign/', common_views.S3SignView.as_view()),
path('social-redirect/', social_redirect),
path('', include('social_django.urls', namespace='social'))
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
path('__debug__/', include(debug_toolbar.urls)),
]
| from django.conf import settings
from django.urls import include, path
from django.conf.urls.static import static
from django.contrib import admin
from rest_framework import routers
from common.api import views as common_views
from feedback.api import views as feedback_views
from players.api import views as player_views
from teams.api import views as team_views
from tf_auth import views as tf_auth_views
from common.views import deploy, social_redirect
router = routers.DefaultRouter()
router.register(r'applications', common_views.ApplicationViewSet)
# router.register(r'account', tf_auth_views.AccountView, base_name="account")
router.register(r'feedback', feedback_views.FeedbackViewSet)
router.register(r'interests', common_views.InterestViewSet)
router.register(r'invitations', common_views.InvitationViewSet)
router.register(r'languages', common_views.LanguageViewSet)
router.register(r'players', player_views.PlayerViewSet)
router.register(r'regions', common_views.RegionViewSet)
router.register(r'positions', common_views.PositionViewSet)
router.register(r'teams', team_views.TeamViewSet)
router.register(r'memberships', common_views.MembershipViewSet)
router.register(r'user_email_preferences', common_views.UserEmailPreferencesViewSet)
urlpatterns = [
path('api/auth/', include('djoser.urls.authtoken')),
path('api/', include(router.urls)),
path('api/account/', tf_auth_views.AccountView.as_view()),
path('api-auth/', include('rest_framework.urls')),
path('admin/', admin.site.urls),
path('deploy/', deploy),
path('s3/sign/', common_views.S3SignView.as_view()),
path('social-redirect/', social_redirect),
path('', include('social_django.urls', namespace='social'))
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
path('__debug__/', include(debug_toolbar.urls)),
]
| apache-2.0 | Python |
bd7a721cbd16b926cfdc97790fe0d1ae6ede3ce2 | Bump version 33 | hugovk/terroroftinytown,ArchiveTeam/terroroftinytown,hugovk/terroroftinytown,ArchiveTeam/terroroftinytown,ArchiveTeam/terroroftinytown,hugovk/terroroftinytown | terroroftinytown/client/__init__.py | terroroftinytown/client/__init__.py | VERSION = 33 # Please update this whenever .client or .services changes
# Please update MIN_VERSION_OVERRIDE and MIN_CLIENT_VERSION_OVERRIDE as needed
| VERSION = 32 # Please update this whenever .client or .services changes
# Please update MIN_VERSION_OVERRIDE and MIN_CLIENT_VERSION_OVERRIDE as needed
| mit | Python |
c69f8d9ed42da8eef1730ee59e838f905a5e6ef3 | Bump version 16. | hugovk/terroroftinytown,hugovk/terroroftinytown,hugovk/terroroftinytown,ArchiveTeam/terroroftinytown,ArchiveTeam/terroroftinytown,ArchiveTeam/terroroftinytown | terroroftinytown/client/__init__.py | terroroftinytown/client/__init__.py | VERSION = 16 # Please update this whenever .client or .services changes
# Please update MIN_VERSION_OVERRIDE and MIN_CLIENT_VERSION_OVERRIDE as needed
| VERSION = 15 # Please update this whenever .client or .services changes
# Please update MIN_VERSION_OVERRIDE and MIN_CLIENT_VERSION_OVERRIDE as needed
| mit | Python |
8f42a1f5e0225e7e457bcaec01969ec0716d7c7e | use 'is' to compare to None | akaihola/PyChecker,akaihola/PyChecker,thomasvs/pychecker,thomasvs/pychecker | pychecker2/ReturnChecks.py | pychecker2/ReturnChecks.py | from pychecker2.Check import Check
from pychecker2.Check import Warning
from pychecker2.util import BaseVisitor, type_filter
from pychecker2 import symbols
from compiler import ast, walk
class Returns(BaseVisitor):
def __init__(self):
self.result = []
def visitReturn(self, node):
self.result.append(node)
# Don't descend into other scopes
def visitFunction(self, node): pass
visitClass = visitFunction
visitLambda = visitFunction
def _is_implicit(node):
if isinstance(node, ast.Const) and node.value is None:
return 1
return None
class MixedReturnCheck(Check):
mixedReturns = \
Warning('Report functions using "return" and "return value"',
'Function %s uses both "return" and "return value"')
def check(self, file, unused_checker):
for scope in type_filter(file.scopes.values(), symbols.FunctionScope):
returns = walk(scope.node.code, Returns()).result
empty, value = [], []
for node in returns:
if _is_implicit(node.value):
empty.append(node)
else:
value.append(node)
if len(empty) > 0 and len(value) > 0:
file.warning(empty[0], self.mixedReturns, scope.name)
| from pychecker2.Check import Check
from pychecker2.Check import Warning
from pychecker2.util import BaseVisitor, type_filter
from pychecker2 import symbols
from compiler import ast, walk
class Returns(BaseVisitor):
def __init__(self):
self.result = []
def visitReturn(self, node):
self.result.append(node)
# Don't descend into other scopes
def visitFunction(self, node): pass
visitClass = visitFunction
visitLambda = visitFunction
def _is_implicit(node):
if isinstance(node, ast.Const) and node.value == None:
return 1
return None
class MixedReturnCheck(Check):
mixedReturns = \
Warning('Report functions using "return" and "return value"',
'Function %s uses both "return" and "return value"')
def check(self, file, unused_checker):
for scope in type_filter(file.scopes.values(), symbols.FunctionScope):
returns = walk(scope.node.code, Returns()).result
empty, value = [], []
for node in returns:
if _is_implicit(node.value):
empty.append(node)
else:
value.append(node)
if len(empty) > 0 and len(value) > 0:
file.warning(empty[0], self.mixedReturns, scope.name)
| bsd-3-clause | Python |
f8d980de69607e73f207fea808c3b0558a4159c0 | Add date and social media fields to proposal | pyvec/cz.pycon.org-2017,pyvec/cz.pycon.org-2017,benabraham/cz.pycon.org-2017,pyvec/cz.pycon.org-2016,pyvec/cz.pycon.org-2016,benabraham/cz.pycon.org-2017,benabraham/cz.pycon.org-2017,pyvec/cz.pycon.org-2017,pyvec/cz.pycon.org-2016 | pyconcz_2016/cfp/models.py | pyconcz_2016/cfp/models.py | from django.db import models
from django.utils.timezone import now
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
conference = models.ForeignKey(Conference, related_name="cfps")
title = models.CharField(max_length=200)
date_start = models.DateTimeField()
date_end = models.DateTimeField()
class Meta:
ordering = ['date_start']
def __str__(self):
return self.title
class Proposal(models.Model):
DIFFICULTY = (
('all', 'All'),
('beginner', 'Beginner'),
('advanced', 'Advanced'),
)
cfp = models.ForeignKey(Cfp, related_name='proposals')
# Public speaker info
full_name = models.CharField(max_length=200)
bio = models.TextField()
twitter = models.CharField(max_length=20, blank=True)
github = models.CharField(max_length=20, blank=True)
# Public talk info
title = models.CharField(max_length=200)
abstract = models.TextField()
difficulty = models.CharField(
max_length=10, choices=DIFFICULTY, default='all')
# Private notes (for reviewers only)
note = models.TextField()
date = models.DateTimeField(default=now)
| from django.db import models
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
conference = models.ForeignKey(Conference, related_name="cfps")
title = models.CharField(max_length=200)
date_start = models.DateTimeField()
date_end = models.DateTimeField()
class Meta:
ordering = ['date_start']
def __str__(self):
return self.title
class Proposal(models.Model):
DIFFICULTY = (
('all', 'All'),
('beginner', 'Beginner'),
('advanced', 'Advanced'),
)
cfp = models.ForeignKey(Cfp, related_name='proposals')
# Public speaker info
full_name = models.CharField(max_length=200)
bio = models.TextField()
twitter = models.CharField(max_length=20, blank=True)
github = models.CharField(max_length=20, blank=True)
# Public talk info
title = models.CharField(max_length=200)
abstract = models.TextField()
difficulty = models.CharField(
max_length=10, choices=DIFFICULTY, default='all')
# Private notes (for reviewers only)
note = models.TextField()
| mit | Python |
f91a77351443aeb1990b188b77f96a0128f7cddb | Use sys.platform rather than platform.platform(), as joerick RTFM for me :) | joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument | pyinstrument/middleware.py | pyinstrument/middleware.py | from django.http import HttpResponse
from django.conf import settings
from pyinstrument import Profiler
import sys
import time
import os
try:
from django.utils.deprecation import MiddlewareMixin
except ImportError:
MiddlewareMixin = object
class ProfilerMiddleware(MiddlewareMixin):
def process_request(self, request):
profile_dir = getattr(settings, 'PYINSTRUMENT_PROFILE_DIR', None)
if getattr(settings, 'PYINSTRUMENT_URL_ARGUMENT', 'profile') in request.GET or profile_dir:
profiler = Profiler()
profiler.start()
request.profiler = profiler
def process_response(self, request, response):
if hasattr(request, 'profiler'):
request.profiler.stop()
output_html = request.profiler.output_html()
profile_dir = getattr(settings, 'PYINSTRUMENT_PROFILE_DIR', None)
# Limit the length of the file name (255 characters is the max limit on major current OS, but it is rather
# high and the other parts (see line 36) are to be taken into account; so a hundred will be fine here).
path = request.get_full_path().replace('/', '_')[:100]
# Swap ? for _qs_ on Windows, as it does not support ? in filenames.
if sys.platform in ['win32', 'cygwin']:
path = path.replace('?', '_qs_')
if profile_dir:
filename = '{total_time:.3f}s {path} {timestamp:.0f}.html'.format(
total_time=request.profiler.root_frame().time(),
path=path,
timestamp=time.time()
)
file_path = os.path.join(profile_dir, filename)
if not os.path.exists(profile_dir):
os.mkdir(profile_dir)
with open(file_path, 'w') as f:
f.write(output_html)
if getattr(settings, 'PYINSTRUMENT_URL_ARGUMENT', 'profile') in request.GET:
return HttpResponse(output_html)
else:
return response
else:
return response
| from django.http import HttpResponse
from django.conf import settings
from pyinstrument import Profiler
import platform
import time
import os
try:
from django.utils.deprecation import MiddlewareMixin
except ImportError:
MiddlewareMixin = object
class ProfilerMiddleware(MiddlewareMixin):
def process_request(self, request):
profile_dir = getattr(settings, 'PYINSTRUMENT_PROFILE_DIR', None)
if getattr(settings, 'PYINSTRUMENT_URL_ARGUMENT', 'profile') in request.GET or profile_dir:
profiler = Profiler()
profiler.start()
request.profiler = profiler
def process_response(self, request, response):
if hasattr(request, 'profiler'):
request.profiler.stop()
output_html = request.profiler.output_html()
profile_dir = getattr(settings, 'PYINSTRUMENT_PROFILE_DIR', None)
# Limit the length of the file name (255 characters is the max limit on major current OS, but it is rather
# high and the other parts (see line 36) are to be taken into account; so a hundred will be fine here).
path = request.get_full_path().replace('/', '_')[:100]
# Swap ? for _qs_ on Windows, as it does not support ? in filenames.
if platform.platform().startswith('Windows'):
path = path.replace('?', '_qs_')
if profile_dir:
filename = '{total_time:.3f}s {path} {timestamp:.0f}.html'.format(
total_time=request.profiler.root_frame().time(),
path=path,
timestamp=time.time()
)
file_path = os.path.join(profile_dir, filename)
if not os.path.exists(profile_dir):
os.mkdir(profile_dir)
with open(file_path, 'w') as f:
f.write(output_html)
if getattr(settings, 'PYINSTRUMENT_URL_ARGUMENT', 'profile') in request.GET:
return HttpResponse(output_html)
else:
return response
else:
return response
| bsd-3-clause | Python |
715b999a0527a717da3032f7ce8ae5d3115174bb | Update version.py | dpressel/baseline,dpressel/baseline,dpressel/baseline,dpressel/baseline | python/baseline/version.py | python/baseline/version.py | __version__ = "1.4.4"
| __version__ = "1.4.3"
| apache-2.0 | Python |
01c54e91f40c02346e74f1d0d6aecbfbd0bed1b4 | bump version to 1.3.1 | hobu/laz-perf,hobu/laz-perf,hobu/laz-perf,hobu/laz-perf,hobu/laz-perf | python/lazperf/__init__.py | python/lazperf/__init__.py | __version__='1.3.1'
from .pylazperfapi import PyDecompressor as Decompressor
from .pylazperfapi import PyCompressor as Compressor
from .pylazperfapi import PyVLRDecompressor as VLRDecompressor
from .pylazperfapi import PyVLRCompressor as VLRCompressor
from .pylazperfapi import PyRecordSchema as RecordSchema
from .pylazperfapi import PyLazVlr as LazVLR
from .pylazperfapi import buildNumpyDescription
from .pylazperfapi import buildGreyhoundDescription
| __version__='1.3.0'
from .pylazperfapi import PyDecompressor as Decompressor
from .pylazperfapi import PyCompressor as Compressor
from .pylazperfapi import PyVLRDecompressor as VLRDecompressor
from .pylazperfapi import PyVLRCompressor as VLRCompressor
from .pylazperfapi import PyRecordSchema as RecordSchema
from .pylazperfapi import PyLazVlr as LazVLR
from .pylazperfapi import buildNumpyDescription
from .pylazperfapi import buildGreyhoundDescription
| apache-2.0 | Python |
2ff480b8d38e74224ff70f2633dbc519753e967a | Remove redundant import. | TheLunchtimeAttack/matasano-challenges,TheLunchtimeAttack/matasano-challenges | python/matasano/set1/c8.py | python/matasano/set1/c8.py | if __name__ == "__main__":
chal_file = open("matasano/data/c8.txt", 'r')
coll_count = {}
for idx, line in enumerate(chal_file):
count = 0
ct = line[:-1]
for i in range(0, len(ct), 32):
for j in range(i+32, len(ct), 32):
if ct[i:i+32] == ct[j:j+32]:
print("Block collision found in ciphertext: {} Block {} == Block {}".format(idx, i//32, j//32))
count += 1
coll_count[idx] = count
most_coll = max(coll_count, key=coll_count.get)
print("Ciphertext {} had the most collisions with {}".format(most_coll, coll_count[most_coll]))
| from matasano.util.converters import hex_to_bytestr
if __name__ == "__main__":
chal_file = open("matasano/data/c8.txt", 'r')
coll_count = {}
for idx, line in enumerate(chal_file):
count = 0
ct = line[:-1]
for i in range(0, len(ct), 32):
for j in range(i+32, len(ct), 32):
if ct[i:i+32] == ct[j:j+32]:
print("Block collision found in ciphertext: {} Block {} == Block {}".format(idx, i//32, j//32))
count += 1
coll_count[idx] = count
most_coll = max(coll_count, key=coll_count.get)
print("Ciphertext {} had the most collisions with {}".format(most_coll, coll_count[most_coll]))
| mit | Python |
41eeea9a854747aa7e9715aa8186218d7d66fb46 | rename unused var to _ | opalmer/pywincffi,opalmer/pywincffi,opalmer/pywincffi,opalmer/pywincffi | pywincffi/ws2_32/events.py | pywincffi/ws2_32/events.py | """
Events
------
A module containing Windows functions for working with events.
"""
from six import integer_types
from pywincffi.core import dist
from pywincffi.core.checks import input_check, error_check
from pywincffi.exceptions import WindowsAPIError
from pywincffi.wintypes import HANDLE, SOCKET, wintype_to_cdata
def WSAEventSelect(socket, hEventObject, lNetworkEvents):
"""
Specifies an event object to be associated with the specified set of
FD_XXX network events.
.. seealso::
https://msdn.microsoft.com/en-us/library/ms741576
:param int socket:
A descriptor identify the socket.
:param :class:`pywincffi.wintypes.WSAEVENT` hEventObject:
A handle which identifies the event object to be associated
with the network events.
:param int lNetworkEvents:
A bitmask which specifies the combination of ``FD_XXX`` network
events which the application has interest in.
"""
input_check(
"socket", socket, allowed_types=(SOCKET, ))
input_check(
"hEventObject", hEventObject,
allowed_types=(HANDLE, )
)
input_check("lNetworkEvents", lNetworkEvents, integer_types)
ffi, library = dist.load()
# TODO: `socket` needs conversion
code = library.WSAEventSelect(
wintype_to_cdata(socket),
wintype_to_cdata(hEventObject),
ffi.cast("long", lNetworkEvents)
)
if code == library.SOCKET_ERROR:
errno = WSAGetLastError()
raise WindowsAPIError(
"WSAEventSelect", "Socket error %d" % errno, errno)
error_check("WSAEventSelect", code, expected=0)
def WSACreateEvent():
"""
Creates a new event object.
.. seealso::
https://msdn.microsoft.com/en-us/library/ms741561
:returns:
Returns a handle to a new event object.
"""
_, library = dist.load()
event = library.WSACreateEvent()
if library.wsa_invalid_event(event):
errno = WSAGetLastError()
raise WindowsAPIError(
"WSACreateEvent", "Socket error %d" % errno, errno)
return HANDLE(event)
def WSAGetLastError():
"""
Returns the last error status for a windows socket operation.
.. seealso::
https://msdn.microsoft.com/en-us/library/ms741580
"""
_, library = dist.load()
return library.WSAGetLastError()
| """
Events
------
A module containing Windows functions for working with events.
"""
from six import integer_types
from pywincffi.core import dist
from pywincffi.core.checks import input_check, error_check
from pywincffi.exceptions import WindowsAPIError
from pywincffi.wintypes import HANDLE, SOCKET, wintype_to_cdata
def WSAEventSelect(socket, hEventObject, lNetworkEvents):
"""
Specifies an event object to be associated with the specified set of
FD_XXX network events.
.. seealso::
https://msdn.microsoft.com/en-us/library/ms741576
:param int socket:
A descriptor identify the socket.
:param :class:`pywincffi.wintypes.WSAEVENT` hEventObject:
A handle which identifies the event object to be associated
with the network events.
:param int lNetworkEvents:
A bitmask which specifies the combination of ``FD_XXX`` network
events which the application has interest in.
"""
input_check(
"socket", socket, allowed_types=(SOCKET, ))
input_check(
"hEventObject", hEventObject,
allowed_types=(HANDLE, )
)
input_check("lNetworkEvents", lNetworkEvents, integer_types)
ffi, library = dist.load()
# TODO: `socket` needs conversion
code = library.WSAEventSelect(
wintype_to_cdata(socket),
wintype_to_cdata(hEventObject),
ffi.cast("long", lNetworkEvents)
)
if code == library.SOCKET_ERROR:
errno = WSAGetLastError()
raise WindowsAPIError(
"WSAEventSelect", "Socket error %d" % errno, errno)
error_check("WSAEventSelect", code, expected=0)
def WSACreateEvent():
"""
Creates a new event object.
.. seealso::
https://msdn.microsoft.com/en-us/library/ms741561
:returns:
Returns a handle to a new event object.
"""
ffi, library = dist.load()
event = library.WSACreateEvent()
if library.wsa_invalid_event(event):
errno = WSAGetLastError()
raise WindowsAPIError(
"WSACreateEvent", "Socket error %d" % errno, errno)
return HANDLE(event)
def WSAGetLastError():
"""
Returns the last error status for a windows socket operation.
.. seealso::
https://msdn.microsoft.com/en-us/library/ms741580
"""
_, library = dist.load()
return library.WSAGetLastError()
| mit | Python |
72bd9c2d6d7050b8a35e3c0203558d01cc832e79 | Fix in sparql output | cedar101/quepy-ko,pombredanne/quepy,cedar101/quepy,DrDub/quepy,iScienceLuvr/quepy,emoron/quepy | quepy/sparql_generation.py | quepy/sparql_generation.py | # -*- coding: utf-8 -*-
"""
Sparql generation code.
"""
from quepy import settings
from quepy.expression import isnode
from quepy.encodingpolicy import assert_valid_encoding
from quepy.intermediate_representation import IsRelatedTo
_indent = u" "
def adapt(x):
if isnode(x):
x = u"?x{}".format(x)
return x
if isinstance(x, basestring):
assert_valid_encoding(x)
if x.startswith(u"\""):
return x
return u'"{}"'.format(x)
return unicode(x)
def expression_to_sparql(e, full=False):
template = u"{preamble}\n" +\
u"SELECT DISTINCT {select} WHERE {{\n" +\
u"{expression}\n" +\
u"}}\n"
head = adapt(e.get_head())
if full:
select = u"*"
else:
select = head
y = 0
xs = []
for node in e.iter_nodes():
for relation, dest in e.iter_edges(node):
if relation is IsRelatedTo:
relation = u"?y{}".format(y)
y += 1
xs.append(triple(adapt(node), relation, adapt(dest),
indentation=1))
sparql = template.format(preamble=settings.SPARQL_PREAMBLE,
select=select,
expression=u"\n".join(xs))
return select, sparql
def triple(a, p, b, indentation=0):
s = _indent * indentation + u"{0} {1} {2}."
return s.format(a, p, b)
| # -*- coding: utf-8 -*-
"""
Sparql generation code.
"""
from quepy import settings
from quepy.expression import isnode
from quepy.encodingpolicy import assert_valid_encoding
from quepy.intermediate_representation import IsRelatedTo
_indent = u" "
def adapt(x):
if isnode(x):
x = u"?x{}".format(x)
return x
if isinstance(x, basestring):
assert_valid_encoding(x)
if x.startswith(u"\""):
return x
return u'"{}"'.format(x)
return unicode(x)
def expression_to_sparql(e, full=False):
template = u"{preamble}\n" +\
u"SELECT DISTINCT {select} WHERE {{\n" +\
u" {expression}\n" +\
u"}}\n"
head = adapt(e.get_head())
if full:
select = u"*"
else:
select = head
y = 0
xs = []
for node in e.iter_nodes():
for relation, dest in e.iter_edges(node):
if relation is IsRelatedTo:
relation = u"?y{}".format(y)
y += 1
xs.append(triple(adapt(node), relation, adapt(dest),
indentation=1))
sparql = template.format(preamble=settings.SPARQL_PREAMBLE,
select=select,
expression=u"\n".join(xs))
return select, sparql
def triple(a, p, b, indentation=0):
s = _indent * indentation + u"{0} {1} {2}."
return s.format(a, p, b)
| bsd-3-clause | Python |
60a95fee791a991930e97af12f6ef14e7d96244b | fix tests | fejta/test-infra,BenTheElder/test-infra,monopole/test-infra,fejta/test-infra,cblecker/test-infra,BenTheElder/test-infra,fejta/test-infra,michelle192837/test-infra,michelle192837/test-infra,jessfraz/test-infra,dims/test-infra,kubernetes/test-infra,dims/test-infra,BenTheElder/test-infra,dims/test-infra,jessfraz/test-infra,fejta/test-infra,cblecker/test-infra,fejta/test-infra,dims/test-infra,monopole/test-infra,cjwagner/test-infra,monopole/test-infra,kubernetes/test-infra,jessfraz/test-infra,cjwagner/test-infra,jessfraz/test-infra,kubernetes/test-infra,dims/test-infra,michelle192837/test-infra,kubernetes/test-infra,kubernetes/test-infra,cblecker/test-infra,kubernetes/test-infra,michelle192837/test-infra,dims/test-infra,michelle192837/test-infra,monopole/test-infra,fejta/test-infra,cblecker/test-infra,BenTheElder/test-infra,monopole/test-infra,cblecker/test-infra,monopole/test-infra,cjwagner/test-infra,jessfraz/test-infra,cblecker/test-infra,BenTheElder/test-infra,michelle192837/test-infra,BenTheElder/test-infra,cjwagner/test-infra,cjwagner/test-infra,cjwagner/test-infra,jessfraz/test-infra | releng/generate_tests_test.py | releng/generate_tests_test.py | #!/usr/bin/env python3
# Copyright 2019 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import tempfile
import shutil
from generate_tests import E2ETest
class TestGenerateTests(unittest.TestCase):
def setUp(self):
self.temp_directory = tempfile.mkdtemp()
self.job_name = "ci-kubernetes-e2e-cloud_a-image_b-k8sfoo-focus_c"
self.job = {
"interval": "1h"
}
self.config = {
"jobs": {"ci-kubernetes-e2e-cloud_a-image_b-k8sfoo-focus_c": self.job},
"common": {"args": []},
"cloudProviders": {"cloud_a": {"args": []}},
"images": {"image_b": {}},
"k8sVersions": {"foo": {"version": "2.4"}},
"testSuites": {"focus_c": {"args": ["--timeout=10"]}},
}
def tearDown(self):
shutil.rmtree(self.temp_directory)
def test_e2etests_testgrid_annotations_default(self):
generator = E2ETest(self.temp_directory, self.job_name, self.job, self.config)
_, prow_config, _ = generator.generate()
dashboards = prow_config["annotations"]["testgrid-dashboards"]
self.assertFalse("sig-release-2.4-blocking" in dashboards)
self.assertTrue("sig-release-generated" in dashboards)
def test_e2etests_testgrid_annotations_blocking_job(self):
self.job = {
"releaseBlocking": True,
"interval": "1h"
}
generator = E2ETest(self.temp_directory, self.job_name, self.job, self.config)
_, prow_config, _ = generator.generate()
dashboards = prow_config["annotations"]["testgrid-dashboards"]
self.assertTrue("sig-release-2.4-blocking" in dashboards)
self.assertFalse("sig-release-generated" in dashboards)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python3
# Copyright 2019 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import tempfile
import shutil
from generate_tests import E2ETest
class TestGenerateTests(unittest.TestCase):
def setUp(self):
self.temp_directory = tempfile.mkdtemp()
self.job_name = "ci-kubernetes-e2e-gce-cos-k8sbeta-ingress"
self.job = {
"interval": "1h"
}
self.config = {
"jobs": {"ci-kubernetes-e2e-gce-cos-k8sbeta-ingress": self.job},
"common": {"args": []},
"cloudProviders": {"gce": {"args": []}},
"images": {"cos": {}},
"k8sVersions": {"beta": {"version": "2.4"}},
"testSuites": {"ingress": {"args": ["--timeout=10"]}},
}
def tearDown(self):
shutil.rmtree(self.temp_directory)
def test_e2etests_testgrid_annotations_default(self):
generator = E2ETest(self.temp_directory, self.job_name, self.job, self.config)
_, prow_config, _ = generator.generate()
dashboards = prow_config["annotations"]["testgrid-dashboards"]
self.assertFalse("sig-release-2.4-blocking" in dashboards)
self.assertTrue("sig-release-2.4-all" in dashboards)
def test_e2etests_testgrid_annotations_blocking_job(self):
self.job = {
"releaseBlocking": True,
"interval": "1h"
}
generator = E2ETest(self.temp_directory, self.job_name, self.job, self.config)
_, prow_config, _ = generator.generate()
dashboards = prow_config["annotations"]["testgrid-dashboards"]
self.assertTrue("sig-release-2.4-blocking" in dashboards)
self.assertFalse("sig-release-2.4-all" in dashboards)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
c48706db3a1d5a68e173ed8212dde1c7269740c2 | use local aiohttpsession | mralext20/alex-bot | alexBot/cogs/phoneMonitor.py | alexBot/cogs/phoneMonitor.py | import logging
from collections import defaultdict
from typing import TYPE_CHECKING
import aiohttp
from discord.ext import tasks
from ..tools import Cog, get_json
if TYPE_CHECKING:
from bot import Bot
log = logging.getLogger(__name__)
TABLE = defaultdict(lambda: "Alex is Away")
TABLE['home'] = "Alex is At Home"
TABLE['walmart'] = "Alex is At Work"
class PhoneMonitor(Cog):
def __init__(self, bot: "Bot"):
super().__init__(bot)
self.session: aiohttp.ClientSession = None
self.phone_update.start()
@tasks.loop(minutes=1)
async def phone_update(self):
ret = await get_json(
self.session,
f"{self.bot.config.hass_host}/api/states/{self.bot.config.hass_target}",
headers={'Authorization': self.bot.config.hass_token},
)
ret = ret['state']
log.debug(f"asked HA, ret = {ret}, table is {TABLE[ret]}")
alex = self.bot.get_guild(791528974442299412).me
await alex.edit(nick=TABLE[ret])
@phone_update.before_loop
async def before_phone_updates(self):
self.session = aiohttp.ClientSession()
await self.bot.wait_until_ready()
async def cog_unload(self):
await self.session.close()
self.phone_update.cancel()
async def setup(bot: "Bot"):
if bot.location == "dev":
return
await bot.add_cog(PhoneMonitor(bot))
| import logging
from collections import defaultdict
from typing import TYPE_CHECKING
from discord.ext import tasks
from ..tools import Cog, get_json
if TYPE_CHECKING:
from bot import Bot
log = logging.getLogger(__name__)
TABLE = defaultdict(lambda: "Alex is Away")
TABLE['home'] = "Alex is At Home"
TABLE['walmart'] = "Alex is At Work"
class PhoneMonitor(Cog):
def __init__(self, bot: "Bot"):
super().__init__(bot)
self.phone_update.start()
@tasks.loop(minutes=1)
async def phone_update(self):
ret = await get_json(
self.bot.session,
f"{self.bot.config.hass_host}/api/states/{self.bot.config.hass_target}",
headers={'Authorization': self.bot.config.hass_token},
)
ret = ret['state']
log.debug(f"asked HA, ret = {ret}, table is {TABLE[ret]}")
alex = self.bot.get_guild(791528974442299412).me
await alex.edit(nick=TABLE[ret])
@phone_update.before_loop
async def before_phone_updates(self):
await self.bot.wait_until_ready()
def cog_unload(self):
self.phone_update.cancel()
async def setup(bot: "Bot"):
if bot.location == "dev":
return
await bot.add_cog(PhoneMonitor(bot))
| mit | Python |
bee9373dcf852e7af9f0f1a78dcc17a0922f96fe | Modify main.py tests to use get_anchorhub_path() | samjabrahams/anchorhub | anchorhub/tests/test_main.py | anchorhub/tests/test_main.py | """
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
from anchorhub.util.getanchorhubpath import get_anchorhub_path
from anchorhub.compatibility import get_path_separator
def test_one():
"""
main.py: Test defaults with local directory as input.
"""
main.main([get_anchorhub_path() + get_path_separator() +
'../sample/multi-file'])
| """
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
def test_one():
"""
main.py: Test defaults with local directory as input.
"""
main.main(['.'])
| apache-2.0 | Python |
d7e79e32240b73c8565c7e526e679b3c9bb84bae | Fix the bug in the blob admin async uploader when no local file is present | GISAElkartea/amv2,GISAElkartea/amv2,GISAElkartea/amv2 | antxetamedia/blobs/fields.py | antxetamedia/blobs/fields.py | # -*- coding: utf-8 -*-
from django.forms import fields, widgets
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.core.files.storage import default_storage
class UploadWidget(widgets.TextInput):
link = '<a href="{link}" target="_blank">{view}</a>'
widget = '<p>{input}<span>{link}</span><br><input type="file" onchange="{script}" value="upload"></p>'
script = "GetBlobUploader('{upload_url}', '{media_url}', '{pending}', '{view}')(this);"
class Media:
js = ['js/blob_uploader.js']
def __init__(self, attrs=None):
attrs = {} if attrs is None else attrs.copy()
attrs['readonly'] = 'readonly'
super(UploadWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
attrs = {} if attrs is None else attrs.copy()
input = super(UploadWidget, self).render(name, value, attrs=attrs)
link = getattr(value, 'url', None) if value else None
link = '' if link is None else self.link.format(link=link, view=_('Listen'))
upload_url = reverse('blobs:admin_async_blob_upload', kwargs={'filename': 'filename'})
media_url = default_storage.url('filename')
script = self.script.format(upload_url=upload_url, media_url=media_url,
pending=_('Pending…'), view=_('Listen'))
return mark_safe(self.widget.format(input=input, link=link, script=script, view=_('Listen')))
class UploadField(fields.CharField):
widget = UploadWidget
| # -*- coding: utf-8 -*-
from django.forms import fields, widgets
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.core.files.storage import default_storage
class UploadWidget(widgets.TextInput):
widget = ('<p>{input}<span><a href="{link}" target="_blank">{view}</a></span><br>'
'<input type="file" onchange="{script}" value="upload"></p>')
script = "GetBlobUploader('{upload_url}', '{media_url}', '{pending}', '{view}')(this);"
class Media:
js = ['js/blob_uploader.js']
def __init__(self, attrs=None):
attrs = {} if attrs is None else attrs.copy()
attrs['readonly'] = 'readonly'
super(UploadWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['class'] = 'vTextField'
input = super(UploadWidget, self).render(name, value, attrs=attrs)
link = getattr(value, 'url', None) or value
upload_url = reverse('blobs:admin_async_blob_upload', kwargs={'filename': 'filename'})
media_url = default_storage.url('filename')
script = self.script.format(upload_url=upload_url, media_url=media_url,
pending=_('Pending…'), view=_('Listen'))
return mark_safe(self.widget.format(input=input, link=link, script=script, view=_('Listen')))
class UploadField(fields.CharField):
widget = UploadWidget
| agpl-3.0 | Python |
fbc9e950c42829c366d28da978adbaa42a7f9a10 | Update berepi_logger.py | jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi | apps/logger/berepi_logger.py | apps/logger/berepi_logger.py | # -*- coding: utf-8 -*-
# author : http://github.com/jeonghoonkang
import logging
#import logging.config
from logging.handlers import RotatingFileHandler
#logging.config.fileConfig('logging.conf')
LOG_FILENAME = "./log/berelogger.log"
logger = logging.getLogger('BereLogger')
logger.setLevel(logging.DEBUG)
handler = logging.handlers.TimedRotatingFileHandler(filename=LOG_FILENAME, when="midnight", interval=1, encoding="utf-8")
#handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, mode='a', maxBytes=10, backupCount=10)
handler.formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
logger.addHandler(handler)
def berelog(msg_name, value):
logger.info(msg_name + ' ==> ' + value)
def berelog(msg):
logger.info(msg)
if __name__ == "__main__":
berelog('sesnor type co2', '25')
# 'application' code
#logger.debug('debug message')
#logger.info('info message')
#logger.warn('warn message')
#logger.error('error message')
#logger.critical('critical message')
"""
if you want to use, this berepi_logger
import logging, and use berelog('*****')
"""
| # -*- coding: utf-8 -*-
# author : http://github.com/jeonghoonkang
import logging
#import logging.config
from logging.handlers import RotatingFileHandler
#logging.config.fileConfig('logging.conf')
LOG_FILENAME = "./log/berelogger.log"
logger = logging.getLogger('BereLogger')
logger.setLevel(logging.DEBUG)
handler = logging.handlers.TimedRotatingFileHandler(filename=LOG_FILENAME, when="midnight", interval=1, encoding="utf-8")
#handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, mode='a', maxBytes=10, backupCount=10)
handler.formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
logger.addHandler(handler)
def berelog(msg_name, value):
logger.info(msg_name + ' ==> ' + value)
if __name__ == "__main__":
berelog('sesnor type co2', '25')
# 'application' code
#logger.debug('debug message')
#logger.info('info message')
#logger.warn('warn message')
#logger.error('error message')
#logger.critical('critical message')
"""
if you want to use, this berepi_logger
import logging, and use berelog('*****')
"""
| bsd-2-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.