commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
c8e09d7d8fae748731b97b5f1f77bdf1062ce539 | remove most likely unintentional string comparison with is | jplusplus/statscraper | statscraper/base_scraper.py | statscraper/base_scraper.py | # encoding: utf-8
import csv
import requests
class BaseScraper():
"""Base class from which all scrapers inherit."""
def select(self, label):
"""Select a dataset."""
def filterfunc(x):
return x.label == label
self.selection = next(filter(filterfunc, self._datasets))
return self
def list(self):
"""List all available datasets."""
return [x.label for x in self._datasets]
def fetch(self, params):
"""Make the actual request and fetch the data."""
url = self._urls[self.selection.label]
r = requests.get(url.format(**params))
if 'filetype' in params:
if params['filetype'] == 'csv':
data = csv.DictReader(r.iter_lines(decode_unicode=True),
delimiter=';')
if params['filetype'] == 'json':
data = r.json()
data = r.text
return self.selection.load(data)
| # encoding: utf-8
import csv
import requests
class BaseScraper():
"""Base class from which all scrapers inherit."""
def select(self, label):
"""Select a dataset."""
def filterfunc(x):
return x.label == label
self.selection = next(filter(filterfunc, self._datasets))
return self
def list(self):
"""List all available datasets."""
return [x.label for x in self._datasets]
def fetch(self, params):
"""Make the actual request and fetch the data."""
url = self._urls[self.selection.label]
r = requests.get(url.format(**params))
if 'filetype' in params:
if params['filetype'] is 'csv':
data = csv.DictReader(r.iter_lines(decode_unicode=True), delimiter=';')
if params['filetype'] is 'json':
data = r.json()
data = r.text
return self.selection.load(data)
| mit | Python |
4dd9d571cc99c5558cff3e8e65cfd4214ad94a62 | Test default states | AstroHuntsman/POCS,AstroHuntsman/POCS,Guokr1991/POCS,AstroHuntsman/POCS,fmin2958/POCS,panoptes/POCS,AstroHuntsman/POCS,joshwalawender/POCS,joshwalawender/POCS,Guokr1991/POCS,joshwalawender/POCS,Guokr1991/POCS,panoptes/POCS,fmin2958/POCS,fmin2958/POCS,panoptes/POCS,panoptes/POCS,Guokr1991/POCS | panoptes/test/mount/test_ioptron.py | panoptes/test/mount/test_ioptron.py | from nose.tools import raises
import panoptes
from panoptes.mount.ioptron import Mount
class TestIOptron():
@raises(AssertionError)
def test_no_config_no_commands(self):
""" Mount needs a config """
mount = Mount()
@raises(AssertionError)
def test_config_bad_commands(self):
""" Passes in a default config but blank commands, which should error """
mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands={'foo': 'bar'})
def test_config_auto_commands(self):
""" Passes in config like above, but no commands, so they should read from defaults """
mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } })
def test_default_settings(self):
""" Passes in config like above, but no commands, so they should read from defaults """
mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } })
assert mount.is_connected is False
assert mount.is_initialized is False
assert mount.is_slewing is False | from nose.tools import raises
import panoptes
from panoptes.mount.ioptron import Mount
class TestIOptron():
@raises(AssertionError)
def test_no_config_no_commands(self):
""" Mount needs a config """
mount = Mount()
@raises(AssertionError)
def test_config_bad_commands(self):
""" Passes in a default config but blank commands, which should error """
mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }, commands={'foo': 'bar'})
def test_config_auto_commands(self):
""" Passes in config like above, but no commands, so they should read from defaults """
mount = Mount(config={'mount': { 'model': 'ioptron', 'port':'/dev/ttyUSB0' } }) | mit | Python |
94f5d70753c87250eb13a867c37779078198abed | fix typo | StellarCN/py-stellar-base | stellar_sdk/type_checked.py | stellar_sdk/type_checked.py | import os
from typing import Callable, overload
from typeguard import T_CallableOrType
from typeguard import typechecked as _typechecked
_STELLAR_SDK_ENFORCE_TYPE_CHECK_FLAG: str = "STELLAR_SDK_ENFORCE_TYPE_CHECK"
_STELLAR_SDK_ENFORCE_TYPE_CHECK: bool = os.getenv(
_STELLAR_SDK_ENFORCE_TYPE_CHECK_FLAG, "False"
).lower() in ("true", "1", "t")
@overload
def type_checked() -> Callable[[T_CallableOrType], T_CallableOrType]:
...
@overload
def type_checked(func: T_CallableOrType) -> T_CallableOrType:
...
def type_checked(
func=None,
):
return _typechecked(func=func, always=_STELLAR_SDK_ENFORCE_TYPE_CHECK)
| import os
from typing import Callable, overload
from typeguard import T_CallableOrType
from typeguard import typechecked as _typechecked
_STELLAR_SDK_ENFORCE_TYPE_CHECK_FLAG: str = "STELLAR_SDK_ENFORCE_TYPE_CHECK_FLAG"
_STELLAR_SDK_ENFORCE_TYPE_CHECK: bool = os.getenv(
_STELLAR_SDK_ENFORCE_TYPE_CHECK_FLAG, "False"
).lower() in ("true", "1", "t")
@overload
def type_checked() -> Callable[[T_CallableOrType], T_CallableOrType]:
...
@overload
def type_checked(func: T_CallableOrType) -> T_CallableOrType:
...
def type_checked(
func=None,
):
return _typechecked(func=func, always=_STELLAR_SDK_ENFORCE_TYPE_CHECK)
| apache-2.0 | Python |
8fb340577f89d8eb62425f324307cdb61e4beb6a | Rephrase note on `config.ini` | oemof/oemof.db | oemof/db/__init__.py | oemof/db/__init__.py | from configparser import NoOptionError as option, NoSectionError as section
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def engine(db_section="postGIS"):
"""Creates engine object for database access
If keyword argument `db_section` is used it requires an existing config.ini
file at the right location.
Parameters
----------
db_section : str, optional
Section (in config.ini) of targeted database containing connection
details that are used to set up connection
Returns
-------
engine : :class:`sqlalchemy.engine.Engine`
Engine for sqlalchemy
Notes
-----
For documentation on config.ini see the README section on
:ref:`configuring <readme#configuration>` :mod:`oemof.db`.
"""
pw = keyring.get_password(cfg.get(db_section, "database"),
cfg.get(db_section, "username"))
if pw is None:
try:
pw = cfg.get(db_section, "pw")
except option:
print("Unable to find the database password in " +
"the oemof config or keyring." +
"\nExiting.")
exit(-1)
except section:
print("Unable to find the 'postGIS' section in oemof's config." +
"\nExiting.")
exit(-1)
return create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get(db_section, "username"),
passwd=pw,
host=cfg.get(db_section, "host"),
db=cfg.get(db_section, "database"),
port=int(cfg.get(db_section, "port"))))
def connection(db_section="postGIS"):
"""Database connection method of sqlalchemy engine object
This function purely calls the `connect()` method of the engine object
returned by :py:func:`engine`.
For description of parameters see :py:func:`engine`.
"""
return engine(db_section=db_section).connect()
| from configparser import NoOptionError as option, NoSectionError as section
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def engine(db_section="postGIS"):
"""Creates engine object for database access
If keyword argument `db_section` is used it requires an existing config.ini
file at the right location.
Parameters
----------
db_section : str, optional
Section (in config.ini) of targeted database containing connection
details that are used to set up connection
Returns
-------
engine : :class:`sqlalchemy.engine.Engine`
Engine for sqlalchemy
Notes
-----
A description of how the config.ini is given within itself, see
:ref:`configuration <readme#configuration>`.
"""
pw = keyring.get_password(cfg.get(db_section, "database"),
cfg.get(db_section, "username"))
if pw is None:
try:
pw = cfg.get(db_section, "pw")
except option:
print("Unable to find the database password in " +
"the oemof config or keyring." +
"\nExiting.")
exit(-1)
except section:
print("Unable to find the 'postGIS' section in oemof's config." +
"\nExiting.")
exit(-1)
return create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get(db_section, "username"),
passwd=pw,
host=cfg.get(db_section, "host"),
db=cfg.get(db_section, "database"),
port=int(cfg.get(db_section, "port"))))
def connection(db_section="postGIS"):
"""Database connection method of sqlalchemy engine object
This function purely calls the `connect()` method of the engine object
returned by :py:func:`engine`.
For description of parameters see :py:func:`engine`.
"""
return engine(db_section=db_section).connect()
| mit | Python |
99dad2a9f46480d6fdf1994b63339200ad96bca2 | Remove build links | dbinetti/barberscore,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django,barberscore/barberscore-api,dbinetti/barberscore-django | project/apps/website/urls.py | project/apps/website/urls.py | from django.conf.urls import url
from django.conf.urls.static import static
from django.conf import settings
from . import views
urlpatterns = [
url(r'^$', views.home, name='home'),
url(r'^person/$', views.persons, name='persons'),
url(r'^chorus/$', views.choruses, name='choruses'),
url(r'^quartet/$', views.quartets, name='quartets'),
url(r'^song/$', views.songs, name='songs'),
# url(r'^build-chorus/$', views.build_chorus, name='build-chorus'),
# url(r'^build-quartet/$', views.build_quartet, name='build-quartet'),
# url(r'^build-song/$', views.build_song, name='build-song'),
# url(r'^build-person/$', views.build_person, name='build-person'),
url(r'^merge-groups/(?P<parent_id>[a-zA-Z0-9-]+)/(?P<child_id>[a-zA-Z0-9-]+)/$', views.merge_groups, name='merge-groups'),
url(r'^remove-group/(?P<parent_id>[a-zA-Z0-9-]+)/$', views.remove_group, name='remove-group'),
url(r'^merge-songs/(?P<parent_id>[a-zA-Z0-9-]+)/(?P<child_id>[a-zA-Z0-9-]+)/$', views.merge_songs, name='merge-songs'),
url(r'^remove-song/(?P<parent_id>[a-zA-Z0-9-]+)/$', views.remove_song, name='remove-song'),
url(r'^merge-persons/(?P<parent_id>[a-zA-Z0-9-]+)/(?P<child_id>[a-zA-Z0-9-]+)/$', views.merge_persons, name='merge-persons'),
url(r'^remove-person/(?P<parent_id>[a-zA-Z0-9-]+)/$', views.remove_person, name='remove-person'),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from django.conf.urls import url
from django.conf.urls.static import static
from django.conf import settings
from . import views
urlpatterns = [
url(r'^$', views.home, name='home'),
url(r'^person/$', views.persons, name='persons'),
url(r'^chorus/$', views.choruses, name='choruses'),
url(r'^quartet/$', views.quartets, name='quartets'),
url(r'^song/$', views.songs, name='songs'),
url(r'^build-chorus/$', views.build_chorus, name='build-chorus'),
url(r'^build-quartet/$', views.build_quartet, name='build-quartet'),
url(r'^build-song/$', views.build_song, name='build-song'),
url(r'^build-person/$', views.build_person, name='build-person'),
url(r'^merge-groups/(?P<parent_id>[a-zA-Z0-9-]+)/(?P<child_id>[a-zA-Z0-9-]+)/$', views.merge_groups, name='merge-groups'),
url(r'^remove-group/(?P<parent_id>[a-zA-Z0-9-]+)/$', views.remove_group, name='remove-group'),
url(r'^merge-songs/(?P<parent_id>[a-zA-Z0-9-]+)/(?P<child_id>[a-zA-Z0-9-]+)/$', views.merge_songs, name='merge-songs'),
url(r'^remove-song/(?P<parent_id>[a-zA-Z0-9-]+)/$', views.remove_song, name='remove-song'),
url(r'^merge-persons/(?P<parent_id>[a-zA-Z0-9-]+)/(?P<child_id>[a-zA-Z0-9-]+)/$', views.merge_persons, name='merge-persons'),
url(r'^remove-person/(?P<parent_id>[a-zA-Z0-9-]+)/$', views.remove_person, name='remove-person'),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| bsd-2-clause | Python |
32a8eb430b235349ffece6504e3fd8734943fca0 | Update settings.py | ResearchComputing/oide-simpleupload,ResearchComputing/oide-simpleupload,ResearchComputing/oide-simpleupload | oidesupl/settings.py | oidesupl/settings.py | import os
APP_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
APP_SPECIFICATION = {
'APP_DESCRIPTION': {
'name': 'Upload',
'link': '/#/supl',
'description': 'Simple HTTP Upload'
},
'NG_MODULE_NAME': 'supl',
'NG_MODULE_STYLESHEETS': (
'supl.css',
),
'NG_MODULE_SCRIPTS': (
'bower_components/es5-shim/es5-shim.min.js',
'bower_components/angular-file-upload/dist/angular-file-upload.min.js',
'supl.js',
),
}
| import os
APP_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
APP_SPECIFICATION = {
'APP_DESCRIPTION': {
'name': 'Upload',
'link': '/#/supl',
'description': 'Simple HTTP Upload'
},
'NG_MODULE_NAME': 'supl',
'NG_MODULE_STYLESHEETS': (
'supl.css',
),
'NG_MODULE_SCRIPTS': (
'bower_components/es5-shim/es5-shim.min.js',
'bower_components/angular-file-upload/angular-file-upload.min.js',
'supl.js',
),
}
| agpl-3.0 | Python |
a2bb91243a9c7259faee4b3ee792a8c20e5cd9f4 | Fix NoAuthorizationError (#19355) | zhouyao1994/incubator-superset,airbnb/caravel,airbnb/caravel,zhouyao1994/incubator-superset,airbnb/caravel,zhouyao1994/incubator-superset,zhouyao1994/incubator-superset,zhouyao1994/incubator-superset,airbnb/caravel | superset/views/users/api.py | superset/views/users/api.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from flask import g, Response
from flask_appbuilder.api import BaseApi, expose, safe
from flask_jwt_extended.exceptions import NoAuthorizationError
from .schemas import UserResponseSchema
user_response_schema = UserResponseSchema()
class CurrentUserRestApi(BaseApi):
""" An api to get information about the current user """
resource_name = "me"
openapi_spec_tag = "Current User"
openapi_spec_component_schemas = (UserResponseSchema,)
@expose("/", methods=["GET"])
@safe
def get_me(self) -> Response:
"""Get the user object corresponding to the agent making the request
---
get:
description: >-
Returns the user object corresponding to the agent making the request,
or returns a 401 error if the user is unauthenticated.
responses:
200:
description: The current user
content:
application/json:
schema:
type: object
properties:
result:
$ref: '#/components/schemas/UserResponseSchema'
401:
$ref: '#/components/responses/401'
"""
try:
if g.user is None or g.user.is_anonymous:
return self.response_401()
except NoAuthorizationError:
return self.response_401()
return self.response(200, result=user_response_schema.dump(g.user))
| # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from flask import g, Response
from flask_appbuilder.api import BaseApi, expose, safe
from .schemas import UserResponseSchema
user_response_schema = UserResponseSchema()
class CurrentUserRestApi(BaseApi):
""" An api to get information about the current user """
resource_name = "me"
openapi_spec_tag = "Current User"
openapi_spec_component_schemas = (UserResponseSchema,)
@expose("/", methods=["GET"])
@safe
def get_me(self) -> Response:
"""Get the user object corresponding to the agent making the request
---
get:
description: >-
Returns the user object corresponding to the agent making the request,
or returns a 401 error if the user is unauthenticated.
responses:
200:
description: The current user
content:
application/json:
schema:
type: object
properties:
result:
$ref: '#/components/schemas/UserResponseSchema'
401:
$ref: '#/components/responses/401'
"""
if g.user is None or g.user.is_anonymous:
return self.response_401()
return self.response(200, result=user_response_schema.dump(g.user))
| apache-2.0 | Python |
e92f10706aaf2249853b4407c8b803feb177e530 | Add in coyright | oldarmyc/pitchfork,rackerlabs/pitchfork,oldarmyc/pitchfork,rackerlabs/pitchfork,oldarmyc/pitchfork,rackerlabs/pitchfork | pitchfork/config/config.example.py | pitchfork/config/config.example.py | # Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_KWARGS = {'tz_aware': True}
MONGO_DATABASE = 'pitchfork'
ADMIN = 'cloud_username'
ADMIN_NAME = 'Admin Full Name'
SECRET_KEY = 'secret_key_for_cookie'
LOG_PATH = os.path.join(os.path.dirname(__file__), 'logs/devel.log')
| # Application config file
import os
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_KWARGS = {'tz_aware': True}
MONGO_DATABASE = 'pitchfork'
ADMIN = 'cloud_username'
ADMIN_NAME = 'Admin Full Name'
SECRET_KEY = 'secret_key_for_cookie'
LOG_PATH = os.path.join(os.path.dirname(__file__), 'logs/devel.log')
| apache-2.0 | Python |
5c68ed4ac5ca97a5836c1c3b9a65591af7781315 | Fix ikasan unittest | kfdm/promgen,kfdm/promgen,kfdm/promgen,kfdm/promgen | promgen/tests/test_ikasan.py | promgen/tests/test_ikasan.py | import json
from unittest import mock
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase, override_settings
from django.urls import reverse
from promgen import models
from promgen.sender.ikasan import SenderIkasan
from promgen.tests import TEST_ALERT, TEST_SETTINGS
_MESSAGE = '''node_down foo-BETA testhost.localhost:9100 node resolved
description: testhost.localhost:9100 of job node has been down for more than 5 minutes.
summary: Instance testhost.localhost:9100 down
Prometheus: https://monitoring.promehteus.localhost/graph#%5B%7B%22expr%22%3A%22up%20%3D%3D%200%22%2C%22tab%22%3A0%7D%5D
Alert Manager: https://am.promehteus.localhost'''
class IkasanTest(TestCase):
@mock.patch('django.db.models.signals.post_save', mock.Mock())
def setUp(self):
self.shard = models.Shard.objects.create(name='Shard 1')
self.service = models.Service.objects.create(name='Service 1', shard=self.shard)
self.project = models.Project.objects.create(name='Project 1', service=self.service)
project_type = ContentType.objects.get_for_model(self.project)
self.sender = models.Sender.objects.create(
object_id=self.project.id,
content_type_id=project_type.id,
sender=SenderIkasan.__module__,
value='#',
)
@override_settings(PROMGEN=TEST_SETTINGS)
@override_settings(CELERY_TASK_ALWAYS_EAGER=True)
@mock.patch('promgen.util.post')
def test_ikasan(self, mock_post):
self.client.post(reverse('alert'),
data=json.dumps(TEST_ALERT),
content_type='application/json'
)
mock_post.assert_has_calls([
mock.call(
'http://ikasan.example', {
'color': 'green',
'channel': '#',
'message_format': 'text',
'message': _MESSAGE}
)
])
| import json
from unittest import mock
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase, override_settings
from django.urls import reverse
from promgen import models
from promgen.sender.ikasan import SenderIkasan
from promgen.tests import TEST_ALERT, TEST_SETTINGS
_MESSAGE = '''node_down foo-BETA testhost.localhost:9100 node resolved
description: testhost.localhost:9100 of job node has been down for more than 5 minutes.
summary: Instance testhost.localhost:9100 down
Prometheus: https://monitoring.promehteus.localhost/graph#%5B%7B%22expr%22%3A%22up%20%3D%3D%200%22%2C%22tab%22%3A0%7D%5D
Alert Manager: https://am.promehteus.localhost'''
class IkasanTest(TestCase):
@mock.patch('django.db.models.signals.post_save', mock.Mock())
def setUp(self):
self.shard = models.Shard.objects.create(name='Shard 1')
self.service = models.Service.objects.create(name='Service 1', shard=self.shard)
self.project = models.Project.objects.create(name='Project 1', service=self.service)
project_type = ContentType.objects.get_for_model(self.project)
self.sender = models.Sender.objects.create(
object_id=self.project.id,
content_type_id=project_type.id,
sender=SenderIkasan.__module__,
value='#',
)
@override_settings(PROMGEN=TEST_SETTINGS)
@override_settings(CELERY_TASK_ALWAYS_EAGER=True)
@mock.patch('promgen.util.post')
def test_ikasan(self, mock_post):
self.client.post(reverse('alert'),
data=json.dumps(TEST_ALERT),
content_type='application/json'
)
mock_post.assert_has_calls([
mock.call(
'http://ikasan.example', {
'color': 'green',
'channel': '#',
'message': _MESSAGE}
)
])
| mit | Python |
72eebc13ed443a3108d0fad69b20bcd0dbf13d33 | Format catimg --help correctly | asmeurer/catimg | catimg/__main__.py | catimg/__main__.py | import sys
import argparse
from iterm2_tools import display_image_file
from .imgur import update_img_cache, get_random_image
from . import __version__, __doc__
def main():
parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('-f', '--files', nargs='+', metavar='IMAGES', default=(),
help='Files to display')
parser.add_argument('-v', '--verbose', action='store_true', help="Show verbose output")
parser.add_argument('-V', '--version', action='version', version='catimg ' + __version__)
parser.add_argument('--update-cache', action='store_true', help="Update the image cache and exit")
parser.add_argument('--no-download', action='store_false', dest='download', default=True, help="Don't download anything from the internet")
parser.add_argument('--no-delete', action='store_false', dest='delete',
default=True, help="Don't delete old images")
args = parser.parse_args()
for img in args.files:
display_image_file(img)
print()
if not args.files:
if args.update_cache:
update_img_cache(verbose=args.verbose)
return
image = get_random_image(verbose=args.verbose)
if not image and args.download:
print("No cat images found, downloading...")
update_img_cache(verbose=args.verbose)
image = get_random_image(delete=args.delete, verbose=args.verbose)
if image:
display_image_file(image)
print()
if __name__ == '__main__':
sys.exit(main())
| import sys
import argparse
from iterm2_tools import display_image_file
from .imgur import update_img_cache, get_random_image
from . import __version__, __doc__
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-f', '--files', nargs='+', metavar='IMAGES', default=(),
help='Files to display')
parser.add_argument('-v', '--verbose', action='store_true', help="Show verbose output")
parser.add_argument('-V', '--version', action='version', version='catimg ' + __version__)
parser.add_argument('--update-cache', action='store_true', help="Update the image cache and exit")
parser.add_argument('--no-download', action='store_false', dest='download', default=True, help="Don't download anything from the internet")
parser.add_argument('--no-delete', action='store_false', dest='delete',
default=True, help="Don't delete old images")
args = parser.parse_args()
for img in args.files:
display_image_file(img)
print()
if not args.files:
if args.update_cache:
update_img_cache(verbose=args.verbose)
return
image = get_random_image(verbose=args.verbose)
if not image and args.download:
print("No cat images found, downloading...")
update_img_cache(verbose=args.verbose)
image = get_random_image(delete=args.delete, verbose=args.verbose)
if image:
display_image_file(image)
print()
if __name__ == '__main__':
sys.exit(main())
| mit | Python |
6a3a28ae45915571fbfcc594789c4d41e9750633 | bump version number to rc5 | jepegit/cellpy,jepegit/cellpy | cellpy/_version.py | cellpy/_version.py | version_info = (0, 3, 0, "rc5")
__version__ = '.'.join(map(str, version_info))
| version_info = (0, 3, 0, "rc4")
__version__ = '.'.join(map(str, version_info))
| mit | Python |
89cb89b521771cc217a1f26ee21ba978c0a55822 | bump version to 0.9.66 | craigahobbs/chisel | chisel/__init__.py | chisel/__init__.py | # Copyright (C) 2012-2017 Craig Hobbs
#
# Licensed under the MIT License
# https://github.com/craigahobbs/chisel/blob/master/LICENSE
__version__ = '0.9.66'
from .action import \
action, \
Action, \
ActionError
from .app import \
Application, \
Context
from .app_defs import \
Environ, \
StartResponse
from .doc import \
DocAction, \
DocPage, \
Element
from .model import \
ValidationError, \
ValidationMode
from .request import \
request, \
Request
from .spec import \
SpecParser, \
SpecParserError
from .url import \
decode_query_string, \
encode_query_string
from .util import \
JSONEncoder, \
TZLOCAL, \
TZUTC
| # Copyright (C) 2012-2017 Craig Hobbs
#
# Licensed under the MIT License
# https://github.com/craigahobbs/chisel/blob/master/LICENSE
__version__ = '0.9.65'
from .action import \
action, \
Action, \
ActionError
from .app import \
Application, \
Context
from .app_defs import \
Environ, \
StartResponse
from .doc import \
DocAction, \
DocPage, \
Element
from .model import \
ValidationError, \
ValidationMode
from .request import \
request, \
Request
from .spec import \
SpecParser, \
SpecParserError
from .url import \
decode_query_string, \
encode_query_string
from .util import \
JSONEncoder, \
TZLOCAL, \
TZUTC
| mit | Python |
784a5264d9247623ff8c942f3a56f3843e57659c | Increase time limits by 10x to help race condition | openai/universe,rht/universe | universe/wrappers/tests/test_time_limit.py | universe/wrappers/tests/test_time_limit.py | import gym
import time
import universe
from gym.envs import register
from universe import wrappers
register(
id='test.SecondsLimitDummyVNCEnv-v0',
entry_point='universe.envs:DummyVNCEnv',
tags={
'vnc': True,
'wrapper_config.TimeLimit.max_episode_seconds': 0.1
}
)
register(
id='test.StepsLimitDummyVNCEnv-v0',
entry_point='universe.envs:DummyVNCEnv',
tags={
'vnc': True,
'wrapper_config.TimeLimit.max_episode_steps': 2
}
)
def test_steps_limit_restart():
env = gym.make('test.StepsLimitDummyVNCEnv-v0')
env = wrappers.TimeLimit(env)
env.configure(_n=1)
env.reset()
assert env._max_episode_seconds == None
assert env._max_episode_steps == 2
# Episode has started
_, _, done, info = env.step([[]])
assert done == [False]
# Limit reached, now we get a done signal and the env resets itself
_, _, done, info = env.step([[]])
assert done == [True]
assert env._elapsed_steps == 0
def test_steps_limit_restart_unused_when_not_wrapped():
env = gym.make('test.StepsLimitDummyVNCEnv-v0')
env.configure(_n=1)
env.reset()
for i in range(10):
_, _, done, info = env.step([[]])
assert done == [False]
def test_seconds_limit_restart():
env = gym.make('test.SecondsLimitDummyVNCEnv-v0')
env = wrappers.TimeLimit(env)
env.configure(_n=1)
env.reset()
assert env._max_episode_seconds == 0.1
assert env._max_episode_steps == None
# Episode has started
_, _, done, info = env.step([[]])
assert done == [False]
# Not enough time has passed
_, _, done, info = env.step([[]])
assert done == [False]
time.sleep(0.2)
# Limit reached, now we get a done signal and the env resets itself
_, _, done, info = env.step([[]])
assert done == [True]
def test_default_time_limit():
# We need an env without a default limit
register(
id='test.NoLimitDummyVNCEnv-v0',
entry_point='universe.envs:DummyVNCEnv',
tags={
'vnc': True,
},
)
env = gym.make('test.NoLimitDummyVNCEnv-v0')
env = wrappers.TimeLimit(env)
env.configure(_n=1)
env.reset()
assert env._max_episode_seconds == wrappers.time_limit.DEFAULT_MAX_EPISODE_SECONDS
assert env._max_episode_steps == None
| import gym
import time
import universe
from gym.envs import register
from universe import wrappers
register(
id='test.SecondsLimitDummyVNCEnv-v0',
entry_point='universe.envs:DummyVNCEnv',
tags={
'vnc': True,
'wrapper_config.TimeLimit.max_episode_seconds': 0.01
}
)
register(
id='test.StepsLimitDummyVNCEnv-v0',
entry_point='universe.envs:DummyVNCEnv',
tags={
'vnc': True,
'wrapper_config.TimeLimit.max_episode_steps': 2
}
)
def test_steps_limit_restart():
env = gym.make('test.StepsLimitDummyVNCEnv-v0')
env = wrappers.TimeLimit(env)
env.configure(_n=1)
env.reset()
assert env._max_episode_seconds == None
assert env._max_episode_steps == 2
# Episode has started
_, _, done, info = env.step([[]])
assert done == [False]
# Limit reached, now we get a done signal and the env resets itself
_, _, done, info = env.step([[]])
assert done == [True]
assert env._elapsed_steps == 0
def test_steps_limit_restart_unused_when_not_wrapped():
env = gym.make('test.StepsLimitDummyVNCEnv-v0')
env.configure(_n=1)
env.reset()
for i in range(10):
_, _, done, info = env.step([[]])
assert done == [False]
def test_seconds_limit_restart():
env = gym.make('test.SecondsLimitDummyVNCEnv-v0')
env = wrappers.TimeLimit(env)
env.configure(_n=1)
env.reset()
assert env._max_episode_seconds == 0.01
assert env._max_episode_steps == None
# Episode has started
_, _, done, info = env.step([[]])
assert done == [False]
# Not enough time has passed
_, _, done, info = env.step([[]])
assert done == [False]
time.sleep(0.02)
# Limit reached, now we get a done signal and the env resets itself
_, _, done, info = env.step([[]])
assert done == [True]
def test_default_time_limit():
# We need an env without a default limit
register(
id='test.NoLimitDummyVNCEnv-v0',
entry_point='universe.envs:DummyVNCEnv',
tags={
'vnc': True,
},
)
env = gym.make('test.NoLimitDummyVNCEnv-v0')
env = wrappers.TimeLimit(env)
env.configure(_n=1)
env.reset()
assert env._max_episode_seconds == wrappers.time_limit.DEFAULT_MAX_EPISODE_SECONDS
assert env._max_episode_steps == None
| mit | Python |
48c0e72d7df002c3e6548b8453a0781e7efacbdc | Revert to original try/except style, this time with correct Exception! | laterpay/djtranslationchecker | djtranslationchecker/management/commands/translationchecker.py | djtranslationchecker/management/commands/translationchecker.py | from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db.models import get_app_paths
import fnmatch
import importlib
import subprocess
import os
class Command(BaseCommand):
help = "Checks your message files for missing or fuzzy translations"
def handle(self, *args, **kwargs):
errors = []
for po_filepath in get_po_filepaths():
self.stdout.write("Checking %s" % po_filepath)
out = check_po_for_fuzzy_translations(po_filepath)
if out:
errors.append("Fuzzy translation(s) found in %s" % po_filepath)
out = check_po_for_untranslated(po_filepath)
if out:
errors.append("Untranslated message(s) found in %s" % po_filepath)
if errors:
raise CommandError('\n' + '\n'.join(errors)) # This is the best way I've found of setting management command exit code to nonzero
def check_po_for_fuzzy_translations(po_filepath):
    """Return msgattrib's output for fuzzy entries in *po_filepath*, or None."""
    msgattrib_args = ["msgattrib", "--only-fuzzy"]
    return check_po(msgattrib_args, po_filepath)
def check_po_for_untranslated(po_filepath):
    """Return msgattrib's output for untranslated entries in *po_filepath*, or None."""
    msgattrib_args = ["msgattrib", "--untranslated"]
    return check_po(msgattrib_args, po_filepath)
def check_po(command, po_filepath):
    """
    :: [String] -> FilePath -> Maybe String

    Run *command* with *po_filepath* appended and return its stdout (bytes),
    or None when the command produced no output.

    Until I do some funky parsing, treat this as returning Maybe OpaqueErrorType
    """
    p = subprocess.Popen(command + [po_filepath], stdout=subprocess.PIPE)
    # communicate() drains stdout while waiting for the process, avoiding the
    # pipe-buffer deadlock that wait()-before-read can cause on large output;
    # it also closes the pipe for us.
    output, _ = p.communicate()
    if output:
        return output
    return None
def get_po_filepaths():
    """Return the path of every .po file found under the project's apps.

    Prefers settings.PROJECT_APPS when it is defined, otherwise falls back
    to settings.INSTALLED_APPS.
    """
    try:
        app_names = settings.PROJECT_APPS
    except AttributeError:
        app_names = settings.INSTALLED_APPS

    po_paths = []
    for app_name in app_names:
        app_dir = os.path.dirname(importlib.import_module(app_name).__file__)
        for root, _dirnames, filenames in os.walk(app_dir):
            po_paths.extend(
                os.path.join(root, po_name)
                for po_name in fnmatch.filter(filenames, '*.po'))
    return po_paths
| from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db.models import get_app_paths
import fnmatch
import importlib
import subprocess
import os
class Command(BaseCommand):
    """Management command that fails when any .po file contains fuzzy or
    untranslated messages."""

    help = "Checks your message files for missing or fuzzy translations"

    def handle(self, *args, **kwargs):
        errors = []
        for po_filepath in get_po_filepaths():
            self.stdout.write("Checking %s" % po_filepath)
            out = check_po_for_fuzzy_translations(po_filepath)
            if out:
                errors.append("Fuzzy translation(s) found in %s" % po_filepath)
            out = check_po_for_untranslated(po_filepath)
            if out:
                errors.append("Untranslated message(s) found in %s" % po_filepath)
        if errors:
            raise CommandError('\n' + '\n'.join(errors)) # This is the best way I've found of setting management command exit code to nonzero
def check_po_for_fuzzy_translations(po_filepath):
    """Return msgattrib's output for fuzzy entries in *po_filepath*, or None."""
    return check_po(["msgattrib", "--only-fuzzy"], po_filepath)
def check_po_for_untranslated(po_filepath):
    """Return msgattrib's output for untranslated entries in *po_filepath*, or None."""
    return check_po(["msgattrib", "--untranslated"], po_filepath)
def check_po(command, po_filepath):
    """
    :: [String] -> FilePath -> Maybe String

    Run *command* with *po_filepath* appended and return its stdout (bytes),
    or None when the command produced no output.

    Until I do some funky parsing, treat this as returning Maybe OpaqueErrorType
    """
    p = subprocess.Popen(command + [po_filepath], stdout=subprocess.PIPE)
    # communicate() drains stdout while waiting for the process, avoiding the
    # pipe-buffer deadlock that wait()-before-read can cause on large output;
    # it also closes the pipe for us.
    output, _ = p.communicate()
    if output:
        return output
    return None
def get_po_filepaths():
    """Return the path of every .po file found under the project's apps.

    Uses settings.PROJECT_APPS when present, else settings.INSTALLED_APPS.
    """
    # NOTE: getattr evaluates the settings.INSTALLED_APPS default eagerly even
    # when PROJECT_APPS exists (harmless, since INSTALLED_APPS always exists).
    apps = getattr(settings, 'PROJECT_APPS', settings.INSTALLED_APPS)
    pos = []
    for app_name in apps:
        # Locate the app package on disk via its imported module's __file__.
        path = os.path.dirname(importlib.import_module(app_name).__file__)
        for root, dirnames, filenames in os.walk(path):
            for filename in fnmatch.filter(filenames, '*.po'):
                pos.append(os.path.join(root, filename))
    return pos
| mit | Python |
e618764e86d530b961193a615c47b25a49fac927 | Fix bug | OpenNewsLabs/pybossa,geotagx/pybossa,inteligencia-coletiva-lsd/pybossa,inteligencia-coletiva-lsd/pybossa,PyBossa/pybossa,stefanhahmann/pybossa,Scifabric/pybossa,jean/pybossa,stefanhahmann/pybossa,geotagx/pybossa,jean/pybossa,PyBossa/pybossa,OpenNewsLabs/pybossa,Scifabric/pybossa | pybossa/sentinel/__init__.py | pybossa/sentinel/__init__.py | from redis import sentinel
class Sentinel(object):
    """Flask-extension-style wrapper exposing Redis master/slave connections
    discovered via Sentinel."""

    def __init__(self, app=None):
        # Standard Flask extension pattern: allow deferred init_app().
        self.app = app
        if app is not None: # pragma: no cover
            self.init_app(app)

    def init_app(self, app):
        # REDIS_SENTINEL holds the sentinel address list -- presumably in the
        # (host, port) pair format redis-py expects; confirm against config.
        self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
                                            socket_timeout=0.1)
        # Fall back to db 0 when REDIS_DB is missing or falsy.
        redis_db = app.config.get('REDIS_DB') or 0
        self.master = self.connection.master_for('mymaster', db=redis_db)
        self.slave = self.connection.slave_for('mymaster', db=redis_db)
| from redis import sentinel
class Sentinel(object):
    """Flask-extension-style wrapper exposing Redis master/slave connections
    discovered via Sentinel."""

    def __init__(self, app=None):
        # Standard Flask extension pattern: allow deferred init_app().
        self.app = app
        if app is not None: # pragma: no cover
            self.init_app(app)

    def init_app(self, app):
        self.connection = sentinel.Sentinel(app.config['REDIS_SENTINEL'],
                                            socket_timeout=0.1)
        # app.config['REDIS_DB'] raises KeyError when the key is absent;
        # use .get() so a missing setting falls back to db 0.
        redis_db = app.config.get('REDIS_DB') or 0
        self.master = self.connection.master_for('mymaster', db=redis_db)
        self.slave = self.connection.slave_for('mymaster', db=redis_db)
| agpl-3.0 | Python |
4eea5c3ce0caa121ac6b12ac4107f4650f74679d | Fix warnings. | pyflakes/pyflakes,PyCQA/pyflakes,jayvdb/pyflakes,bitglue/pyflakes,nikolas/pyflakes,asmeurer/pyflakes,epsy/pyflakes | pyflakes/scripts/pyflakes.py | pyflakes/scripts/pyflakes.py | """
Implementation of the command-line I{pyflakes} tool.
"""
from __future__ import absolute_import
# For backward compatibility
__all__ = ['check', 'checkPath', 'checkRecursive', 'iterSourceCode', 'main']
from pyflakes.api import check, checkPath, checkRecursive, iterSourceCode, main
| """
Implementation of the command-line I{pyflakes} tool.
"""
from __future__ import absolute_import
# For backward compatibility
from pyflakes.api import check, checkPath, checkRecursive, iterSourceCode, main
| mit | Python |
8bac5a68b9668092e7c1316f6b2727e668e5e38d | Bump version | thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader | pytablereader/__version__.py | pytablereader/__version__.py | # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.24.2"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.24.1"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| mit | Python |
bcf64f2394465f1002ace06e7c02074a91a70a9a | Bump version | thombashi/pytablewriter | pytablewriter/__version__.py | pytablewriter/__version__.py | __author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016, {}".format(__author__)
__license__ = "MIT License"
__version__ = "0.59.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| __author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016, {}".format(__author__)
__license__ = "MIT License"
__version__ = "0.58.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| mit | Python |
dd0845f1ff3dbd6754867d3f61ffc2be5d019e4b | Fix #40: Use shop.redirect correctly | armicron/plata,armicron/plata,armicron/plata | plata/payment/modules/cod.py | plata/payment/modules/cod.py | """
Payment module for cash on delivery handling
Automatically completes every order passed.
"""
from datetime import datetime
import logging
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _
from plata.payment.modules.base import ProcessorBase
from plata.product.stock.models import StockTransaction
from plata.shop.models import OrderPayment
logger = logging.getLogger('plata.payment.cod')
class PaymentProcessor(ProcessorBase):
    """Payment processor that authorizes every confirmed order immediately
    (cash on delivery has no external gateway round-trip)."""

    key = 'cod'
    default_name = _('Cash on delivery')

    def process_order_confirmed(self, request, order):
        # Nothing left to pay: short-circuit.
        if not order.balance_remaining:
            return self.already_paid(order)

        logger.info('Processing order %s using COD' % order)

        # Record the payment and mark it authorized right away.
        payment = self.create_pending_payment(order)
        payment.status = OrderPayment.AUTHORIZED
        payment.authorized = datetime.now()
        payment.save()
        # Re-fetch so the order reflects the just-saved payment.
        order = order.reload()

        self.create_transactions(order, _('sale'),
            type=StockTransaction.SALE, negative=True, payment=payment)
        self.order_paid(order, payment=payment)

        return self.shop.redirect('plata_order_success')
| """
Payment module for cash on delivery handling
Automatically completes every order passed.
"""
from datetime import datetime
import logging
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _
from plata.payment.modules.base import ProcessorBase
from plata.product.stock.models import StockTransaction
from plata.shop.models import OrderPayment
logger = logging.getLogger('plata.payment.cod')
class PaymentProcessor(ProcessorBase):
    """Payment processor that authorizes every confirmed order immediately
    (cash on delivery has no external gateway round-trip)."""

    key = 'cod'
    default_name = _('Cash on delivery')

    def process_order_confirmed(self, request, order):
        # Nothing left to pay: short-circuit.
        if not order.balance_remaining:
            return self.already_paid(order)

        logger.info('Processing order %s using COD' % order)

        # Record the payment and mark it authorized right away.
        payment = self.create_pending_payment(order)
        payment.status = OrderPayment.AUTHORIZED
        payment.authorized = datetime.now()
        payment.save()
        # Re-fetch so the order reflects the just-saved payment.
        order = order.reload()

        self.create_transactions(order, _('sale'),
            type=StockTransaction.SALE, negative=True, payment=payment)
        self.order_paid(order, payment=payment)

        # Redirect via the shop object -- the redirect helper lives on the
        # shop, not on the processor (plata issue #40).
        return self.shop.redirect('plata_order_success')
| bsd-3-clause | Python |
ba7da40c8893a35d9f9b807ce88d7c79eb53e256 | Test fix | evernym/plenum,evernym/zeno | plenum/test/node/test_api.py | plenum/test/node/test_api.py | import pytest
from common.exceptions import LogicError
from plenum.common.constants import TXN_TYPE, TXN_PAYLOAD, TXN_PAYLOAD_METADATA, TXN_PAYLOAD_METADATA_DIGEST, \
TXN_PAYLOAD_TYPE, TXN_PAYLOAD_DATA, TXN_PAYLOAD_METADATA_REQ_ID, TXN_METADATA, TXN_METADATA_SEQ_NO, \
TXN_PAYLOAD_METADATA_PAYLOAD_DIGEST
from plenum.common.request import Request
def test_on_view_change_complete_fails(test_node):
    # on_view_change_complete() must raise while replicas still lack primaries.
    with pytest.raises(LogicError) as excinfo:
        test_node.on_view_change_complete()
    assert "Not all replicas have primaries" in str(excinfo.value)
def test_ledger_id_for_request_fails(test_node):
    # A request whose TXN_TYPE is absent or None cannot be mapped to a ledger.
    for r in (Request(operation={}), Request(operation={TXN_TYPE: None})):
        with pytest.raises(ValueError) as excinfo:
            test_node.ledger_id_for_request(r)
        assert "TXN_TYPE is not defined for request" in str(excinfo.value)
def test_seq_no_db_updates(test_node):
    # With updateSeqNo=False a catch-up txn must not grow the seqNo store.
    oldSize = test_node.seqNoDB.size
    test_txn = {
        TXN_PAYLOAD: {
            TXN_PAYLOAD_TYPE: "2",
            TXN_PAYLOAD_METADATA: {
                TXN_PAYLOAD_METADATA_DIGEST: "11222",
                TXN_PAYLOAD_METADATA_PAYLOAD_DIGEST: "112222",
            },
            TXN_PAYLOAD_DATA: {}
        }
    }
    # Third positional argument (False) suppresses the seqNo recording.
    test_node.postTxnFromCatchupAddedToLedger(2, test_txn, False)
    assert oldSize == test_node.seqNoDB.size
def test_seq_no_db_updates_by_default(test_node):
    # Without an explicit flag the catch-up txn must grow the seqNo store by one.
    oldSize = test_node.seqNoDB.size
    test_txn = {
        TXN_PAYLOAD: {
            TXN_PAYLOAD_TYPE: "2",
            TXN_PAYLOAD_METADATA: {
                TXN_PAYLOAD_METADATA_DIGEST: "11222",
                TXN_PAYLOAD_METADATA_PAYLOAD_DIGEST: "112222",
                TXN_PAYLOAD_METADATA_REQ_ID: "12"
            },
            TXN_PAYLOAD_DATA: {}
        },
        TXN_METADATA: {
            TXN_METADATA_SEQ_NO: "1"
        }
    }
    test_node.postTxnFromCatchupAddedToLedger(2, test_txn)
    assert oldSize + 1 == test_node.seqNoDB.size
| import pytest
from common.exceptions import LogicError
from plenum.common.constants import TXN_TYPE, TXN_PAYLOAD, TXN_PAYLOAD_METADATA, TXN_PAYLOAD_METADATA_DIGEST, \
TXN_PAYLOAD_TYPE, TXN_PAYLOAD_DATA, TXN_PAYLOAD_METADATA_REQ_ID, TXN_METADATA, TXN_METADATA_SEQ_NO
from plenum.common.request import Request
def test_on_view_change_complete_fails(test_node):
    # on_view_change_complete() must raise while replicas still lack primaries.
    with pytest.raises(LogicError) as excinfo:
        test_node.on_view_change_complete()
    assert "Not all replicas have primaries" in str(excinfo.value)
def test_ledger_id_for_request_fails(test_node):
    # A request whose TXN_TYPE is absent or None cannot be mapped to a ledger.
    for r in (Request(operation={}), Request(operation={TXN_TYPE: None})):
        with pytest.raises(ValueError) as excinfo:
            test_node.ledger_id_for_request(r)
        assert "TXN_TYPE is not defined for request" in str(excinfo.value)
def test_seq_no_db_updates(test_node):
    # With updateSeqNo=False a catch-up txn must not grow the seqNo store.
    oldSize = test_node.seqNoDB.size
    test_txn = {
        TXN_PAYLOAD: {
            TXN_PAYLOAD_TYPE: "2",
            TXN_PAYLOAD_METADATA: {
                TXN_PAYLOAD_METADATA_DIGEST: "11222"
            },
            TXN_PAYLOAD_DATA: {}
        }
    }
    # Third positional argument (False) suppresses the seqNo recording.
    test_node.postTxnFromCatchupAddedToLedger(2, test_txn, False)
    assert oldSize == test_node.seqNoDB.size
def test_seq_no_db_updates_by_default(test_node):
    # Without an explicit flag the catch-up txn must grow the seqNo store by one.
    oldSize = test_node.seqNoDB.size
    test_txn = {
        TXN_PAYLOAD: {
            TXN_PAYLOAD_TYPE: "2",
            TXN_PAYLOAD_METADATA: {
                TXN_PAYLOAD_METADATA_DIGEST: "11222",
                TXN_PAYLOAD_METADATA_REQ_ID: "12"
            },
            TXN_PAYLOAD_DATA: {}
        },
        TXN_METADATA: {
            TXN_METADATA_SEQ_NO: "1"
        }
    }
    test_node.postTxnFromCatchupAddedToLedger(2, test_txn)
    assert oldSize + 1 == test_node.seqNoDB.size
| apache-2.0 | Python |
99d16198b5b61ba13a441a6546ccd1f7ce0b91bc | Add octicons in font test script | mkofinas/prompt-support,mkofinas/prompt-support | test/symbols/show_glyphs.py | test/symbols/show_glyphs.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e5fa"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f295"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
octicons_start = "f400"
octicons_end = "f4e5"
print "\nOcticons"
for ii in xrange(int(octicons_start, 16), int(octicons_end, 16) + 1):
print unichr(ii),
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Python 2 script: dump each patched-font glyph range to the terminal so
# missing symbols can be spotted visually.

# Inclusive hex codepoint bounds for each font's range.
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
    print unichr(ii),

custom_start = "e5fa"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
    print unichr(ii),

font_awesome_start = "f000"
font_awesome_end = "f295"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
    print unichr(ii),

powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
    print unichr(ii),
| mit | Python |
6b1cf995116ed060679477c0496e98836a05e6af | decrease timeout to 2 seconds for bad-server tests | MSLNZ/msl-loadlib,MSLNZ/msl-loadlib,MSLNZ/msl-loadlib,MSLNZ/msl-loadlib,MSLNZ/msl-loadlib | tests/bad_servers/client.py | tests/bad_servers/client.py | from msl.loadlib import Client64
class Client(Client64):
    """64-bit client used by the bad-server tests; loads *module32*."""

    def __init__(self, module32):
        # Short 2-second timeout so tests against misbehaving servers fail fast.
        super(Client, self).__init__(module32, timeout=2)
| from msl.loadlib import Client64
class Client(Client64):
    """64-bit client used by the bad-server tests; loads *module32*."""

    def __init__(self, module32):
        # 5-second timeout for the 32-bit server handshake.
        super(Client, self).__init__(module32, timeout=5)
| mit | Python |
fb68461b249602fc3568281fb7eaa4fa2aff8072 | exclude duplicate/ambiguous district codes in epsom+ewell import | DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_epsom_and_ewell.py | polling_stations/apps/data_collection/management/commands/import_epsom_and_ewell.py | from data_collection.morph_importer import BaseMorphApiImporter
class Command(BaseMorphApiImporter):
    """Import polling districts/stations for Epsom & Ewell (E07000208),
    skipping district codes that appear with more than one polygon."""

    srid = 27700
    districts_srid = 27700
    council_id = 'E07000208'
    elections = ['local.surrey.2017-05-04']
    scraper_name = 'wdiv-scrapers/DC-PollingStations-EpsomAndEwell'
    geom_type = 'gml'
    # NOTE(review): class-level mutable set is shared across instances;
    # fine for a one-shot management command, but verify if this class is
    # ever instantiated more than once per process.
    duplicate_districts = set()

    def pre_import(self):
        self.find_duplicate_districts()

    def find_duplicate_districts(self):
        # identify any district codes which appear
        # more than once (with 2 different polygons)
        # We do not want to import these.
        seen = set()
        districts = self.get_districts()
        for district in districts:
            if str(district['wardcode']) in seen:
                self.duplicate_districts.add(str(district['wardcode']))
            seen.add(str(district['wardcode']))

    def get_station_hash(self, record):
        # handle exact dupes on code/address
        return "-".join([
            record['wardname'],
            record['uprn']
        ])

    def district_record_to_dict(self, record):
        # Ambiguous code (two different polygons): skip this district.
        if record['wardcode'] in self.duplicate_districts:
            return None
        # Only extract the geometry for districts we are actually importing
        # (the original extracted it before the exclusion check, and used a
        # redundant `else :` after the early return).
        poly = self.extract_geometry(record, self.geom_type, self.get_srid('districts'))
        return {
            'internal_council_id': record['wardcode'],
            'name': record['wardcode'],
            'area': poly,
            'polling_station_id': record['wardcode'],
        }

    def station_record_to_dict(self, record):
        location = self.extract_geometry(record, self.geom_type, self.get_srid('stations'))
        return {
            'internal_council_id': record['wardname'],
            'postcode': '',
            'address': record['address'],
            'location': location,
        }
| from data_collection.morph_importer import BaseMorphApiImporter
class Command(BaseMorphApiImporter):
    """Import polling districts and stations for Epsom & Ewell (E07000208)
    from the morph.io scraper output."""

    srid = 27700
    districts_srid = 27700
    council_id = 'E07000208'
    elections = ['local.surrey.2017-05-04']
    scraper_name = 'wdiv-scrapers/DC-PollingStations-EpsomAndEwell'
    geom_type = 'gml'

    def get_station_hash(self, record):
        # handle exact dupes on code/address
        return "-".join([
            record['wardname'],
            record['uprn']
        ])

    def district_record_to_dict(self, record):
        # Geometry arrives as GML in EPSG:27700 (British National Grid).
        poly = self.extract_geometry(record, self.geom_type, self.get_srid('districts'))
        return {
            'internal_council_id': record['wardcode'],
            'name' : record['wardcode'],
            'area' : poly,
            'polling_station_id' : record['wardcode'],
        }

    def station_record_to_dict(self, record):
        location = self.extract_geometry(record, self.geom_type, self.get_srid('stations'))
        return {
            'internal_council_id': record['wardname'],
            'postcode': '',
            'address': record['address'],
            'location': location,
        }
| bsd-3-clause | Python |
23da7e360533d166eaccd70a39502625d461ae75 | truncate output in html preview | c3nav/c3nav,c3nav/c3nav,c3nav/c3nav,c3nav/c3nav | src/c3nav/api/__init__.py | src/c3nav/api/__init__.py | from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
    """Render JSON with re-indentation, truncating long top-level lists for
    the browsable HTML preview."""
    # No indentation requested (plain API response): use the stock renderer.
    if self.get_indent(accepted_media_type, renderer_context) is None:
        return orig_render(self, data, accepted_media_type, renderer_context)
    # Only truncate top-level lists/tuples with more than two elements.
    shorten = isinstance(data, (list, tuple)) and len(data) > 2
    orig_len = None
    if shorten:
        orig_len = len(data)-2
        data = data[:2]
    result = json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
    if shorten:
        # Splice the truncation notice just before the closing bracket.
        # NOTE(review): assumes the rendered bytes end with a two-character
        # closing sequence -- confirm against json_encoder_reindent's output.
        result = (result[:-2] +
                  ('\n ...%d more elements (truncated for HTML preview)...' % orig_len).encode() +
                  result[-2:])
    return result
# Monkey patch for nicer indentation in the django rest framework
JSONRenderer.render = nicer_renderer
| from functools import wraps
from rest_framework.renderers import JSONRenderer
from c3nav.mapdata.utils import json_encoder_reindent
orig_render = JSONRenderer.render
@wraps(JSONRenderer.render)
def nicer_renderer(self, data, accepted_media_type=None, renderer_context=None):
    """Render JSON, re-indenting whenever an indent is requested."""
    # No indentation requested (plain API response): use the stock renderer.
    if self.get_indent(accepted_media_type, renderer_context) is None:
        return orig_render(self, data, accepted_media_type, renderer_context)
    return json_encoder_reindent(lambda d: orig_render(self, d, accepted_media_type, renderer_context), data)
# Monkey patch for nicer indentation in the django rest framework
JSONRenderer.render = nicer_renderer
| apache-2.0 | Python |
ad7199dfe666e7b988c58e9cc51be929ef3af37d | Update version. | r3c/Creep | creep/__init__.py | creep/__init__.py | #!/usr/bin/env python
__version__ = '0.3.2'
| #!/usr/bin/env python
__version__ = '0.3.1'
| mit | Python |
c82dd9e98545111df1823b4b3f682f31fcaa8448 | Update help message | thombashi/tcconfig,thombashi/tcconfig | tcconfig/_argparse_wrapper.py | tcconfig/_argparse_wrapper.py | #!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
import argparse
import logbook
from ._const import TcCoomandOutput
class ArgparseWrapper(object):
    """
    wrapper class of argparse

    Builds the common argument groups (--version, tc-command output mode,
    log level) shared by the tcconfig command-line tools.
    """

    def __init__(self, version, description="", epilog=""):
        self.parser = argparse.ArgumentParser(
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=description, epilog=epilog)
        self.parser.add_argument(
            "--version", action="version", version="%(prog)s " + version)

        self._add_tc_command_arg_group()
        self._add_log_level_argument_group()

    def _add_log_level_argument_group(self):
        # --debug and --quiet both write to "log_level" and are mutually
        # exclusive; the default (neither given) is logbook.INFO.
        dest = "log_level"
        group = self.parser.add_mutually_exclusive_group()
        group.add_argument(
            "--debug", dest=dest, action="store_const",
            const=logbook.DEBUG, default=logbook.INFO,
            help="for debug print.")
        group.add_argument(
            "--quiet", dest=dest, action="store_const",
            const=logbook.NOTSET, default=logbook.INFO,
            help="suppress execution log messages.")

        return group

    def _add_tc_command_arg_group(self):
        # NOTE(review): 'TcCoomandOutput' looks like a typo for
        # TcCommandOutput, but it must match the name exported by ._const.
        group = self.parser.add_mutually_exclusive_group()
        group.add_argument(
            "--tc-command", dest="tc_command_output", action="store_const",
            const=TcCoomandOutput.STDOUT, default=TcCoomandOutput.NOT_SET,
            help="""
            display tc commands to be executed and exit.
            commands are not actually executed.
            """)
        group.add_argument(
            "--tc-script", dest="tc_command_output", action="store_const",
            const=TcCoomandOutput.SCRIPT, default=TcCoomandOutput.NOT_SET,
            help="""
            generate a script file that described tc commands to be executed
            by this command.
            """)
| #!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
import argparse
import logbook
from ._const import TcCoomandOutput
class ArgparseWrapper(object):
    """
    wrapper class of argparse

    Builds the common argument groups (--version, tc-command output mode,
    log level) shared by the tcconfig command-line tools.
    """

    def __init__(self, version, description="", epilog=""):
        self.parser = argparse.ArgumentParser(
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=description, epilog=epilog)
        self.parser.add_argument(
            "--version", action="version", version="%(prog)s " + version)

        self._add_tc_command_arg_group()
        self._add_log_level_argument_group()

    def _add_log_level_argument_group(self):
        # --debug and --quiet both write to "log_level" and are mutually
        # exclusive; the default (neither given) is logbook.INFO.
        dest = "log_level"
        group = self.parser.add_mutually_exclusive_group()
        group.add_argument(
            "--debug", dest=dest, action="store_const",
            const=logbook.DEBUG, default=logbook.INFO,
            help="for debug print.")
        group.add_argument(
            "--quiet", dest=dest, action="store_const",
            const=logbook.NOTSET, default=logbook.INFO,
            help="suppress execution log messages.")

        return group

    def _add_tc_command_arg_group(self):
        # NOTE(review): 'TcCoomandOutput' looks like a typo for
        # TcCommandOutput, but it must match the name exported by ._const.
        group = self.parser.add_mutually_exclusive_group()
        group.add_argument(
            "--tc-command", dest="tc_command_output", action="store_const",
            const=TcCoomandOutput.STDOUT, default=TcCoomandOutput.NOT_SET,
            help="""
            display tc commands to be executed and exit.
            commands are not actually executed.
            """)
        group.add_argument(
            "--tc-script", dest="tc_command_output", action="store_const",
            const=TcCoomandOutput.SCRIPT, default=TcCoomandOutput.NOT_SET,
            # Fixed grammar of the user-facing help text ("that include tc
            # command" -> "that includes tc commands").
            help="""
            generate a script file that includes tc commands to be executed.
            """)
| mit | Python |
346dd546b04634ad14bfcb98af4f7c14477e55e8 | Support non-dotted modules in argv.StoreInModule (#393) | ShaperTools/openhtf,grybmadsci/openhtf,ShaperTools/openhtf,fahhem/openhtf,fahhem/openhtf,fahhem/openhtf,jettisonjoe/openhtf,google/openhtf,grybmadsci/openhtf,fahhem/openhtf,jettisonjoe/openhtf,ShaperTools/openhtf,jettisonjoe/openhtf,jettisonjoe/openhtf,ShaperTools/openhtf,grybmadsci/openhtf,google/openhtf,grybmadsci/openhtf,google/openhtf,google/openhtf,ShaperTools/openhtf | openhtf/util/argv.py | openhtf/util/argv.py | """Utilities for handling command line arguments.
StoreInModule:
Enables emulating a gflags-esque API (flag affects global value), but one
doesn't necessarily need to use flags to set values.
Example usage:
DEFAULT_VALUE = 0
ARG_PARSER = argv.ModuleParser()
ARG_PARSER.add_argument(
'--override-value', action=argv.StoreInModule,
default=DEFAULT_VALUE, target='%s.DEFAULT_VALUE' % __name__)
Then in an entry point (main() function), use that parser as a parent:
parser = argparse.ArgumentParser(parents=[other_module.ARG_PARSER])
parser.parse_args()
"""
import argparse
def ModuleParser():
    """Return an ArgumentParser with -h/--help disabled, suitable for use
    via ``parents=`` in an entry point's own ArgumentParser."""
    parser = argparse.ArgumentParser(add_help=False)
    return parser
class StoreInModule(argparse.Action):
    """Argparse action that stores the parsed value onto a module attribute.

    The ``target`` keyword is a dotted path ``<module path>.<attribute>``;
    an optional ``proxy`` action class may pre-process the value first.
    """

    def __init__(self, *args, **kwargs):
        target = kwargs.pop('target')
        self._tgt_mod, self._tgt_attr = target.rsplit('.', 1)
        proxy_cls = kwargs.pop('proxy', None)
        if proxy_cls is not None:
            self._proxy = proxy_cls(*args, **kwargs)
        super(StoreInModule, self).__init__(*args, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        if hasattr(self, '_proxy'):
            values = self._proxy(parser, namespace, values)
        setattr(self._resolve_module(), self._tgt_attr, values)

    def _resolve_module(self):
        """Import and return the module named by the target path."""
        if '.' not in self._tgt_mod:
            # Top-level module: __import__ returns it directly.
            return __import__(self._tgt_mod)
        base, leaf = self._tgt_mod.rsplit('.', 1)
        return getattr(__import__(base, fromlist=[leaf]), leaf)
| """Utilities for handling command line arguments.
StoreInModule:
Enables emulating a gflags-esque API (flag affects global value), but one
doesn't necessarily need to use flags to set values.
Example usage:
DEFAULT_VALUE = 0
ARG_PARSER = argv.ModuleParser()
ARG_PARSER.add_argument(
'--override-value', action=argv.StoreInModule,
default=DEFAULT_VALUE, target='%s.DEFAULT_VALUE' % __name__)
Then in an entry point (main() function), use that parser as a parent:
parser = argparse.ArgumentParser(parents=[other_module.ARG_PARSER])
parser.parse_args()
"""
import argparse
def ModuleParser():
    """Return an ArgumentParser with -h/--help disabled, suitable for use
    via ``parents=`` in an entry point's own ArgumentParser."""
    return argparse.ArgumentParser(add_help=False)
class StoreInModule(argparse.Action):
    """Argparse action that stores the parsed value onto a module attribute.

    The ``target`` keyword is a dotted path ``<module path>.<attribute>``;
    an optional ``proxy`` action class may pre-process the value first.
    """

    def __init__(self, *args, **kwargs):
        self._tgt_mod, self._tgt_attr = kwargs.pop('target').rsplit('.', 1)
        proxy_cls = kwargs.pop('proxy', None)
        if proxy_cls is not None:
            self._proxy = proxy_cls(*args, **kwargs)
        super(StoreInModule, self).__init__(*args, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        if hasattr(self, '_proxy'):
            values = self._proxy(parser, namespace, values)
        if '.' in self._tgt_mod:
            # Dotted path: import the leaf module from its package.
            base, mod = self._tgt_mod.rsplit('.', 1)
            module = getattr(__import__(base, fromlist=[mod]), mod)
        else:
            # Top-level module: __import__ returns it directly.  Without
            # this branch, non-dotted targets like 'mymodule.ATTR' broke.
            module = __import__(self._tgt_mod)
        setattr(module, self._tgt_attr, values)
| apache-2.0 | Python |
e5570654f83e50b4342eba6def79b614009ae6cf | fix file path for thrift.load | uber/tchannel-python,uber/tchannel-python | tchannel/testing/vcr/proxy.py | tchannel/testing/vcr/proxy.py | # Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import sys
from tchannel import thrift
# Directory containing this file; the .thrift IDL lives alongside it.
base = os.path.dirname(__file__)

# Load the Thrift service definition from a path resolved relative to this
# file, so the import works regardless of the current working directory.
proxy = thrift.load(
    path=os.path.join(base, 'proxy.thrift'),
    service='proxy-server',
)

# Replace this module in sys.modules so importing it yields the generated
# Thrift module directly.
sys.modules[__name__] = proxy
| # Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
import sys

from tchannel import thrift
# Load the Thrift IDL from a path resolved relative to this file, not the
# process's working directory -- a bare repo-relative path breaks whenever
# the interpreter is started from anywhere but the repository root.
proxy = thrift.load(
    path=os.path.join(os.path.dirname(__file__), 'proxy.thrift'),
    service='proxy-server',
)

# Replace this module in sys.modules so importing it yields the generated
# Thrift module directly.
sys.modules[__name__] = proxy
| mit | Python |
a98cc338d8cc796e6f77d28317207cac1b7ce2c0 | Bump version to 1.1.0-alpha | bechtoldt/imapclient,bechtoldt/imapclient | imapclient/version.py | imapclient/version.py | # Copyright (c) 2015, Menno Smits
# Released subject to the New BSD License
# Please see http://en.wikipedia.org/wiki/BSD_licenses
from __future__ import unicode_literals
version_info = (1, 1, 0, 'alpha')
def _imapclient_version_string(vinfo):
major, minor, micro, releaselevel = vinfo
v = '%d.%d.%d' % (major, minor, micro)
if releaselevel != 'final':
v += '-' + releaselevel
return v
version = _imapclient_version_string(version_info)
author = 'Menno Smits'
author_email = 'menno@freshfoo.com'
min_pyopenssl_version = '0.15.1'
| # Copyright (c) 2015, Menno Smits
# Released subject to the New BSD License
# Please see http://en.wikipedia.org/wiki/BSD_licenses
from __future__ import unicode_literals
version_info = (1, 0, 2, 'final')
def _imapclient_version_string(vinfo):
major, minor, micro, releaselevel = vinfo
v = '%d.%d.%d' % (major, minor, micro)
if releaselevel != 'final':
v += '-' + releaselevel
return v
version = _imapclient_version_string(version_info)
author = 'Menno Smits'
author_email = 'menno@freshfoo.com'
min_pyopenssl_version = '0.15.1'
| bsd-3-clause | Python |
5f57cfd16aa541f55c246ff5d1dfb7c3bd8c208c | add a function to find Event class for given OFP class | lagopus/ryu-lagopus-ext,haniehrajabi/ryu,habibiefaried/ryu,StephenKing/ryu,John-Lin/ryu,openvapour/ryu,habibiefaried/ryu,sivaramakrishnansr/ryu,yamt/ryu,hisaharu/ryu,jazzmes/ryu,takahashiminoru/ryu,diogommartins/ryu,zangree/ryu,yamt/ryu,takahashiminoru/ryu,muzixing/ryu,iwaseyusuke/ryu,zyq001/ryu,o3project/ryu-oe,osrg/ryu,torufuru/oolhackathon,ynkjm/ryu,lagopus/ryu-lagopus-ext,jalilm/ryu,ynkjm/ryu,OpenState-SDN/ryu,John-Lin/ryu,gareging/SDN_Framework,gareging/SDN_Framework,evanscottgray/ryu,John-Lin/ryu,gopchandani/ryu,gareging/SDN_Framework,alyosha1879/ryu,fkakuma/ryu,castroflavio/ryu,lsqtongxin/ryu,John-Lin/ryu,lagopus/ryu-lagopus-ext,muzixing/ryu,yamada-h/ryu,darjus-amzn/ryu,pichuang/ryu,StephenKing/summerschool-2015-ryu,diogommartins/ryu,takahashiminoru/ryu,osrg/ryu,lagopus/ryu-lagopus-ext,osrg/ryu,ynkjm/ryu,fujita/ryu,ntts-clo/ryu,darjus-amzn/ryu,fujita/ryu,StephenKing/ryu,fujita/ryu,jalilm/ryu,openvapour/ryu,umkcdcrg01/ryu_openflow,iwaseyusuke/ryu,ntts-clo/ryu,ysywh/ryu,gopchandani/ryu,elahejalalpour/ELRyu,zangree/ryu,pichuang/ryu,umkcdcrg01/ryu_openflow,alanquillin/ryu,hisaharu/ryu,pichuang/ryu,lagopus/ryu-lagopus-ext,o3project/ryu-oe,takahashiminoru/ryu,Tejas-Subramanya/RYU_MEC,iwaseyusuke/ryu,Tejas-Subramanya/RYU_MEC,yamt/ryu,Zouyiran/ryu,lsqtongxin/ryu,StephenKing/summerschool-2015-ryu,hisaharu/ryu,habibiefaried/ryu,umkcdcrg01/ryu_openflow,umkcdcrg01/ryu_openflow,shinpeimuraoka/ryu,jkoelker/ryu,lsqtongxin/ryu,fujita/ryu,openvapour/ryu,diogommartins/ryu,TakeshiTseng/ryu,castroflavio/ryu,Tejas-Subramanya/RYU_MEC,sivaramakrishnansr/ryu,gopchandani/ryu,darjus-amzn/ryu,alyosha1879/ryu,OpenState-SDN/ryu,fkakuma/ryu,lsqtongxin/ryu,Zouyiran/ryu,hisaharu/ryu,ttsubo/ryu,jalilm/ryu,alyosha1879/ryu,lzppp/mylearning,alanquillin/ryu,sivaramakrishnansr/ryu,hisaharu/ryu,darjus-amzn/ryu,gareging/SDN_Framework,jalilm/ryu,zyq001/ryu,Zouyiran/ryu,elahejalalpour/ELRyu,zangree/ryu
,StephenKing/summerschool-2015-ryu,StephenKing/ryu,openvapour/ryu,alanquillin/ryu,evanscottgray/ryu,lzppp/mylearning,TakeshiTseng/ryu,StephenKing/summerschool-2015-ryu,ynkjm/ryu,lzppp/mylearning,evanscottgray/ryu,Tesi-Luca-Davide/ryu,zyq001/ryu,gopchandani/ryu,darjus-amzn/ryu,ynkjm/ryu,muzixing/ryu,TakeshiTseng/ryu,yamt/ryu,pichuang/ryu,torufuru/oolhackathon,OpenState-SDN/ryu,StephenKing/ryu,elahejalalpour/ELRyu,ttsubo/ryu,John-Lin/ryu,fkakuma/ryu,shinpeimuraoka/ryu,shinpeimuraoka/ryu,jazzmes/ryu,zyq001/ryu,torufuru/oolhackathon,sivaramakrishnansr/ryu,Zouyiran/ryu,Zouyiran/ryu,osrg/ryu,elahejalalpour/ELRyu,haniehrajabi/ryu,lzppp/mylearning,ttsubo/ryu,alyosha1879/ryu,fkakuma/ryu,diogommartins/ryu,ysywh/ryu,ttsubo/ryu,ttsubo/ryu,shinpeimuraoka/ryu,iwaseyusuke/ryu,ysywh/ryu,zangree/ryu,osrg/ryu,gopchandani/ryu,StephenKing/ryu,ysywh/ryu,alanquillin/ryu,pichuang/ryu,Tesi-Luca-Davide/ryu,alanquillin/ryu,StephenKing/summerschool-2015-ryu,yamt/ryu,openvapour/ryu,castroflavio/ryu,zyq001/ryu,haniehrajabi/ryu,habibiefaried/ryu,jkoelker/ryu,zangree/ryu,shinpeimuraoka/ryu,muzixing/ryu,umkcdcrg01/ryu_openflow,TakeshiTseng/ryu,muzixing/ryu,sivaramakrishnansr/ryu,ntts-clo/mld-ryu,iwaseyusuke/ryu,Tejas-Subramanya/RYU_MEC,fkakuma/ryu,lsqtongxin/ryu,ysywh/ryu,Tejas-Subramanya/RYU_MEC,diogommartins/ryu,jazzmes/ryu,jkoelker/ryu,OpenState-SDN/ryu,gareging/SDN_Framework,OpenState-SDN/ryu,yamada-h/ryu,jalilm/ryu,elahejalalpour/ELRyu,haniehrajabi/ryu,fujita/ryu,ntts-clo/mld-ryu,habibiefaried/ryu,takahashiminoru/ryu,haniehrajabi/ryu,Tesi-Luca-Davide/ryu,Tesi-Luca-Davide/ryu,TakeshiTseng/ryu,lzppp/mylearning,Tesi-Luca-Davide/ryu | ryu/controller/ofp_event.py | ryu/controller/ofp_event.py | # Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
from ryu.controller import handler
from ryu import ofproto
from ryu import utils
from . import event
class EventOFPMsgBase(event.EventBase):
    """Base class for events generated from parsed OpenFlow messages.

    ``msg`` is the parsed OpenFlow message the event wraps.
    """
    def __init__(self, msg):
        super(EventOFPMsgBase, self).__init__()
        self.msg = msg
#
# Create ofp_event type corresponding to OFP Msg
#
_OFP_MSG_EVENTS = {}
def _ofp_msg_name_to_ev_name(msg_name):
return 'Event' + msg_name
def ofp_msg_to_ev(msg):
    """Wrap a parsed OFP message in its matching event instance."""
    ev_cls = ofp_msg_to_ev_cls(msg.__class__)
    return ev_cls(msg)
def ofp_msg_to_ev_cls(msg_cls):
    """Look up the event class registered for an OFP message class.

    Raises KeyError if no event class was generated for ``msg_cls``.
    """
    return _OFP_MSG_EVENTS[_ofp_msg_name_to_ev_name(msg_cls.__name__)]
def _create_ofp_msg_ev_class(msg_cls):
    """Generate (at most once) the event class for ``msg_cls`` and register it.

    The generated class is stored in ``_OFP_MSG_EVENTS`` and also exposed as
    a module-level attribute so handlers can reference it by name.
    """
    ev_name = _ofp_msg_name_to_ev_name(msg_cls.__name__)
    if ev_name in _OFP_MSG_EVENTS:
        return
    # Dynamically build a subclass whose __init__ just forwards the message.
    body = dict(__init__=lambda self, msg:
                super(self.__class__, self).__init__(msg))
    ev_cls = type(ev_name, (EventOFPMsgBase,), body)
    _OFP_MSG_EVENTS[ev_name] = ev_cls
    globals()[ev_name] = ev_cls
def _create_ofp_msg_ev_from_module(ofp_parser):
    """Generate event classes for every OFP message class in a parser module.

    Only classes carrying a ``cls_msg_type`` attribute are treated as
    OpenFlow message classes.
    """
    for _name, member in inspect.getmembers(ofp_parser, inspect.isclass):
        if hasattr(member, 'cls_msg_type'):
            _create_ofp_msg_ev_class(member)
_create_ofp_msg_ev_class(cls)
# Eagerly generate event classes for every supported OpenFlow version at
# import time.  Each value presumably is an (ofproto, ofproto_parser) pair,
# with the parser module at index 1 — confirm against ryu.ofproto.
for ofp_mods in ofproto.get_ofp_modules().values():
    ofp_parser = ofp_mods[1]
    # print 'loading module %s' % ofp_parser
    _create_ofp_msg_ev_from_module(ofp_parser)
class EventOFPStateChange(event.EventBase):
    """Event carrying a datapath whose connection state changed."""
    def __init__(self, dp):
        super(EventOFPStateChange, self).__init__()
        self.datapath = dp

# Make sure the OFP handler service is started for apps using these events.
handler.register_service('ryu.controller.ofp_handler')
| # Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
from ryu.controller import handler
from ryu import ofproto
from ryu import utils
from . import event
class EventOFPMsgBase(event.EventBase):
def __init__(self, msg):
super(EventOFPMsgBase, self).__init__()
self.msg = msg
#
# Create ofp_event type corresponding to OFP Msg
#
_OFP_MSG_EVENTS = {}
def _ofp_msg_name_to_ev_name(msg_name):
return 'Event' + msg_name
def ofp_msg_to_ev(msg):
name = _ofp_msg_name_to_ev_name(msg.__class__.__name__)
return _OFP_MSG_EVENTS[name](msg)
def _create_ofp_msg_ev_class(msg_cls):
name = _ofp_msg_name_to_ev_name(msg_cls.__name__)
# print 'creating ofp_event %s' % name
if name in _OFP_MSG_EVENTS:
return
cls = type(name, (EventOFPMsgBase,),
dict(__init__=lambda self, msg:
super(self.__class__, self).__init__(msg)))
globals()[name] = cls
_OFP_MSG_EVENTS[name] = cls
def _create_ofp_msg_ev_from_module(ofp_parser):
# print mod
for _k, cls in inspect.getmembers(ofp_parser, inspect.isclass):
if not hasattr(cls, 'cls_msg_type'):
continue
_create_ofp_msg_ev_class(cls)
for ofp_mods in ofproto.get_ofp_modules().values():
ofp_parser = ofp_mods[1]
# print 'loading module %s' % ofp_parser
_create_ofp_msg_ev_from_module(ofp_parser)
class EventOFPStateChange(event.EventBase):
def __init__(self, dp):
super(EventOFPStateChange, self).__init__()
self.datapath = dp
handler.register_service('ryu.controller.ofp_handler')
| apache-2.0 | Python |
2daeee0b9fabb4f1ad709bdbe9c8c12a6281d32d | Fix main failing on no event_callback | Kane610/axis | axis/__main__.py | axis/__main__.py | """Read events and parameters from your Axis device."""
import asyncio
import argparse
import logging
import sys
from axis import AxisDevice
async def main(args):
    """Connect to an Axis device; optionally dump parameters and/or events.

    ``args`` comes from the argparse parser in ``__main__``: host, username,
    password and port select the device; ``--params`` fetches device
    parameters, ``--events`` streams events until interrupted.
    """
    loop = asyncio.get_event_loop()
    device = AxisDevice(
        loop=loop, host=args.host, username=args.username,
        password=args.password, port=args.port)

    if args.params:
        # The vapix calls are blocking; run them in the default executor so
        # the event loop stays responsive.
        await loop.run_in_executor(None, device.vapix.initialize_params)
        await loop.run_in_executor(None, device.vapix.initialize_ports)
        await loop.run_in_executor(None, device.vapix.initialize_users)

    # Early exit makes the second ``if args.events:`` guard redundant; the
    # remaining code runs only when event streaming was requested.
    if not args.events:
        return

    device.start()
    try:
        # Idle loop: keep the process alive while the device streams events.
        while True:
            await asyncio.sleep(1)
    except KeyboardInterrupt:
        pass
    finally:
        device.stop()
if __name__ == "__main__":
    # CLI entry point: verbose logging plus positional credentials.
    logging.basicConfig(format='%(message)s', level=logging.DEBUG)
    parser = argparse.ArgumentParser()
    parser.add_argument('host', type=str)
    parser.add_argument('username', type=str)
    parser.add_argument('password', type=str)
    parser.add_argument('-p', '--port', type=int, default=80)
    # Feature flags: stream events and/or dump device parameters.
    parser.add_argument('--events', action='store_true')
    parser.add_argument('--params', action='store_true')
    args = parser.parse_args()
    asyncio.run(main(args))
| """Read events and parameters from your Axis device."""
import asyncio
import argparse
import logging
import sys
from axis import AxisDevice
async def main(args):
loop = asyncio.get_event_loop()
device = AxisDevice(
loop=loop, host=args.host, username=args.username,
password=args.password, port=args.port)
if args.params:
await loop.run_in_executor(None, device.vapix.initialize_params)
await loop.run_in_executor(None, device.vapix.initialize_ports)
await loop.run_in_executor(None, device.vapix.initialize_users)
if not args.events:
return
if args.events:
device.start()
try:
while True:
await asyncio.sleep(1)
except KeyboardInterrupt:
pass
finally:
device.stop()
if __name__ == "__main__":
logging.basicConfig(format='%(message)s', level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument('host', type=str)
parser.add_argument('username', type=str)
parser.add_argument('password', type=str)
parser.add_argument('-p', '--port', type=int, default=80)
parser.add_argument('--events', action='store_true')
parser.add_argument('--params', action='store_true')
args = parser.parse_args()
asyncio.run(main(args))
| mit | Python |
439db29a5319cc531208f4c92a7133aa3519a50c | Fix validators for payment fields | fusionbox/satchless,fusionbox/satchless,taedori81/satchless,fusionbox/satchless | satchless/payment/fields.py | satchless/payment/fields.py | import calendar
import datetime
from django import forms
from django.core import validators
from django.utils.translation import ugettext_lazy as _
import re
from . import widgets
def mod10(number):
    """Return True if ``number`` (a string of digits) passes the Luhn check.

    The empty string and any string containing a non-digit return False.
    """
    if not number.isdigit():
        return False
    digits = []
    even = False
    for digit in reversed(number):
        digit = ord(digit) - ord('0')
        if even:
            digit = digit * 2
            if digit >= 10:
                # Sum the two digits of the doubled value.  Floor division
                # keeps this an int under Python 3; plain ``/`` produced a
                # float remainder that broke the checksum for doubled 6-9.
                digit = digit % 10 + digit // 10
        digits.append(digit)
        even = not even
    return sum(digits) % 10 == 0 if digits else False
class CreditCardNumberField(forms.CharField):
    """Form field for credit card numbers.

    Strips whitespace and dashes on input and validates the remaining
    digits with the Luhn (mod 10) checksum.
    """
    widget = widgets.CreditCardNumberWidget
    default_error_messages = {
        'invalid': _(u'Please enter a valid card number'),
    }

    def __init__(self, *args, **kwargs):
        # Card numbers with separators comfortably fit in 32 characters.
        kwargs['max_length'] = kwargs.pop('max_length', 32)
        super(CreditCardNumberField, self).__init__(*args, **kwargs)

    def to_python(self, value):
        # Raw string so ``\s`` is an explicit regex escape instead of
        # relying on Python passing unknown string escapes through
        # (a DeprecationWarning since Python 3.6).
        cleaned = re.sub(r'[\s-]', '', value)
        if value and not cleaned:
            # The input consisted solely of separators.
            raise forms.ValidationError(self.error_messages['invalid'])
        return cleaned

    def validate(self, value):
        if value in validators.EMPTY_VALUES and self.required:
            raise forms.ValidationError(self.error_messages['required'])
        if value and not mod10(value):
            raise forms.ValidationError(self.error_messages['invalid'])
class CreditCardExpirationField(forms.DateField):
    """Form field for a card expiry month, normalized to month end."""
    widget = widgets.SelectMonthWidget
    default_error_messages = {
        'expired': _(u'This credit card has already expired'),
    }

    def validate(self, value):
        if value in validators.EMPTY_VALUES and self.required:
            raise forms.ValidationError(self.error_messages['required'])
        # The card is valid through its expiry month (see to_python).
        if isinstance(value, datetime.date) and value < datetime.date.today():
            raise forms.ValidationError(self.error_messages['expired'])

    def to_python(self, value):
        value = super(CreditCardExpirationField, self).to_python(value)
        if isinstance(value, datetime.date):
            # Snap to the last day of the month so the expiry comparison in
            # validate() accepts cards for the whole of their final month.
            first_weekday, num_days = calendar.monthrange(value.year, value.month)
            value = datetime.date(value.year, value.month, num_days)
        return value
| import calendar
import datetime
from django import forms
from django.core import validators
from django.utils.translation import ugettext_lazy as _
import re
from . import widgets
def mod10(number):
digits = []
even = False
if not number.isdigit():
return False
for digit in reversed(number):
digit = ord(digit) - ord('0')
if even:
digit = digit * 2
if digit >= 10:
digit = digit % 10 + digit / 10
digits.append(digit)
even = not even
return sum(digits) % 10 == 0 if digits else False
class CreditCardNumberField(forms.CharField):
widget = widgets.CreditCardNumberWidget
default_error_messages = {
'invalid': _(u'Please enter a valid card number'),
}
def __init__(self, *args, **kwargs):
kwargs['max_length'] = kwargs.pop('max_length', 32)
super(CreditCardNumberField, self).__init__(*args, **kwargs)
def to_python(self, value):
cleaned = re.sub('[\s-]', '', value)
if value and not cleaned:
raise forms.ValidationError(self.error_messages['invalid'])
return cleaned
def validate(self, value):
if value in validators.EMPTY_VALUES and self.required:
raise forms.ValidationError(self.error_messages['required'])
if value and not mod10(value):
raise forms.ValidationError(self.error_messages['invalid'])
return value
class CreditCardExpirationField(forms.DateField):
widget = widgets.SelectMonthWidget
default_error_messages = {
'expired': _(u'This credit card has already expired'),
}
def validate(self, value):
if value < datetime.date.today():
raise forms.ValidationError(self.error_messages['expired'])
return value
def to_python(self, value):
value = super(CreditCardExpirationField, self).to_python(value)
if isinstance(value, datetime.date):
first_weekday, num_days = calendar.monthrange(value.year, value.month)
value = datetime.date(value.year, value.month, num_days)
return value
| bsd-3-clause | Python |
7318ec2095b99d6e5c1df68c1fcef7ca1825220e | check if output is empty | aldanor/ipybind,aldanor/ipybind,aldanor/ipybind | ipybind/spawn.py | ipybind/spawn.py | # -*- coding: utf-8 -*-
import contextlib
import os
import subprocess
import sys
import distutils.errors
import distutils.spawn
class inject:
    """Callable wrapper whose target function can be swapped and restored.

    Remembers the function supplied at construction; ``set`` installs a new
    target, ``reset`` restores the original, and calling the wrapper
    forwards to whichever target is currently active.
    """

    def __init__(self, fn):
        self.orig = fn
        self.fn = fn

    def set(self, fn):
        self.fn = fn

    def reset(self):
        self.set(self.orig)

    def __call__(self, *args, **kwargs):
        return self.fn(*args, **kwargs)
def patch_spawn():
    """Wrap distutils' ``spawn`` in an ``inject`` so it can be swapped later.

    Must be called before ``spawn_capture`` is used, since that relies on
    ``distutils.spawn.spawn`` exposing ``set``/``reset``.
    """
    distutils.spawn.spawn = inject(distutils.spawn.spawn)
def spawn_fn(mode, fmt=None):
    """Build a replacement for ``distutils.spawn.spawn`` that captures output.

    mode: 'always' echoes the command's combined stdout/stderr
    unconditionally; 'on_error' echoes it only when the command exits
    non-zero; anything else stays silent.
    fmt: optional callable applied to the decoded output before printing.

    The returned function mirrors distutils' spawn signature and raises
    DistutilsExecError on any failure, like the original.
    """
    def spawn(cmd, search_path=True, verbose=False, dry_run=False):
        cmd = list(cmd)
        if search_path:
            cmd[0] = distutils.spawn.find_executable(cmd[0]) or cmd[0]
        if dry_run:
            return
        try:
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT)
            out, _ = p.communicate()
            if out and (mode == 'always' or
                        (mode == 'on_error' and p.returncode != 0)):
                # Decode once and keep the output as text: sys.stdout is a
                # text stream in Python 3, so writing bytes would raise
                # TypeError (previously masked by the catch-all below).
                text = out.decode('utf-8')
                if fmt is not None:
                    text = fmt(text)
                sys.stdout.write(text)
                sys.stdout.flush()
            if p.returncode != 0:
                # CalledProcessError requires (returncode, cmd); raising the
                # bare class died with TypeError and was only rescued by the
                # catch-all handler.
                raise subprocess.CalledProcessError(p.returncode, cmd)
        except OSError as e:
            raise distutils.errors.DistutilsExecError(
                'command {!r} failed with exit status {}: {}'
                .format(os.path.basename(cmd[0]), e.errno, e.strerror)) from None
        except:
            # Deliberately broad: every other failure (including the
            # CalledProcessError above) is reported the way distutils
            # callers expect.
            raise distutils.errors.DistutilsExecError(
                'command {!r} failed'
                .format(os.path.basename(cmd[0]))) from None
    return spawn
@contextlib.contextmanager
def spawn_capture(mode='on_error', fmt=None):
    """Context manager: temporarily swap distutils' spawn for the capturing
    variant built by ``spawn_fn``.

    Requires ``patch_spawn()`` to have been called first so that
    ``distutils.spawn.spawn`` is an ``inject`` wrapper.
    """
    distutils.spawn.spawn.set(spawn_fn(mode, fmt=fmt))
    try:
        yield
    finally:
        distutils.spawn.spawn.reset()
| # -*- coding: utf-8 -*-
import contextlib
import os
import subprocess
import sys
import distutils.errors
import distutils.spawn
class inject:
def __init__(self, fn):
self.orig = fn
self.set(fn)
def set(self, fn):
self.fn = fn
def reset(self):
self.fn = self.orig
def __call__(self, *args, **kwargs):
return self.fn(*args, **kwargs)
def patch_spawn():
distutils.spawn.spawn = inject(distutils.spawn.spawn)
def spawn_fn(mode, fmt=None):
def spawn(cmd, search_path=True, verbose=False, dry_run=False):
cmd = list(cmd)
if search_path:
cmd[0] = distutils.spawn.find_executable(cmd[0]) or cmd[0]
if dry_run:
return
try:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = p.communicate()
if mode == 'always' or (mode == 'on_error' and p.returncode != 0):
if fmt is not None:
out = fmt(out.decode('utf-8')).encode('utf-8')
sys.stdout.write(out)
sys.stdout.flush()
if p.returncode != 0:
raise subprocess.CalledProcessError
except OSError as e:
raise distutils.errors.DistutilsExecError(
'command {!r} failed with exit status {}: {}'
.format(os.path.basename(cmd[0]), e.errno, e.strerror)) from None
except:
raise distutils.errors.DistutilsExecError(
'command {!r} failed'
.format(os.path.basename(cmd[0]))) from None
return spawn
@contextlib.contextmanager
def spawn_capture(mode='on_error', fmt=None):
distutils.spawn.spawn.set(spawn_fn(mode, fmt=fmt))
try:
yield
finally:
distutils.spawn.spawn.reset()
| mit | Python |
3622e7a2e79c863aa9933bf92a473ce107f48bfa | update Craven import script for parl.2017-06-08 (closes #951) | DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_craven.py | polling_stations/apps/data_collection/management/commands/import_craven.py | from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
    """Import Craven (E07000163) polling data for the 2017-06-08 general
    election from an Xpress Democracy Club export."""
    council_id = 'E07000163'
    addresses_name = 'parl.2017-06-08/Version 1/Craven Democracy_Club__08June2017.tsv'
    stations_name = 'parl.2017-06-08/Version 1/Craven Democracy_Club__08June2017.tsv'
    elections = ['parl.2017-06-08']
    # The export is tab-separated and not UTF-8 encoded.
    csv_delimiter = '\t'
    csv_encoding = 'windows-1252'
| from django.contrib.gis.geos import Point
from data_collection.base_importers import BaseCsvStationsCsvAddressesImporter
class Command(BaseCsvStationsCsvAddressesImporter):
    """Importer for Craven (E07000163) polling stations and addresses from
    council-supplied CSV files."""
    council_id = 'E07000163'
    addresses_name = 'Craven Property List with UPRNs.csv'
    stations_name = 'CravePolling Station List.csv'
    elections = [
        'local.north-yorkshire.2017-05-04',
        'parl.2017-06-08'
    ]
    csv_encoding = 'windows-1252'

    def get_station_point(self, record):
        """Build a British National Grid (EPSG:27700) point from the
        record's easting/northing columns."""
        return Point(
            float(record.easting),
            float(record.northing),
            srid=27700
        )

    def station_record_to_dict(self, record):
        """Map a station CSV row to the importer's station dict."""
        location = self.get_station_point(record)
        return {
            'internal_council_id': record.polling_station_id.strip(),
            # No postcode is supplied in the source data.
            'postcode' : '',
            'address' : record.name.strip() + "\n" + record.address.strip(),
            'location' : location
        }

    def address_record_to_dict(self, record):
        """Map a property CSV row to an address dict, or None when the row
        has no polling station assigned."""
        if record.polling_station_id.strip() == '':
            return None
        address = ", ".join([
            record.propertyaddress1,
            record.propertyaddress2,
            record.propertyaddress3,
            record.propertyaddress4,
            record.propertyaddress5,
        ])
        # Collapse the gaps left by empty address fields...
        while ", , " in address:
            address = address.replace(", , ", ", ")
        # ...and drop a trailing separator when the last fields were empty.
        if address[-2:] == ', ':
            address = address[:-2]
        return {
            'address' : address.strip(),
            'postcode' : record.propertypostcode.strip(),
            'polling_station_id': record.polling_station_id.strip()
        }
| bsd-3-clause | Python |
b6e934ef32e6591ad60636e3fe167d0e3e9aa5d4 | Fix URL KeyError | catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult | telemetry/telemetry/internal/backends/chrome_inspector/inspector_console.py | telemetry/telemetry/internal/backends/chrome_inspector/inspector_console.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import absolute_import
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from telemetry.internal.backends.chrome_inspector import websocket
class InspectorConsole(object):
    """Collects Console.messageAdded notifications from a DevTools
    inspector websocket and exposes them as a text buffer."""

    def __init__(self, inspector_websocket):
        self._inspector_websocket = inspector_websocket
        self._inspector_websocket.RegisterDomain('Console', self._OnNotification)
        self._message_output_stream = None
        self._last_message = None
        self._console_enabled = False

    def _OnNotification(self, msg):
        # Only console messages are of interest; other Console.* events
        # are ignored.
        if msg['method'] == 'Console.messageAdded':
            assert self._message_output_stream
            message = msg['params']['message']
            # Skip chatter produced by the New Tab page.
            if message.get('url') == 'chrome://newtab/':
                return
            # The protocol does not guarantee every field is present,
            # hence the fallbacks.
            self._last_message = '(%s) %s:%i: %s' % (
                message.get('level', 'unknown_log_level'),
                message.get('url', 'unknown_url'),
                message.get('line', 0),
                message.get('text', 'no text provided by console message'))
            self._message_output_stream.write(
                '%s\n' % self._last_message)

    def GetCurrentConsoleOutputBuffer(self, timeout=10):
        """Drain console notifications for up to ``timeout`` seconds and
        return everything received as one string."""
        self._message_output_stream = StringIO()
        self._EnableConsoleOutputStream(timeout)
        try:
            self._inspector_websocket.DispatchNotifications(timeout)
            return self._message_output_stream.getvalue()
        except websocket.WebSocketTimeoutException:
            # Timing out just means no more messages arrived in time;
            # return whatever was collected.
            return self._message_output_stream.getvalue()
        finally:
            self._DisableConsoleOutputStream(timeout)
            self._message_output_stream.close()
            self._message_output_stream = None

    def _EnableConsoleOutputStream(self, timeout):
        self._inspector_websocket.SyncRequest({'method': 'Console.enable'}, timeout)

    def _DisableConsoleOutputStream(self, timeout):
        self._inspector_websocket.SyncRequest(
            {'method': 'Console.disable'}, timeout)
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import absolute_import
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from telemetry.internal.backends.chrome_inspector import websocket
class InspectorConsole(object):
def __init__(self, inspector_websocket):
self._inspector_websocket = inspector_websocket
self._inspector_websocket.RegisterDomain('Console', self._OnNotification)
self._message_output_stream = None
self._last_message = None
self._console_enabled = False
def _OnNotification(self, msg):
if msg['method'] == 'Console.messageAdded':
assert self._message_output_stream
if msg['params']['message']['url'] == 'chrome://newtab/':
return
self._last_message = '(%s) %s:%i: %s' % (
msg['params']['message']['level'],
msg['params']['message']['url'],
msg['params']['message']['line'],
msg['params']['message']['text'])
self._message_output_stream.write(
'%s\n' % self._last_message)
def GetCurrentConsoleOutputBuffer(self, timeout=10):
self._message_output_stream = StringIO()
self._EnableConsoleOutputStream(timeout)
try:
self._inspector_websocket.DispatchNotifications(timeout)
return self._message_output_stream.getvalue()
except websocket.WebSocketTimeoutException:
return self._message_output_stream.getvalue()
finally:
self._DisableConsoleOutputStream(timeout)
self._message_output_stream.close()
self._message_output_stream = None
def _EnableConsoleOutputStream(self, timeout):
self._inspector_websocket.SyncRequest({'method': 'Console.enable'}, timeout)
def _DisableConsoleOutputStream(self, timeout):
self._inspector_websocket.SyncRequest(
{'method': 'Console.disable'}, timeout)
| bsd-3-clause | Python |
dd8a1f0041acbaab6bf51103cc9fd699a605adaf | rework this | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | scripts/cache/warn_cache.py | scripts/cache/warn_cache.py | # Need something to cache the warnings GIS files, since they are so
# huge.
import os
import mx.DateTime
FINAL = "/mesonet/share/pickup/wwa/"
URL = "http://iem21.local/cgi-bin/request/gis/watchwarn.py"
def get_files(sts, ets):
    """Download IEM watch/warning GIS archive zips for the period sts..ets.

    Writes <year>_all.zip, <year>_tsmf.zip and (for 2002 onward)
    <year>_tsmf_sbw.zip into FINAL by shelling out to wget.  ``sts``/``ets``
    are mx.DateTime values naming the requested period.
    """
    params = (FINAL, sts.year, URL, sts.year, ets.year, ets.month, ets.day)
    cmd = ('wget --timeout=60000 -q -O %s/%s_all.zip "%s?year1=%s&month1=1'
           '&day1=1&hour1=0&minute1=0&year2=%s&month2=%s&day2=%s&hour2=0'
           '&minute2=0"') % params
    os.system(cmd)
    cmd = ('wget --timeout=60000 -q -O %s/%s_tsmf.zip "%s?year1=%s&month1=1'
           '&day1=1&hour1=0&minute1=0&year2=%s&month2=%s&day2=%s&hour2=0'
           '&minute2=0&limit0=yes"') % params
    os.system(cmd)
    # Storm-based warnings only exist from 2002 onward.  Key the guard off
    # the *requested* start year, not the wall clock, so historical reruns
    # skip the variant correctly (previously used mx.DateTime.now().year).
    if sts.year > 2001:
        cmd = ('wget --timeout=60000 -q -O %s/%s_tsmf_sbw.zip "%s?year1=%s'
               '&month1=1&day1=1&hour1=0&minute1=0&year2=%s&month2=%s'
               '&day2=%s&hour2=0&minute2=0&limit0=yes&limit1=yes"') % params
        os.system(cmd)
if __name__ == "__main__":
    # Cache everything from the start of the current year through tomorrow.
    sts = mx.DateTime.now() + mx.DateTime.RelativeDateTime(day=1,month=1,hour=0,minute=0)
    ets = mx.DateTime.now() + mx.DateTime.RelativeDateTime(days=1,hour=0,minute=0)
    get_files(sts, ets)
| # Need something to cache the warnings GIS files, since they are so
# huge.
import os
import mx.DateTime
FINAL = "/mesonet/share/pickup/wwa/"
#for year in range(1986,2010):
year = mx.DateTime.now().year
cmd = 'wget --timeout=60000 -q -O %s/%s_all.zip "http://iem50.local/cgi-bin/request/gis/watchwarn.py?year1=%s&month1=1&day1=1&hour1=0&minute1=0&year2=%s&month2=1&day2=1&hour2=0&minute2=0"' % (FINAL, year, year, year+1)
os.system(cmd)
cmd = 'wget --timeout=60000 -q -O %s/%s_tsmf.zip "http://iem50.local/cgi-bin/request/gis/watchwarn.py?year1=%s&month1=1&day1=1&hour1=0&minute1=0&year2=%s&month2=1&day2=1&hour2=0&minute2=0&limit0=yes"' % (FINAL, year, year, year+1)
os.system(cmd)
if year > 2001:
cmd = 'wget --timeout=60000 -q -O %s/%s_tsmf_sbw.zip "http://iem50.local/cgi-bin/request/gis/watchwarn.py?year1=%s&month1=1&day1=1&hour1=0&minute1=0&year2=%s&month2=1&day2=1&hour2=0&minute2=0&limit0=yes&limit1=yes"' % (FINAL, year, year, year+1)
os.system(cmd)
| mit | Python |
bc02981c4ad0dc424c3721856565ce1adbfd6411 | Update verifyAuthenticode.py | jgstew/tools,jgstew/tools,jgstew/tools,jgstew/tools | Python/verifyAuthenticode.py | Python/verifyAuthenticode.py |
# This is Windows Only and Python2 only: https://github.com/hakril/PythonForWindows/projects/1
# https://twitter.com/jgstew/status/1012509162540974080
# https://programtalk.com/vs2/python/5137/PythonForWindows/windows/wintrust.py/
# https://github.com/hakril/PythonForWindows/blob/master/samples/crypto/wintrust.py
import windows.wintrust
# git clone https://github.com/hakril/PythonForWindows.git
# cd PythonForWindows && python .\setup.py install
# File whose Authenticode signature is inspected.
sFileName = r"C:\Windows\explorer.exe"
# is_signed: does the file carry an Authenticode signature at all?
print "signed: " + str( windows.wintrust.is_signed(sFileName) )
# check_signature returns a WinVerifyTrust status code; 0 means valid.
print "0 is valid: " + str( windows.wintrust.check_signature(sFileName) )
|
# This is Windows Only
# https://twitter.com/jgstew/status/1012509162540974080
# https://programtalk.com/vs2/python/5137/PythonForWindows/windows/wintrust.py/
# https://github.com/hakril/PythonForWindows/blob/master/samples/crypto/wintrust.py
import windows.wintrust
# git clone https://github.com/hakril/PythonForWindows.git
# cd PythonForWindows && python .\setup.py install
sFileName = r"C:\Windows\explorer.exe"
print "signed: " + str( windows.wintrust.is_signed(sFileName) )
print "0 is valid: " + str( windows.wintrust.check_signature(sFileName) )
| mit | Python |
14dd325a4beeade048c25994c72a5afb3df4ff50 | bump workers | chrander/pygameday | tests/test_GameDayClient.py | tests/test_GameDayClient.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from datetime import datetime
from pygameday import GameDayClient
class TestGameDayClient(unittest.TestCase):
    """Smoke test: ingest one day of GameDay data into a local SQLite DB.

    NOTE(review): hits the live GameDay service — network-dependent.
    """
    def test_ingest(self):
        # Single-day range keeps the download small.
        start_date = datetime(2018, 4, 1)
        end_date = datetime(2018, 4, 1)
        database_uri = "sqlite:///gameday.db"
        n_workers = 8
        client = GameDayClient(database_uri, n_workers=n_workers)
        client.db_stats()
        client.process_date_range(start_date, end_date)
        client.db_stats()
if __name__ == '__main__':
unittest.main() | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from datetime import datetime
from pygameday import GameDayClient
class TestGameDayClient(unittest.TestCase):
def test_ingest(self):
start_date = datetime(2016, 4, 8)
end_date = datetime(2016, 4, 8)
database_uri = "sqlite:///gameday.db"
client = GameDayClient(database_uri)
client.db_stats()
client.process_date_range(start_date, end_date)
client.db_stats()
if __name__ == '__main__':
unittest.main() | mit | Python |
9301ff04f97cce8713463b870f6140e7549c854f | Test jaccard | jakirkham/dask-distance | tests/test_dask_distance.py | tests/test_dask_distance.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import numpy as np
import scipy.spatial.distance as spdist
import dask.array as da
import pytest
import dask_distance
@pytest.mark.parametrize(
    "funcname", [
        "dice",
        "hamming",
        "jaccard",
    ]
)
@pytest.mark.parametrize(
    "seed", [
        0,
        137,
        34,
    ]
)
@pytest.mark.parametrize(
    "size, chunks", [
        (10, 5),
    ]
)
def test_1d_bool_dist(funcname, seed, size, chunks):
    """Each dask_distance boolean metric must match its scipy counterpart
    on random 1-D boolean vectors (chunked so reduction paths are hit)."""
    np.random.seed(seed)
    a_u = np.random.randint(0, 2, (size,), dtype=bool)
    a_v = np.random.randint(0, 2, (size,), dtype=bool)
    d_u = da.from_array(a_u, chunks=chunks)
    d_v = da.from_array(a_v, chunks=chunks)
    sp_func = getattr(spdist, funcname)
    da_func = getattr(dask_distance, funcname)
    a_r = sp_func(a_u, a_v)
    d_r = da_func(d_u, d_v)
    # np.array() forces evaluation of the lazy dask result.
    assert np.array(d_r) == a_r
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import numpy as np
import scipy.spatial.distance as spdist
import dask.array as da
import pytest
import dask_distance
@pytest.mark.parametrize(
"funcname", [
"dice",
"hamming",
]
)
@pytest.mark.parametrize(
"seed", [
0,
137,
34,
]
)
@pytest.mark.parametrize(
"size, chunks", [
(10, 5),
]
)
def test_1d_bool_dist(funcname, seed, size, chunks):
np.random.seed(seed)
a_u = np.random.randint(0, 2, (size,), dtype=bool)
a_v = np.random.randint(0, 2, (size,), dtype=bool)
d_u = da.from_array(a_u, chunks=chunks)
d_v = da.from_array(a_v, chunks=chunks)
sp_func = getattr(spdist, funcname)
da_func = getattr(dask_distance, funcname)
a_r = sp_func(a_u, a_v)
d_r = da_func(d_u, d_v)
assert np.array(d_r) == a_r
| bsd-3-clause | Python |
e131ef5b7ed7df341d23c1b295a9d6caef5b0fe6 | Test on collinear vectors | ppb/ppb-vector,ppb/ppb-vector | tests/test_vector2_angle.py | tests/test_vector2_angle.py | from ppb_vector import Vector2
from math import isclose
import pytest # type: ignore
from hypothesis import assume, given, note
from hypothesis.strategies import floats
from utils import angle_isclose, vectors
@pytest.mark.parametrize("left, right, expected", [
    (Vector2(1, 1), Vector2(0, -1), -135),
    (Vector2(1, 1), Vector2(-1, 0), 135),
    (Vector2(0, 1), Vector2(0, -1), 180),
    (Vector2(-1, -1), Vector2(1, 0), 135),
    (Vector2(-1, -1), Vector2(-1, 0), -45),
    (Vector2(1, 0), Vector2(0, 1), 90),
    (Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
    """Known pairs: angle() is in (-180, 180] and antisymmetric except
    at the 180 branch point."""
    lr = left.angle(right)
    rl = right.angle(left)
    assert -180 < lr <= 180
    assert -180 < rl <= 180
    assert isclose(lr, expected)
    assert isclose(rl, 180 if expected == 180 else -expected)
@given(
    left=vectors(),
    right=vectors(),
)
def test_angle_range(left, right):
    """Property: angles lie in (-180, 180] and swap sign with argument order."""
    lr = left.angle(right)
    rl = right.angle(left)
    assert -180 < lr <= 180
    assert -180 < rl <= 180
    assert angle_isclose(lr, -rl)
@given(
    left=vectors(),
    middle=vectors(),
    right=vectors(),
)
def test_angle_additive(left, middle, right):
    """Property: angles compose — angle(l, m) + angle(m, r) == angle(l, r)."""
    lm = left.angle(middle)
    mr = middle.angle(right)
    lr = left.angle(right)
    assert angle_isclose(lm + mr, lr)
@given(
    x=vectors(max_magnitude=1e150),
    l=floats(min_value=-1e150, max_value=1e150),
)
def test_angle_aligned(x: Vector2, l: float):
    """Property: collinear vectors are at angle 0 (same direction) or
    180 (opposite direction); l == 0 is excluded as degenerate."""
    assume(l != 0)
    y = l * x
    assert angle_isclose(x.angle(y), 0 if l > 0 else 180)
| from ppb_vector import Vector2
from math import isclose
import pytest # type: ignore
from hypothesis import assume, given, note
from utils import angle_isclose, vectors
@pytest.mark.parametrize("left, right, expected", [
(Vector2(1, 1), Vector2(0, -1), -135),
(Vector2(1, 1), Vector2(-1, 0), 135),
(Vector2(0, 1), Vector2(0, -1), 180),
(Vector2(-1, -1), Vector2(1, 0), 135),
(Vector2(-1, -1), Vector2(-1, 0), -45),
(Vector2(1, 0), Vector2(0, 1), 90),
(Vector2(1, 0), Vector2(1, 0), 0),
])
def test_angle(left, right, expected):
lr = left.angle(right)
rl = right.angle(left)
assert -180 < lr <= 180
assert -180 < rl <= 180
assert isclose(lr, expected)
assert isclose(rl, 180 if expected == 180 else -expected)
@given(
left=vectors(),
right=vectors(),
)
def test_angle_range(left, right):
lr = left.angle(right)
rl = right.angle(left)
assert -180 < lr <= 180
assert -180 < rl <= 180
assert angle_isclose(lr, -rl)
@given(
left=vectors(),
middle=vectors(),
right=vectors(),
)
def test_angle_additive(left, middle, right):
lm = left.angle(middle)
mr = middle.angle(right)
lr = left.angle(right)
assert angle_isclose(lm + mr, lr)
| artistic-2.0 | Python |
b2d8def3f735e7a0464d7ffe89b140574d65759e | Fix tests | p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles | surveys/tests.py | surveys/tests.py | from django.test import TestCase
from studygroups.models import Course
from .community_feedback import calculate_course_ratings
from .community_feedback import calculate_course_tagdorsements
class TestCommunityFeedback(TestCase):
fixtures = ['test_courses.json', 'test_studygroups.json', 'test_applications.json', 'test_survey_responses.json']
def test_calculate_course_ratings(self):
course = Course.objects.get(pk=3)
self.assertEqual(course.overall_rating, 0)
self.assertEqual(course.rating_step_counts, "{}")
self.assertEqual(course.total_ratings, 0)
calculate_course_ratings(course)
expected_rating_step_counts = '{"5": 2, "4": 1, "3": 0, "2": 0, "1": 0}'
self.assertEqual(course.overall_rating, 4.67)
self.assertEqual(course.rating_step_counts, expected_rating_step_counts)
self.assertEqual(course.total_ratings, 3)
def test_calculate_course_tagdorsements(self):
course = Course.objects.get(pk=3)
self.assertEqual(course.tagdorsement_counts, "{}")
self.assertEqual(course.tagdorsements, "")
self.assertEqual(course.total_reviewers, 0)
calculate_course_tagdorsements(course)
self.assertEqual(course.tagdorsement_counts, '{"Easy to use": 1, "Good for first time facilitators": 0, "Great for beginners": 1, "Engaging material": 1, "Led to great discussions": 1}')
self.assertEqual(course.tagdorsements, 'Easy to use, Great for beginners, Engaging material, Led to great discussions')
self.assertEqual(course.total_reviewers, 1)
| from django.test import TestCase
from studygroups.models import Course
from .community_feedback import calculate_course_ratings
from .community_feedback import calculate_course_tagdorsements
class TestCommunityFeedback(TestCase):
fixtures = ['test_courses.json', 'test_studygroups.json', 'test_applications.json', 'test_survey_responses.json']
def test_calculate_course_ratings(self):
course = Course.objects.get(pk=3)
self.assertEqual(course.overall_rating, 0)
self.assertEqual(course.rating_step_counts, "{}")
self.assertEqual(course.total_ratings, None)
calculate_course_ratings(course)
expected_rating_step_counts = '{"5": 2, "4": 1, "3": 0, "2": 0, "1": 0}'
self.assertEqual(course.overall_rating, 4.67)
self.assertEqual(course.rating_step_counts, expected_rating_step_counts)
self.assertEqual(course.total_ratings, 3)
def test_calculate_course_tagdorsements(self):
course = Course.objects.get(pk=3)
self.assertEqual(course.tagdorsement_counts, "{}")
self.assertEqual(course.tagdorsements, "")
self.assertEqual(course.total_reviewers, None)
calculate_course_tagdorsements(course)
self.assertEqual(course.tagdorsement_counts, '{"Easy to use": 1, "Good for first time facilitators": 0, "Great for beginners": 1, "Engaging material": 1, "Led to great discussions": 1}')
self.assertEqual(course.tagdorsements, 'Easy to use, Great for beginners, Engaging material, Led to great discussions')
self.assertEqual(course.total_reviewers, 1)
| mit | Python |
06cfc43383e6130d74668b214c214e93e7d031c9 | Bump GTK+ to 2.24.5, update Mac patchset | mono/bockbuild,BansheeMediaPlayer/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,bl8/bockbuild,bl8/bockbuild,BansheeMediaPlayer/bockbuild,bl8/bockbuild | packages/gtk+.py | packages/gtk+.py | class GtkPackage (GnomePackage):
def __init__ (self):
GnomePackage.__init__ (self, 'gtk+',
version_major = '2.24',
version_minor = '5',
configure_flags = [
'--with-gdktarget=%{gdk_target}',
'--disable-cups',
]
)
self.gdk_target = 'x11'
if Package.profile.name == 'darwin':
self.gdk_target = 'quartz'
self.sources.extend ([
# input_window_destroy/input_window_crossing stubs
'http://github.com/jralls/gtk-osx-build/raw/master/patches/gdk-quartz-input-window.patch',
# Bug 346609 - [PATCH] Quartz backend has no support for one-button mice
# https://bugzilla.gnome.org/show_bug.cgi?id=346609
'http://git.dronelabs.com/gtk+/patch/?id=729cbea7a2b27c4b8f2062316c0f406ab4c01dac',
# Bug 655074 - [PATCH] Fix crash with undecorated windows on MacOS Lion
# https://bugzilla.gnome.org/show_bug.cgi?id=655074
'https://bugzilla.gnome.org/attachment.cgi?id=192427',
# patches from gtk-2-24-quartz branch
# Handle alt/option modifier key with GDK_MOD5_MASK so that it's recognized as a
# modifier key for accelerators.
'http://git.gnome.org/browse/gtk+/patch?id=689ee935c157d63df8ce11d9e8ce2f8da36da2cd'
# Implement relocatable paths for quartz, similar to those in Win32
'http://git.gnome.org/browse/gtk+/patch?id=d15c82a4652cf23be5e6c40473c4b7302b513dd6',
# Enable using those standard mac deadkeys which are supported by the simple IM.
'http://git.gnome.org/browse/gtk+/patch?id=7fb399d44f5ca741e3257a1f03d978e18ef2fd1a',
# these patches break linking
# # Bug 571582: GtkSelection implementation for quartz.
# 'http://git.gnome.org/browse/gtk+/patch?id=8231c7d1c292c3d24181b42c464c266fa3283a9a',
#
# # Bug 628396: Gtk build fails because of objective-c elements
# 'http://git.gnome.org/browse/gtk+/patch?id=e4b0cbe3184af2d4472f29c61d0bf6e93747d78e',
# Return a NULL context and don't queue up another drag_begin_idle rather than
# asserting if the static drag_context isn't NULL (meaning that a drag is already
# in progress).
'http://git.gnome.org/browse/gtk+/patch?id=8bec04aed6112c6190efa473ce0aef742b13f776',
# Move the retrieval of the NSEvent to the beginning of drag_begin_internal in a
# (probably vain) effort to reduce the race condition caused by deferring actually
# starting the drag until idle time.
'http://git.gnome.org/browse/gtk+/patch?id=609548d995c93ac1d161135be60c012335f125f7',
# # Force an ownerChanged notification when we destroy a widget with a selection, and
# # act on the notification in DnD. This should prevent CF trying to retrieve any
# # deferred pasteboard types later, and DnD crashing if it does.
# 'http://git.gnome.org/browse/gtk+/patch?id=de82a1d0aa03750864af17a9fe34011d22da8c80',
# Fix refresh of static autorelease_pool so that it doesn't happen in gtk-nested loops.
'http://git.gnome.org/browse/gtk+/patch?id=b5046c24ed0681bf0bbf1d1e2872897f84eae06e',
# Fix typo in gdk_event_check
'http://git.gnome.org/browse/gtk+/patch?id=90c970b541d32631c22ca1c2ddfeee4188a8f278',
# Use a synthesized mouse nsevent to start a drag instead of grabbing the most
# recent event from the nswindow (which may well not be the right event).
'http://git.gnome.org/browse/gtk+/patch?id=c297c31732216be7d60b3b11244855d7f7f98003',
])
def prep (self):
Package.prep (self)
if Package.profile.name == 'darwin':
for p in range (2, len (self.sources)):
self.sh ('patch -p1 < "%{sources[' + str (p) + ']}"')
GtkPackage ()
| class GtkPackage (GnomePackage):
def __init__ (self):
GnomePackage.__init__ (self, 'gtk+',
version_major = '2.24',
version_minor = '4',
configure_flags = [
'--with-gdktarget=%{gdk_target}',
'--disable-cups',
]
)
self.gdk_target = 'x11'
if Package.profile.name == 'darwin':
self.gdk_target = 'quartz'
self.sources.extend ([
'http://github.com/jralls/gtk-osx-build/raw/master/patches/gdk-quartz-input-window.patch',
'http://git.dronelabs.com/gtk+/patch/?id=729cbea7a2b27c4b8f2062316c0f406ab4c01dac',
'https://bugzilla.gnome.org/attachment.cgi?id=192416'
])
def prep (self):
Package.prep (self)
if Package.profile.name == 'darwin':
for p in range (2, len (self.sources)):
self.sh ('patch -p1 < "%{sources[' + str (p) + ']}"')
GtkPackage ()
| mit | Python |
21b6360ddc198fb70c3973031e55baa74e5db764 | Make FindRoot use both the current working directory and the paths in sys.path to search for the root directory. This allows us to run scripts outside the repository directory using PYTHONPATH and things should still work. | sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata | scripts/util/cbr_wrapper.py | scripts/util/cbr_wrapper.py | #!/usr/bin/python
import sys
import os
import os.path
import invoke
import stdio
CBR_WRAPPER = "scripts/util/cbr_wrapper.sh"
def FindRoot(start):
search_offsets = ['.', '..', '../..', '../../..']
# Search directories: locals preferred first
search_dirs = [os.getcwd()]
# Otherwise search system dirs
search_dirs.extend(sys.path)
for sdir in search_dirs:
for soffset in search_offsets:
test_file = os.path.join(sdir, soffset, 'install-deps.sh')
if (os.path.isfile(test_file)):
return os.path.join(sdir, soffset)
return None
def RunCBR(args, io=None, **kwargs):
# Get the *scripts* directory, then just run the wrapper
root_dir = FindRoot( os.getcwd() )
if root_dir == None:
print "RunCBR: Couldn't find root directory from current directory."
return
cmd = [os.path.join(root_dir, CBR_WRAPPER)]
cmd.extend(args)
# Setup our IO, using default IO but overriding with parameters
if io == None:
io = stdio.StdIO()
our_io = stdio.StdIO(io.stdin, io.stdout, io.stderr)
if ('stdin' in kwargs):
our_io.stdin = kwargs['stdin']
del kwargs['stdin']
if ('stdout' in kwargs):
our_io.stdout = kwargs['stdout']
del kwargs['stdout']
if ('stderr' in kwargs):
our_io.stderr = kwargs['stderr']
del kwargs['stderr']
invoke.invoke(cmd, io=our_io, **kwargs)
if __name__ == "__main__":
RunCBR(sys.argv)
| #!/usr/bin/python
import sys
import os
import os.path
import invoke
import stdio
CBR_WRAPPER = "scripts/util/cbr_wrapper.sh"
def FindRoot(start):
for offset in ['.', '..', '../..', '../../..']:
offset_dir = start + '/' + offset
test_file = offset_dir + '/install-deps.sh'
if (os.path.isfile(test_file)):
return offset_dir
return None
def RunCBR(args, io=None, **kwargs):
# Get the *scripts* directory, then just run the wrapper
root_dir = FindRoot( os.getcwd() )
if root_dir == None:
print "RunCBR: Couldn't find root directory from current directory."
return
cmd = [root_dir + '/' + CBR_WRAPPER]
cmd.extend(args)
# Setup our IO, using default IO but overriding with parameters
if io == None:
io = stdio.StdIO()
our_io = stdio.StdIO(io.stdin, io.stdout, io.stderr)
if ('stdin' in kwargs):
our_io.stdin = kwargs['stdin']
del kwargs['stdin']
if ('stdout' in kwargs):
our_io.stdout = kwargs['stdout']
del kwargs['stdout']
if ('stderr' in kwargs):
our_io.stderr = kwargs['stderr']
del kwargs['stderr']
invoke.invoke(cmd, io=our_io, **kwargs)
if __name__ == "__main__":
RunCBR(sys.argv)
| bsd-3-clause | Python |
5bd912545ac56af4793d0af6bd58276b77bf11f6 | FIX dependencies | ingadhoc/website | website_sale_taxes_included/__openerp__.py | website_sale_taxes_included/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Product Price Taxes Included or Not',
'version': '8.0.0.0.0',
'category': 'Product',
'sequence': 14,
'summary': '',
'description': """
Product Price Taxes Included or Not
===================================
""",
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'images': [
],
'depends': [
# 'website_sale',
'website_sale_delivery',
'product_price_taxes_included',
],
'data': [
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Product Price Taxes Included or Not',
'version': '8.0.0.0.0',
'category': 'Product',
'sequence': 14,
'summary': '',
'description': """
Product Price Taxes Included or Not
===================================
""",
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'images': [
],
'depends': [
'website_sale',
# 'website_sale_delivery',
'product_price_taxes_included',
],
'data': [
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
| agpl-3.0 | Python |
73a03c70d2d7350d5bbb5b21dcb46890d1777824 | Use lxml only when installed | elfxiong/wiktionary-translations-parser | parser/helper.py | parser/helper.py | """
The common methods in different editions
"""
import string
import re
import requests
from bs4 import BeautifulSoup, Tag
html_parser = 'html.parser'
try:
import lxml
html_parser = 'lxml'
except ImportError:
pass
HEADING_TAG = re.compile(r'^h(?P<level>[1-6])$', re.I)
COMMA_OR_SEMICOLON = re.compile('[,;]')
PARENTHESIS_WITH_TEXT = re.compile(r'\([^()]*\)') # no nesting
def infer_edition_from_url(url):
# print(url)
result = re.match(r'.*//(?P<edition>\w{2,3})\..+', url)
return result.group('edition')
def get_heading_level(tag):
"""If the tag is a heading tag, return its level (1 through 6).
Otherwise, return `None`."""
heading_match = HEADING_TAG.match(tag)
if heading_match:
return int(heading_match.group('level'))
return None
def get_html_tree_from_url(url):
html = requests.get(url)
# print(html.content)
soup = BeautifulSoup(html.content, html_parser)
return soup
def get_html_tree_from_string(html):
return BeautifulSoup(html, html_parser)
def remove_parenthesis2(string):
"""Remove parentheses and text within them.
For nested parentheses, only the innermost one is removed.
"""
return re.sub(PARENTHESIS_WITH_TEXT, '', string=string)
def remove_parenthesis(string):
"""Remove parentheses and text within them.
For nested parentheses, removes the whole thing.
"""
ret = ''
skip1c = 0
skip2c = 0
for i in string:
if i == '[':
skip1c += 1
elif i == '(':
skip2c += 1
elif i == ']' and skip1c > 0:
skip1c -= 1
elif i == ')' and skip2c > 0:
skip2c -= 1
elif skip1c == 0 and skip2c == 0:
ret += i
return ret
def remove_all_punctuation(line):
punc = str.maketrans('', '', string.punctuation)
return line.translate(punc).replace('→', '').strip()
def remove_comma_period(line):
return re.sub('[,.]', '', line)
| """
The common methods in different editions
"""
import string
import re
import requests
from bs4 import BeautifulSoup, Tag
HEADING_TAG = re.compile(r'^h(?P<level>[1-6])$', re.I)
COMMA_OR_SEMICOLON = re.compile('[,;]')
PARENTHESIS_WITH_TEXT = re.compile(r'\([^()]*\)') # no nesting
def infer_edition_from_url(url):
# print(url)
result = re.match(r'.*//(?P<edition>\w{2,3})\..+', url)
return result.group('edition')
def get_heading_level(tag):
"""If the tag is a heading tag, return its level (1 through 6).
Otherwise, return `None`."""
heading_match = HEADING_TAG.match(tag)
if heading_match:
return int(heading_match.group('level'))
return None
def get_html_tree_from_url(url):
html = requests.get(url)
# print(html.content)
soup = BeautifulSoup(html.content, 'lxml')
return soup
def get_html_tree_from_string(html):
return BeautifulSoup(html, 'html.parser')
def remove_parenthesis2(string):
"""Remove parentheses and text within them.
For nested parentheses, only the innermost one is removed.
"""
return re.sub(PARENTHESIS_WITH_TEXT, '', string=string)
def remove_parenthesis(string):
"""Remove parentheses and text within them.
For nested parentheses, removes the whole thing.
"""
ret = ''
skip1c = 0
skip2c = 0
for i in string:
if i == '[':
skip1c += 1
elif i == '(':
skip2c += 1
elif i == ']' and skip1c > 0:
skip1c -= 1
elif i == ')' and skip2c > 0:
skip2c -= 1
elif skip1c == 0 and skip2c == 0:
ret += i
return ret
def remove_all_punctuation(line):
punc = str.maketrans('', '', string.punctuation)
return line.translate(punc).replace('→', '').strip()
def remove_comma_period(line):
return re.sub('[,.]', '', line)
| mit | Python |
cf2caee3136820b2b160bf9c038aa7c1ce32ca24 | Add not implemented error raise | bio2bel/hmdb | src/bio2bel_hmdb/manager.py | src/bio2bel_hmdb/manager.py | # -*- coding: utf-8 -*-
"""
Work in progress
- import database model
- change get_data() to work with xml parser
- write populate function
"""
import configparser
import logging
import os
import requests
import zipfile
from io import BytesIO
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from .models import Base #import database tables
from .constants import (
DATA_URL,
HMDB_SQLITE_PATH,
HMDB_CONFIG_FILE_PATH,
)
log = logging.getLogger(__name__)
def get_data(source=None):
"""Download HMDB data"""
req = requests.get(DATA_URL)
hmdb_zip = zipfile.ZipFile(BytesIO(req.content))
hmdb_text = hmdb_zip.open("hmdb_metabolites.xml").read()
hmdb_text = hmdb_text.decode('UTF-8')
return hmdb_text
class Manager(object):
def __init__(self, connection=None):
self.connection = self.get_connection(connection)
self.engine = create_engine(self.connection)
self.sessionmake = sessionmaker(bind=self.engine, autoflush=False, expire_on_commit=False)
self.session = self.sessionmake()
self.make_tables()
@staticmethod
def get_connection(connection=None):
"""Return the SQLAlchemy connection string if it is set
:param connection: get the SQLAlchemy connection string
:rtype: str
"""
if connection:
return connection
config = configparser.ConfigParser()
cfp = HMDB_CONFIG_FILE_PATH
if os.path.exists(cfp):
log.info('fetch database configuration from {}'.format(cfp))
config.read(cfp)
connection = config['database']['sqlalchemy_connection_string']
log.info('load connection string from {}: {}'.format(cfp, connection))
return connection
with open(cfp, 'w') as config_file:
config['database'] = {'sqlalchemy_connection_string': HMDB_SQLITE_PATH}
config.write(config_file)
log.info('create configuration file {}'.format(cfp))
return HMDB_SQLITE_PATH
def make_tables(self, check_first=True):
"""Create tables from model.py"""
Base.metadata.create_all(self.engine, checkfirst=check_first)
def populate(self, source=None):
"""Populate database with HMDB data"""
if not source:
text = get_data()
else:
pass
raise NotImplementedError
| # -*- coding: utf-8 -*-
"""
Work in progress
- import database model
- change get_data() to work with xml parser
- write populate function
"""
import configparser
import logging
import os
import requests
import zipfile
from io import BytesIO
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from .models import Base #import database tables
from .constants import (
DATA_URL,
HMDB_SQLITE_PATH,
HMDB_CONFIG_FILE_PATH,
)
log = logging.getLogger(__name__)
def get_data(source=None):
"""Download HMDB data"""
req = requests.get(DATA_URL)
hmdb_zip = zipfile.ZipFile(BytesIO(req.content))
hmdb_text = hmdb_zip.open("hmdb_metabolites.xml").read()
hmdb_text = hmdb_text.decode('UTF-8')
return hmdb_text
class Manager(object):
def __init__(self, connection=None):
self.connection = self.get_connection(connection)
self.engine = create_engine(self.connection)
self.sessionmake = sessionmaker(bind=self.engine, autoflush=False, expire_on_commit=False)
self.session = self.sessionmake()
self.make_tables()
@staticmethod
def get_connection(connection=None):
"""Return the SQLAlchemy connection string if it is set
:param connection: get the SQLAlchemy connection string
:rtype: str
"""
if connection:
return connection
config = configparser.ConfigParser()
cfp = HMDB_CONFIG_FILE_PATH
if os.path.exists(cfp):
log.info('fetch database configuration from {}'.format(cfp))
config.read(cfp)
connection = config['database']['sqlalchemy_connection_string']
log.info('load connection string from {}: {}'.format(cfp, connection))
return connection
with open(cfp, 'w') as config_file:
config['database'] = {'sqlalchemy_connection_string': HMDB_SQLITE_PATH}
config.write(config_file)
log.info('create configuration file {}'.format(cfp))
return HMDB_SQLITE_PATH
def make_tables(self, check_first=True):
"""Create tables from model.py"""
Base.metadata.create_all(self.engine, checkfirst=check_first)
def populate(self, source=None):
"""Populate database with HMDB data"""
if not source:
text = get_data()
else:
| mit | Python |
4abe9f180f2edf6e573ba82fd559e17a589e096c | add time guard in notice_all | Answeror/torabot,Answeror/torabot,Answeror/torabot | torabot/tasks/notice.py | torabot/tasks/notice.py | from .engine import make as make_engine
from ..ut.connection import ccontext
from ..ut.guard import timeguard
from ..db import get_pending_notices
from ..core.notice import send_notice
@timeguard
def notice_all(conf):
engine = make_engine(conf)
with ccontext(engine=engine) as conn:
notices = get_pending_notices(conn)
for notice in notices:
with ccontext(commit=True, engine=engine) as conn:
send_notice(
conf=conf,
notice=notice,
conn=conn,
)
| from .engine import make as make_engine
from ..ut.session import makesession
from ..db import get_pending_notices
from ..core.notice import send_notice
def notice_all(conf):
engine = make_engine(conf)
with makesession(engine=engine) as session:
for notice in get_pending_notices(session.connection()):
send_notice(
conf=conf,
notice=notice,
conn=session.connection(),
)
session.commit()
| mit | Python |
4d494d009990adbd52cd69b0408d24cb0898592e | Update generic to get filename | vparitskiy/data-importer,vparitskiy/data-importer | data_importer/importers/generic.py | data_importer/importers/generic.py | # -*- coding: utf-8 -*-
from data_importer.readers.xls_reader import XLSReader
from data_importer.readers.xlsx_reader import XLSXReader
from data_importer.readers.csv_reader import CSVReader
from data_importer.readers.xml_reader import XMLReader
from data_importer.core.exceptions import UnsuportedFile
from .base import BaseImporter
class GenericImporter(BaseImporter):
"""
An implementation of BaseImporter that sets the right reader
by file extension.
Probably the best choice for almost all implementation cases
"""
def set_reader(self):
reader = self.get_reader_class()
# default importers configurations
extra_values = {
'xlsx': {'user_iterator': True, 'data_only': True},
'xls': {'sheet_by_name': self.Meta.sheet_name or None, 'sheet_by_index': self.Meta.sheet_index or 0},
'csv': {'delimiter': self.Meta.delimiter or ';'},
'xml': {},
}
selected_extra_values = extra_values[self.get_source_file_extension()]
self._reader = reader(self, **selected_extra_values)
def get_reader_class(self):
"""
Gets the right file reader class by source file extension
"""
readers = {
'xls': XLSReader,
'xlsx': XLSXReader,
'xml': XMLReader,
'csv': CSVReader,
}
source_file_extension = self.get_source_file_extension()
# No reader for invalid extensions
if source_file_extension not in readers.keys():
raise UnsuportedFile("Unsuported File")
return readers[source_file_extension]
def get_source_file_extension(self):
"""
Gets the source file extension. Used to choose the right reader
"""
if hasattr(self.source, 'file'):
filename = self.source.file.name # DataImporter.FileHistory instances
elif hasattr(self.source, 'file_upload'):
filename = self.source.file_upload.name # Default Python opened file
elif hasattr(self.source, 'name'):
filename = self.source.name
else:
filename = self.source
ext = filename.split('.')[-1]
return ext.lower()
| # -*- coding: utf-8 -*-
from data_importer.readers.xls_reader import XLSReader
from data_importer.readers.xlsx_reader import XLSXReader
from data_importer.readers.csv_reader import CSVReader
from data_importer.readers.xml_reader import XMLReader
from data_importer.core.exceptions import UnsuportedFile
from .base import BaseImporter
class GenericImporter(BaseImporter):
"""
An implementation of BaseImporter that sets the right reader
by file extension.
Probably the best choice for almost all implementation cases
"""
def set_reader(self):
reader = self.get_reader_class()
# default importers configurations
extra_values = {
'xlsx': {'user_iterator': True, 'data_only': True},
'xls': {'sheet_by_name': self.Meta.sheet_name or None, 'sheet_by_index': self.Meta.sheet_index or 0},
'csv': {'delimiter': self.Meta.delimiter or ';'},
'xml': {},
}
selected_extra_values = extra_values[self.get_source_file_extension()]
self._reader = reader(self, **selected_extra_values)
def get_reader_class(self):
"""
Gets the right file reader class by source file extension
"""
readers = {
'xls': XLSReader,
'xlsx': XLSXReader,
'xml': XMLReader,
'csv': CSVReader,
}
source_file_extension = self.get_source_file_extension()
# No reader for invalid extensions
if source_file_extension not in readers.keys():
raise UnsuportedFile("Unsuported File")
return readers[source_file_extension]
def get_source_file_extension(self):
"""
Gets the source file extension. Used to choose the right reader
"""
if isinstance(self.source, str):
filename = self.source
else:
try:
filename = self.source.file.name # DataImporter.FileHistory instances
except AttributeError:
filename = self.source.name # Default Python opened file
ext = filename.split('.')[-1]
return ext.lower()
| bsd-2-clause | Python |
d59635e9542ca54b39eaa10f31b53da46df68e61 | Remove extra space in help string | gkotton/vmware-nsx,gkotton/vmware-nsx | neutron/plugins/ml2/drivers/mlnx/config.py | neutron/plugins/ml2/drivers/mlnx/config.py | # Copyright (c) 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from neutron.extensions import portbindings
eswitch_opts = [
cfg.StrOpt('vnic_type',
default=portbindings.VIF_TYPE_MLNX_DIRECT,
help=_("Type of VM network interface: mlnx_direct or "
"hostdev")),
cfg.BoolOpt('apply_profile_patch',
default=False,
help=_("Enable server compatibility with old nova")),
]
cfg.CONF.register_opts(eswitch_opts, "ESWITCH")
| # Copyright (c) 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from neutron.extensions import portbindings
eswitch_opts = [
cfg.StrOpt('vnic_type',
default=portbindings.VIF_TYPE_MLNX_DIRECT,
help=_("Type of VM network interface: mlnx_direct or "
"hostdev")),
cfg.BoolOpt('apply_profile_patch',
default=False,
help=_("Enable server compatibility with old nova ")),
]
cfg.CONF.register_opts(eswitch_opts, "ESWITCH")
| apache-2.0 | Python |
579fb65aef6e0d8461d79db9c1cbf8e0dd46445d | Add test purchases not allowed for any project | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | nodeconductor/iaas/tests/test_purchases.py | nodeconductor/iaas/tests/test_purchases.py | from rest_framework import test
from rest_framework import status
from rest_framework.reverse import reverse
from nodeconductor.iaas.tests import factories as iaas_factories
from nodeconductor.structure.models import Role
from nodeconductor.structure.tests import factories as structure_factories
class PurchasePermissionTest(test.APISimpleTestCase):
def setUp(self):
self.user = structure_factories.UserFactory.create()
self.client.force_authenticate(user=self.user)
admined_project = structure_factories.ProjectFactory()
managed_project = structure_factories.ProjectFactory()
inaccessible_project = structure_factories.ProjectFactory()
admined_project.add_user(self.user, Role.ADMINISTRATOR)
managed_project.add_user(self.user, Role.MANAGER)
self.admined_purchase = iaas_factories.PurchaseFactory(project=admined_project)
self.managed_purchase = iaas_factories.PurchaseFactory(project=managed_project)
self.inaccessible_purchase = iaas_factories.PurchaseFactory(project=inaccessible_project)
# List filtration tests
def test_user_can_list_purchase_history_of_project_he_is_administrator_of(self):
response = self.client.get(reverse('purchase-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
purchase_url = self._get_purchase_url(self.admined_purchase)
self.assertIn(purchase_url, [purchase['url'] for purchase in response.data])
def test_user_can_list_purchase_history_of_project_he_is_manager_of(self):
response = self.client.get(reverse('purchase-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
purchase_url = self._get_purchase_url(self.managed_purchase)
self.assertIn(purchase_url, [purchase['url'] for purchase in response.data])
def test_user_cannot_list_purchase_history_of_project_he_has_no_role_in(self):
response = self.client.get(reverse('purchase-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
purchase_url = self._get_purchase_url(self.inaccessible_purchase)
self.assertNotIn(purchase_url, [purchase['url'] for purchase in response.data])
def test_user_cannot_list_purchases_not_allowed_for_any_project(self):
inaccessible_purchase = iaas_factories.PurchaseFactory()
response = self.client.get(reverse('purchase-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
purchase_url = self._get_purchase_url(inaccessible_purchase)
self.assertNotIn(purchase_url, [instance['url'] for instance in response.data])
# Helper methods
def _get_purchase_url(self, purchase):
return 'http://testserver' + reverse('purchase-detail', kwargs={'uuid': purchase.uuid}) | from rest_framework import test
from rest_framework import status
from rest_framework.reverse import reverse
from nodeconductor.iaas.tests import factories as iaas_factories
from nodeconductor.structure.models import Role
from nodeconductor.structure.tests import factories as structure_factories
class PurchasePermissionTest(test.APISimpleTestCase):
def setUp(self):
self.user = structure_factories.UserFactory.create()
self.client.force_authenticate(user=self.user)
admined_project = structure_factories.ProjectFactory()
managed_project = structure_factories.ProjectFactory()
admined_project.add_user(self.user, Role.ADMINISTRATOR)
managed_project.add_user(self.user, Role.MANAGER)
self.admined_purchase = iaas_factories.PurchaseFactory(project=admined_project)
self.managed_purchase = iaas_factories.PurchaseFactory(project=managed_project)
self.inaccessible_purchase = iaas_factories.PurchaseFactory()
def test_user_can_list_purchase_history_of_project_he_is_administrator_of(self):
response = self.client.get(reverse('purchase-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
purchase_url = self._get_purchase_url(self.admined_purchase)
self.assertIn(purchase_url, [purchase['url'] for purchase in response.data])
def test_user_can_list_purchase_history_of_project_he_is_manager_of(self):
response = self.client.get(reverse('purchase-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
purchase_url = self._get_purchase_url(self.managed_purchase)
self.assertIn(purchase_url, [purchase['url'] for purchase in response.data])
def test_user_cannot_list_purchase_history_of_project_he_has_no_role_in(self):
response = self.client.get(reverse('purchase-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
purchase_url = self._get_purchase_url(self.inaccessible_purchase)
self.assertNotIn(purchase_url, [purchase['url'] for purchase in response.data])
# Helper methods
def _get_purchase_url(self, purchase):
return 'http://testserver' + reverse('purchase-detail', kwargs={'uuid': purchase.uuid}) | mit | Python |
3d88eb92c190d26693691f779f911fcbf936433e | change default max fee to $25 | lbryio/lbry,lbryio/lbry,zestyr/lbry,zestyr/lbry,DaveA50/lbry,DaveA50/lbry,zestyr/lbry,lbryio/lbry | lbrynet/conf.py | lbrynet/conf.py | """
Some network wide and also application specific parameters
"""
MAX_HANDSHAKE_SIZE = 2**16
MAX_REQUEST_SIZE = 2**16
MAX_BLOB_REQUEST_SIZE = 2**16
MAX_RESPONSE_INFO_SIZE = 2**16
MAX_BLOB_INFOS_TO_REQUEST = 20
BLOBFILES_DIR = ".blobfiles"
BLOB_SIZE = 2**21
MIN_BLOB_DATA_PAYMENT_RATE = .005 # points/megabyte
MIN_BLOB_INFO_PAYMENT_RATE = .02 # points/1000 infos
MIN_VALUABLE_BLOB_INFO_PAYMENT_RATE = .05 # points/1000 infos
MIN_VALUABLE_BLOB_HASH_PAYMENT_RATE = .05 # points/1000 infos
MAX_CONNECTIONS_PER_STREAM = 5
KNOWN_DHT_NODES = [('104.236.42.182', 4000),
('lbrynet1.lbry.io', 4444),
('lbrynet2.lbry.io', 4444),
('lbrynet3.lbry.io', 4444)]
POINTTRADER_SERVER = 'http://ec2-54-187-192-68.us-west-2.compute.amazonaws.com:2424'
#POINTTRADER_SERVER = 'http://127.0.0.1:2424'
LOG_FILE_NAME = "lbrynet.log"
LOG_POST_URL = "https://lbry.io/log-upload"
CRYPTSD_FILE_EXTENSION = ".cryptsd"
API_INTERFACE = "localhost"
API_ADDRESS = "lbryapi"
API_PORT = 5279
ICON_PATH = "app.icns"
APP_NAME = "LBRY"
API_CONNECTION_STRING = "http://%s:%i/%s" % (API_INTERFACE, API_PORT, API_ADDRESS)
UI_ADDRESS = "http://%s:%i" % (API_INTERFACE, API_PORT)
PROTOCOL_PREFIX = "lbry"
DEFAULT_WALLET = "lbryum"
WALLET_TYPES = ["lbryum", "lbrycrd"]
DEFAULT_TIMEOUT = 30
DEFAULT_MAX_SEARCH_RESULTS = 25
DEFAULT_MAX_KEY_FEE = {'USD': {'amount': 25.0, 'address': ''}}
DEFAULT_SEARCH_TIMEOUT = 3.0
DEFAULT_CACHE_TIME = 3600
DEFAULT_UI_BRANCH = "master"
SOURCE_TYPES = ['lbry_sd_hash', 'url', 'btih']
CURRENCIES = {
'BTC': {'type': 'crypto'},
'LBC': {'type': 'crypto'},
'USD': {'type': 'fiat'},
}
| """
Some network wide and also application specific parameters
"""
MAX_HANDSHAKE_SIZE = 2**16
MAX_REQUEST_SIZE = 2**16
MAX_BLOB_REQUEST_SIZE = 2**16
MAX_RESPONSE_INFO_SIZE = 2**16
MAX_BLOB_INFOS_TO_REQUEST = 20
BLOBFILES_DIR = ".blobfiles"
BLOB_SIZE = 2**21
MIN_BLOB_DATA_PAYMENT_RATE = .005 # points/megabyte
MIN_BLOB_INFO_PAYMENT_RATE = .02 # points/1000 infos
MIN_VALUABLE_BLOB_INFO_PAYMENT_RATE = .05 # points/1000 infos
MIN_VALUABLE_BLOB_HASH_PAYMENT_RATE = .05 # points/1000 infos
MAX_CONNECTIONS_PER_STREAM = 5
KNOWN_DHT_NODES = [('104.236.42.182', 4000),
('lbrynet1.lbry.io', 4444),
('lbrynet2.lbry.io', 4444),
('lbrynet3.lbry.io', 4444)]
POINTTRADER_SERVER = 'http://ec2-54-187-192-68.us-west-2.compute.amazonaws.com:2424'
#POINTTRADER_SERVER = 'http://127.0.0.1:2424'
LOG_FILE_NAME = "lbrynet.log"
LOG_POST_URL = "https://lbry.io/log-upload"
CRYPTSD_FILE_EXTENSION = ".cryptsd"
API_INTERFACE = "localhost"
API_ADDRESS = "lbryapi"
API_PORT = 5279
ICON_PATH = "app.icns"
APP_NAME = "LBRY"
API_CONNECTION_STRING = "http://%s:%i/%s" % (API_INTERFACE, API_PORT, API_ADDRESS)
UI_ADDRESS = "http://%s:%i" % (API_INTERFACE, API_PORT)
PROTOCOL_PREFIX = "lbry"
DEFAULT_WALLET = "lbryum"
WALLET_TYPES = ["lbryum", "lbrycrd"]
DEFAULT_TIMEOUT = 30
DEFAULT_MAX_SEARCH_RESULTS = 25
DEFAULT_MAX_KEY_FEE = {'BTC': {'amount': 0.025}}
DEFAULT_SEARCH_TIMEOUT = 3.0
DEFAULT_CACHE_TIME = 3600
DEFAULT_UI_BRANCH = "master"
SOURCE_TYPES = ['lbry_sd_hash', 'url', 'btih']
CURRENCIES = {
'BTC': {'type': 'crypto'},
'LBC': {'type': 'crypto'},
'USD': {'type': 'fiat'},
}
| mit | Python |
6b62c281b926bff305e74175fe3598ac0046d983 | Prepare 0.3.0 | silentsokolov/django-treasuremap,silentsokolov/django-treasuremap,silentsokolov/django-treasuremap | treasuremap/__init__.py | treasuremap/__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__author__ = 'Dmitriy Sokolov'
__version__ = '0.3.0'
default_app_config = 'treasuremap.apps.TreasureMapConfig'
VERSION = __version__
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__author__ = 'Dmitriy Sokolov'
__version__ = '0.2.7'
default_app_config = 'treasuremap.apps.TreasureMapConfig'
VERSION = __version__
| mit | Python |
88d963f3d0ba033ff6eefd85c5913f173abeed8b | Remove unused vars in favour of underscores | wintersandroid/tvrenamr,ghickman/tvrenamr | tvrenamr/cli/helpers.py | tvrenamr/cli/helpers.py | import os
import sys
from tvrenamr.config import Config
def build_file_list(paths, recursive=False, ignore_filelist=()):
"""Finds files from a list of paths"""
for path in paths:
if os.path.isfile(path):
yield os.path.split(path)
if os.path.isdir(path):
for root, _, files in os.walk(path):
for fname in files:
path = os.path.join(root, fname)
if path not in ignore_filelist:
yield os.path.split(path)
if not recursive:
break
def get_config(path=None):
"""Get the first viable config from the list of possiblities"""
def exists(x):
return x is not None and os.path.exists(x)
possible_configs = iter(filter(exists, (
path,
os.path.join(sys.path[0], 'config.yml'),
os.path.expanduser('~/.tvrenamr/config.yml'),
)))
location = next(possible_configs, None)
return Config(location)
def sanitise_log(log, longest):
dt, name = log.split('Renamed: ')
dt = dt.split(' ')[0].replace('T', ' ')
show, number, _ = name.split(' - ')
name = (name.replace(show, show.lstrip('"').strip().ljust(longest), 1)
.replace(number, number.ljust(4), 1)
.replace(' - ', ' | '))
return '{} | {}'.format(dt, name.rstrip('"\n'))
def start_dry_run(logger):
logger('Dry Run beginning.')
logger('-' * 70)
logger('')
def stop_dry_run(logger):
logger('')
logger('-' * 70)
logger('Dry Run complete. No files were harmed in the process.')
logger('')
| import os
import sys
from tvrenamr.config import Config
def build_file_list(paths, recursive=False, ignore_filelist=()):
"""Finds files from a list of paths"""
for path in paths:
if os.path.isfile(path):
yield os.path.split(path)
if os.path.isdir(path):
for root, dirs, files in os.walk(path):
for fname in files:
path = os.path.join(root, fname)
if path not in ignore_filelist:
yield os.path.split(path)
if not recursive:
break
def get_config(path=None):
"""Get the first viable config from the list of possiblities"""
def exists(x):
return x is not None and os.path.exists(x)
possible_configs = iter(filter(exists, (
path,
os.path.join(sys.path[0], 'config.yml'),
os.path.expanduser('~/.tvrenamr/config.yml'),
)))
location = next(possible_configs, None)
return Config(location)
def sanitise_log(log, longest):
dt, name = log.split('Renamed: ')
dt = dt.split(' ')[0].replace('T', ' ')
show, number, title = name.split(' - ')
name = (name.replace(show, show.lstrip('"').strip().ljust(longest), 1)
.replace(number, number.ljust(4), 1)
.replace(' - ', ' | '))
return '{} | {}'.format(dt, name.rstrip('"\n'))
def start_dry_run(logger):
logger('Dry Run beginning.')
logger('-' * 70)
logger('')
def stop_dry_run(logger):
logger('')
logger('-' * 70)
logger('Dry Run complete. No files were harmed in the process.')
logger('')
| mit | Python |
0304eb228c008f2d74695eec46ef3950e5da3ef7 | Add missing import. | PCManticore/argus-ci,stefan-caraiman/cloudbase-init-ci,cmin764/argus-ci,micumatei/cloudbase-init-ci,cloudbase/cloudbase-init-ci,AlexandruTudose/cloudbase-init-ci | argus/shell.py | argus/shell.py | # Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from argus import runner
def main():
failures = runner.run_scenarios()
sys.exit(1 if failures else 0)
if __name__ == "__main__":
main()
| # Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from argus import runner
def main():
failures = runner.run_scenarios()
sys.exit(1 if failures else 0)
if __name__ == "__main__":
main()
| apache-2.0 | Python |
aa3029f962e47a6c0cd3023192616acb8eff5b75 | Insert third_party into the second slot of sys.path rather than the last slot | protron/namebench,google/namebench,rogers0/namebench,google/namebench,google/namebench | third_party/__init__.py | third_party/__init__.py | import os.path
import sys
# This bit of evil should inject third_party into the path for relative imports.
sys.path.insert(1, os.path.dirname(__file__))
| import os.path
import sys
# This bit of evil should inject third_party into the path for relative imports.
sys.path.append(os.path.dirname(__file__))
| apache-2.0 | Python |
dfd683d5e1a2aa479848a565f221a760b18bf198 | Add some logging to MeshFileHandler | onitake/Uranium,onitake/Uranium | Cura/MeshHandling/MeshFileHandler.py | Cura/MeshHandling/MeshFileHandler.py | from Cura.Logger import Logger
## Central class for reading and writing meshes.
#
# This class is created by Application and handles reading and writing mesh files.
class MeshFileHandler(object):
def __init__(self):
self._mesh_readers = []
self._mesh_writers = []
# Try to read the mesh_data from a file. Based on the extension in the file a correct meshreader is selected.
# \param file_name The name of the mesh to load.
# \param storage_device The StorageDevice where the mesh can be found.
# \returns MeshData if it was able to read the file, None otherwise.
def read(self, file_name, storage_device):
try:
for reader in self._mesh_readers:
result = reader.read(file_name, storage_device)
if(result is not None):
return result
except OSError as e:
Logger.log("e", e)
Logger.log("w", "Unable to read file %s", file_name)
return None #unable to read
# Try to write the mesh_data to file. Based on the extension in the file_name a correct meshwriter is selected.
# \param file_name The name of the file to write.
# \param storage_device The StorageDevice where the file should be written to.
# \param mesh_data
# \returns True if it was able to create the file, otherwise False
def write(self, file_name, storage_location, mesh_data):
if(mesh_data is None):
return False
for writer in self._mesh_writers:
if(writer.write(storage_location, mesh_data)):
return True
return False
# Get list of all supported filetypes for writing.
# \returns List of strings with all supported filetypes.
def getSupportedFileTypesWrite(self):
supported_types = []
for writer in self._mesh_writer:
supported_types.append(writer.getSupportedExtension())
return supported_types
# Get list of all supported filetypes for reading.
# \returns List of strings with all supported filetypes.
def getSupportedFileTypesRead(self):
supported_types = []
for reader in self._mesh_readers:
supported_types.append(reader.getSupportedExtension())
return supported_types
def addWriter(self, writer):
self._mesh_writers.append(writer)
def addReader(self, reader):
self._mesh_readers.append(reader)
|
## Central class for reading and writing meshes.
#
# This class is created by Application and handles reading and writing mesh files.
class MeshFileHandler(object):
def __init__(self):
self._mesh_readers = []
self._mesh_writers = []
# Try to read the mesh_data from a file. Based on the extension in the file a correct meshreader is selected.
# \param file_name The name of the mesh to load.
# \param storage_device The StorageDevice where the mesh can be found.
# \returns MeshData if it was able to read the file, None otherwise.
def read(self, file_name, storage_device):
try:
for reader in self._mesh_readers:
result = reader.read(file_name, storage_device)
if(result is not None):
return result
except OSError as e:
print(e)
return None #unable to read
# Try to write the mesh_data to file. Based on the extension in the file_name a correct meshwriter is selected.
# \param file_name The name of the file to write.
# \param storage_device The StorageDevice where the file should be written to.
# \param mesh_data
# \returns True if it was able to create the file, otherwise False
def write(self, file_name, storage_location, mesh_data):
if(mesh_data is None):
return False
for writer in self._mesh_writers:
if(writer.write(storage_location, mesh_data)):
return True
return False
# Get list of all supported filetypes for writing.
# \returns List of strings with all supported filetypes.
def getSupportedFileTypesWrite(self):
supported_types = []
for writer in self._mesh_writer:
supported_types.append(writer.getSupportedExtension())
return supported_types
# Get list of all supported filetypes for reading.
# \returns List of strings with all supported filetypes.
def getSupportedFileTypesRead(self):
supported_types = []
for reader in self._mesh_readers:
supported_types.append(reader.getSupportedExtension())
return supported_types
def addWriter(self, writer):
self._mesh_writers.append(writer)
def addReader(self, reader):
self._mesh_readers.append(reader)
| agpl-3.0 | Python |
51fabb65d702e3099031567e93b9ee2b0dc65d5d | change platform to config | tLDP/python-tldp,tLDP/python-tldp,tLDP/python-tldp | tldp/doctypes/common.py | tldp/doctypes/common.py | #! /usr/bin/python
# -*- coding: utf8 -*-
from __future__ import absolute_import, division, print_function
import os
from ..utils import logger
class SignatureChecker(object):
@classmethod
def signatureLocation(cls, f):
f.seek(0)
buf = f.read(1024).lower()
for sig in cls.signatures:
try:
sindex = buf.index(sig.lower())
logger.debug("Found signature %s in %s at %s; doctype %s.",
sig, f.name, sindex, cls)
return sindex
except ValueError:
logger.debug("Signature %s not found in %s for type %s",
sig, f.name, cls.__name__)
return None
class BaseDoctype(object):
def __init__(self, *args, **kwargs):
self.source = kwargs.get('source', None)
self.output = kwargs.get('output', None)
self.config = kwargs.get('config', None)
assert None not in (self.source, self.output, self.config)
def generate(self):
self.output.prebuild_hook()
os.chdir(self.output.dirname)
vector = [self.config_check(),
self.create_htmls(),
self.create_pdf(),
self.create_txt(),
self.create_html(),
]
result = all(vector)
if result:
self.output.build_success_hook()
else:
self.output.build_failure_hook()
return all(vector)
#
# -- end of file
| #! /usr/bin/python
# -*- coding: utf8 -*-
from __future__ import absolute_import, division, print_function
import os
from ..utils import logger
class SignatureChecker(object):
@classmethod
def signatureLocation(cls, f):
f.seek(0)
buf = f.read(1024).lower()
for sig in cls.signatures:
try:
sindex = buf.index(sig.lower())
logger.debug("Found signature %s in %s at %s; doctype %s.",
sig, f.name, sindex, cls)
return sindex
except ValueError:
logger.debug("Signature %s not found in %s for type %s",
sig, f.name, cls.__name__)
return None
class BaseDoctype(object):
def __init__(self, *args, **kwargs):
self.source = kwargs.get('source', None)
self.output = kwargs.get('output', None)
self.platform = kwargs.get('platform', None)
assert None not in (self.source, self.output, self.platform)
def generate(self):
self.output.prebuild_hook()
os.chdir(self.output.dirname)
vector = [self.platform_check(),
self.create_htmls(),
self.create_pdf(),
self.create_txt(),
self.create_html(),
]
result = all(vector)
if result:
self.output.build_success_hook()
else:
self.output.build_failure_hook()
return all(vector)
#
# -- end of file
| mit | Python |
1da66730632257b47ba004ceeddcc2213cae662f | Fix error hydration bug. | pombreda/py2neo,pombreda/py2neo,technige/py2neo,nigelsmall/py2neo,nigelsmall/py2neo,nicolewhite/py2neo,fpieper/py2neo,pombreda/py2neo,fpieper/py2neo,technige/py2neo,fpieper/py2neo,technige/py2neo,nicolewhite/py2neo,nicolewhite/py2neo | py2neo/error.py | py2neo/error.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__all__ = ["BindError", "JoinError", "GraphError", "ConstraintViolation"]
class BindError(Exception):
""" Raised when a local graph entity is not or cannot be bound to a remote graph entity.
"""
class JoinError(Exception):
""" Raised when two graph entities cannot be joined together.
"""
class GraphError(Exception):
""" Default exception class for all errors returned by the
Neo4j server. See also `CypherError` subclass and `BatchError`
wrapper class which contain additional qualifying information.
"""
@classmethod
def hydrate(cls, data):
full_name = data.get("fullname")
try:
error_cls = static_error_classes[full_name]
except KeyError:
try:
exception = data["exception"]
try:
error_cls = type(exception, (cls,), {})
except TypeError:
# for Python 2.x
error_cls = type(str(exception), (cls,), {})
except KeyError:
error_cls = cls
message = data.pop("message", None)
return error_cls(message, **data)
def __init__(self, message, **kwargs):
Exception.__init__(self, message)
self.message = message
self.exception = kwargs.get("exception")
self.full_name = kwargs.get("fullname")
self.request = kwargs.get("request")
self.response = kwargs.get("response")
self.stack_trace = kwargs.get("stacktrace")
try:
self.cause = self.hydrate(kwargs["cause"])
except Exception:
self.cause = None
class ConstraintViolation(GraphError):
pass
static_error_classes = {
"org.neo4j.graphdb.ConstraintViolationException": ConstraintViolation,
}
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
__all__ = ["BindError", "JoinError", "GraphError", "ConstraintViolation"]
class BindError(Exception):
""" Raised when a local graph entity is not or cannot be bound to a remote graph entity.
"""
class JoinError(Exception):
""" Raised when two graph entities cannot be joined together.
"""
class GraphError(Exception):
""" Default exception class for all errors returned by the
Neo4j server. See also `CypherError` subclass and `BatchError`
wrapper class which contain additional qualifying information.
"""
@classmethod
def hydrate(cls, data):
full_name = data["fullname"]
try:
error_cls = static_error_classes[full_name]
except KeyError:
try:
exception = data["exception"]
try:
error_cls = type(exception, (cls,), {})
except TypeError:
# for Python 2.x
error_cls = type(str(exception), (cls,), {})
except KeyError:
error_cls = cls
message = data.pop("message", None)
return error_cls(message, **data)
def __init__(self, message, **kwargs):
Exception.__init__(self, message)
self.message = message
self.exception = kwargs.get("exception")
self.full_name = kwargs.get("fullname")
self.request = kwargs.get("request")
self.response = kwargs.get("response")
self.stack_trace = kwargs.get("stacktrace")
try:
self.cause = self.hydrate(kwargs["cause"])
except Exception:
self.cause = None
class ConstraintViolation(GraphError):
pass
static_error_classes = {
"org.neo4j.graphdb.ConstraintViolationException": ConstraintViolation,
}
| apache-2.0 | Python |
6b23430b27d6df955166c22a91e843d5efce74b0 | Make sure client_id for IPython is not None | cyrixhero/powerline,bezhermoso/powerline,dragon788/powerline,seanfisk/powerline,kenrachynski/powerline,russellb/powerline,lukw00/powerline,prvnkumar/powerline,S0lll0s/powerline,kenrachynski/powerline,Luffin/powerline,blindFS/powerline,lukw00/powerline,junix/powerline,s0undt3ch/powerline,Liangjianghao/powerline,DoctorJellyface/powerline,xfumihiro/powerline,darac/powerline,S0lll0s/powerline,EricSB/powerline,IvanAli/powerline,bartvm/powerline,blindFS/powerline,QuLogic/powerline,russellb/powerline,QuLogic/powerline,dragon788/powerline,blindFS/powerline,IvanAli/powerline,S0lll0s/powerline,darac/powerline,dragon788/powerline,Luffin/powerline,bezhermoso/powerline,bartvm/powerline,junix/powerline,cyrixhero/powerline,DoctorJellyface/powerline,Liangjianghao/powerline,QuLogic/powerline,lukw00/powerline,DoctorJellyface/powerline,prvnkumar/powerline,s0undt3ch/powerline,Luffin/powerline,IvanAli/powerline,bezhermoso/powerline,areteix/powerline,prvnkumar/powerline,bartvm/powerline,xfumihiro/powerline,russellb/powerline,kenrachynski/powerline,darac/powerline,xxxhycl2010/powerline,xxxhycl2010/powerline,s0undt3ch/powerline,xfumihiro/powerline,seanfisk/powerline,areteix/powerline,EricSB/powerline,junix/powerline,seanfisk/powerline,EricSB/powerline,cyrixhero/powerline,areteix/powerline,xxxhycl2010/powerline,Liangjianghao/powerline | powerline/renderers/ipython/__init__.py | powerline/renderers/ipython/__init__.py | # vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from powerline.renderers.shell import ShellRenderer
from powerline.theme import Theme
class IPythonRenderer(ShellRenderer):
'''Powerline ipython segment renderer.'''
def get_segment_info(self, segment_info, mode):
r = self.segment_info.copy()
r['ipython'] = segment_info
return r
def get_theme(self, matcher_info):
if matcher_info == 'in':
return self.theme
else:
match = self.local_themes[matcher_info]
try:
return match['theme']
except KeyError:
match['theme'] = Theme(
theme_config=match['config'],
main_theme_config=self.theme_config,
**self.theme_kwargs
)
return match['theme']
def shutdown(self):
self.theme.shutdown()
for match in self.local_themes.values():
if 'theme' in match:
match['theme'].shutdown()
def render(self, **kwargs):
# XXX super(ShellRenderer), *not* super(IPythonRenderer)
return super(ShellRenderer, self).render(**kwargs)
def do_render(self, segment_info, **kwargs):
segment_info.update(client_id='ipython')
return super(IPythonRenderer, self).do_render(
segment_info=segment_info,
**kwargs
)
class IPythonPromptRenderer(IPythonRenderer):
'''Powerline ipython prompt (in and in2) renderer'''
escape_hl_start = '\x01'
escape_hl_end = '\x02'
class IPythonNonPromptRenderer(IPythonRenderer):
'''Powerline ipython non-prompt (out and rewrite) renderer'''
pass
class RendererProxy(object):
'''Powerline IPython renderer proxy which chooses appropriate renderer
Instantiates two renderer objects: one will be used for prompts and the
other for non-prompts.
'''
def __init__(self, **kwargs):
old_widths = {}
self.non_prompt_renderer = IPythonNonPromptRenderer(old_widths=old_widths, **kwargs)
self.prompt_renderer = IPythonPromptRenderer(old_widths=old_widths, **kwargs)
def render_above_lines(self, *args, **kwargs):
return self.non_prompt_renderer.render_above_lines(*args, **kwargs)
def render(self, is_prompt, *args, **kwargs):
return (self.prompt_renderer if is_prompt else self.non_prompt_renderer).render(
*args, **kwargs)
def shutdown(self, *args, **kwargs):
self.prompt_renderer.shutdown(*args, **kwargs)
self.non_prompt_renderer.shutdown(*args, **kwargs)
renderer = RendererProxy
| # vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from powerline.renderers.shell import ShellRenderer
from powerline.theme import Theme
class IPythonRenderer(ShellRenderer):
'''Powerline ipython segment renderer.'''
def get_segment_info(self, segment_info, mode):
r = self.segment_info.copy()
r['ipython'] = segment_info
return r
def get_theme(self, matcher_info):
if matcher_info == 'in':
return self.theme
else:
match = self.local_themes[matcher_info]
try:
return match['theme']
except KeyError:
match['theme'] = Theme(
theme_config=match['config'],
main_theme_config=self.theme_config,
**self.theme_kwargs
)
return match['theme']
def shutdown(self):
self.theme.shutdown()
for match in self.local_themes.values():
if 'theme' in match:
match['theme'].shutdown()
def render(self, *args, **kwargs):
# XXX super(ShellRenderer), *not* super(IPythonRenderer)
return super(ShellRenderer, self).render(*args, **kwargs)
class IPythonPromptRenderer(IPythonRenderer):
'''Powerline ipython prompt (in and in2) renderer'''
escape_hl_start = '\x01'
escape_hl_end = '\x02'
class IPythonNonPromptRenderer(IPythonRenderer):
'''Powerline ipython non-prompt (out and rewrite) renderer'''
pass
class RendererProxy(object):
'''Powerline IPython renderer proxy which chooses appropriate renderer
Instantiates two renderer objects: one will be used for prompts and the
other for non-prompts.
'''
def __init__(self, **kwargs):
old_widths = {}
self.non_prompt_renderer = IPythonNonPromptRenderer(old_widths=old_widths, **kwargs)
self.prompt_renderer = IPythonPromptRenderer(old_widths=old_widths, **kwargs)
def render_above_lines(self, *args, **kwargs):
return self.non_prompt_renderer.render_above_lines(*args, **kwargs)
def render(self, is_prompt, *args, **kwargs):
return (self.prompt_renderer if is_prompt else self.non_prompt_renderer).render(
*args, **kwargs)
def shutdown(self, *args, **kwargs):
self.prompt_renderer.shutdown(*args, **kwargs)
self.non_prompt_renderer.shutdown(*args, **kwargs)
renderer = RendererProxy
| mit | Python |
ff42a4595549f347a4aa6d219af79beec8ca9f31 | bump version | rtqichen/torchdiffeq | torchdiffeq/__init__.py | torchdiffeq/__init__.py | from ._impl import odeint
from ._impl import odeint_adjoint
from ._impl import odeint_event
__version__ = "0.2.1"
| from ._impl import odeint
from ._impl import odeint_adjoint
from ._impl import odeint_event
__version__ = "0.2.0"
| mit | Python |
a2606be16408f0e1614059cf45915eaad01a694a | use RLock | Zex/juicemachine,Zex/juicemachine,Zex/juicemachine | python/timer.py | python/timer.py | #! /usr/bin/python
# Timer.py
import threading
def timer_cb():
print('timer_cb ...')
"""
threading.Timer(3.0, timer_cb).run()
obj = threading.Timer(3.0, timer_cb)
obj.run()
"""
class Timer(threading.Thread):
def __init__(self, intv=None, func=None, *args):
self._intv = intv
self._func = func
self._running = True
threading.Thread.__init__(self)
self.setDaemon(True)
def run(self, *args):
while self._running and self._func:
threading._sleep(self._intv)
with threading.RLock() as l:
self._func(*args)
def cancel(self):
print('cancel ...')
self._running = False
if __name__ == '__main__':
tm = Timer(2, timer_cb)
tm.run()
| #! /usr/bin/python
# Timer.py
import threading
def timer_cb():
print('timer_cb ...')
"""
threading.Timer(3.0, timer_cb).run()
obj = threading.Timer(3.0, timer_cb)
obj.run()
"""
class Timer(threading.Thread):
def __init__(self, intv=None, func=None, *args):
self.__intv = intv
self.__func = func
self.__running = True
threading.Thread.__init__(self)
self.setDaemon(True)
def run(self, *args):
while self.__running and self.__func:
with threading.Lock() as l:
threading._sleep(self.__intv)
self.__func(*args)
def cancel(self):
print('cancel ...')
self.__running = False
if __name__ == '__main__':
tm = Timer(2, timer_cb)
tm.run()
| mit | Python |
00ca3102ed7daca12098f09214ff25c890a602c0 | increase num_elements in MapDefunBenchmark | karllessard/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,petewarden/tensorflow,sarvex/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,petewarden/tensorflow,sarvex/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,sarvex/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,annarev/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,sarvex/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,petewarden/tensorflow,sarvex/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,yongtang/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,petewarden/tensorflow,tensorflow/te
nsorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,annarev/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,annarev/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,annarev/tensorflow,annarev/tensorflow,petewarden/tensorflow,yongtang/tensorflow,annarev/tensorflow,gautam1858/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,petewarden/tensorflow,karllessard/tensorflow,sarvex/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,annarev/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-Corporation/tensorflow,petewarden/tensorflow,gautam1858/tensorflow,annarev/tensorflow,annarev/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,petewarden/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/ten
sorflow,sarvex/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,annarev/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,petewarden/tensorflow | tensorflow/python/data/experimental/benchmarks/map_defun_benchmark.py | tensorflow/python/data/experimental/benchmarks/map_defun_benchmark.py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for MapDefunOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.experimental.ops import map_defun
from tensorflow.python.data.benchmarks import benchmark_base
from tensorflow.python.eager import function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import math_ops
class MapDefunBenchmark(benchmark_base.DatasetBenchmarkBase):
  """Benchmarks for MapDefunOp."""
  def _run(self, op, name=None, num_iters=3000):
    # Time `op` over `num_iters` iterations (with warmup) and report the
    # wall time plus a derived examples-per-second figure.
    wall_time = self.run_op_benchmark(
        op=op,
        iters=num_iters,
        warmup=True
    )
    self.report_benchmark(
        name=name,
        iters=num_iters,
        wall_time=wall_time,
        extras={"examples_per_sec": float(1 / wall_time)})
  def benchmark_defun_vs_map_fn(self):
    """Benchmarks to compare the performance of MapDefun vs tf.map_fn."""
    # Identity op wrapped as a defun (typed scalar int32 signature) ...
    @function.defun(input_signature=[tensor_spec.TensorSpec([], dtypes.int32)])
    def defun(x):
      return array_ops.identity(x)
    # ... and the same identity as a plain Python fn for tf.map_fn.
    def fn(x):
      return array_ops.identity(x)
    base = math_ops.range(10000)
    for input_size in [10, 100, 1000, 10000]:
      # Scale iteration count inversely with input size so each size does
      # a comparable amount of total work (10000 // input_size).
      num_iters = 10000 // input_size
      map_defun_op = map_defun.map_defun(defun, [base], [dtypes.int32], [()])
      map_fn_op = map_fn.map_fn(fn, base)
      self._run(
          op=map_defun_op,
          name="with_defun_size_%d" % input_size,
          num_iters=num_iters
      )
      self._run(
          op=map_fn_op,
          name="without_defun_size_%d" % input_size,
          num_iters=num_iters
      )
if __name__ == "__main__":
benchmark_base.test.main()
| # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for MapDefunOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.experimental.ops import map_defun
from tensorflow.python.data.benchmarks import benchmark_base
from tensorflow.python.eager import function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import math_ops
class MapDefunBenchmark(benchmark_base.DatasetBenchmarkBase):
"""Benchmarks for MapDefunOp."""
def _run(self, op, name=None, num_iters=3000):
wall_time = self.run_op_benchmark(
op=op,
iters=num_iters,
warmup=True
)
self.report_benchmark(
name=name,
iters=num_iters,
wall_time=wall_time,
extras={"examples_per_sec": float(1 / wall_time)})
def benchmark_defun_vs_map_fn(self):
"""Benchmarks to compare the performance of MapDefun vs tf.map_fn."""
@function.defun(input_signature=[tensor_spec.TensorSpec([], dtypes.int32)])
def defun(x):
return array_ops.identity(x)
def fn(x):
return array_ops.identity(x)
base = math_ops.range(100)
for input_size in [10, 100, 1000, 10000]:
num_iters = 100000 // input_size
map_defun_op = map_defun.map_defun(defun, [base], [dtypes.int32], [()])
map_fn_op = map_fn.map_fn(fn, base)
self._run(
op=map_defun_op,
name="with_defun_size_%d" % input_size,
num_iters=num_iters
)
self._run(
op=map_fn_op,
name="without_defun_size_%d" % input_size,
num_iters=num_iters
)
if __name__ == "__main__":
benchmark_base.test.main()
| apache-2.0 | Python |
5afd8346a8d710d09591c36780bd17c46192a2f2 | edit ... | ryanrhymes/scandex | cache.py | cache.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Cache abstraction.
#
# Liang Wang @ Computer Lab, Cambridge University
# 2015.06.15
class Cache():
    """Skeleton cache abstraction; every operation is currently a stub."""

    def __init__(self, quota):
        """Remember the capacity *quota* assigned to this cache."""
        self._quota = quota

    def add(self):
        """Add a new item to the cache (not implemented yet)."""
        return None

    def remove(self):
        """Remove an item from the cache (not implemented yet)."""
        return None

    def is_full(self):
        """Report whether the cache is full; the stub always says no."""
        return False
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Cache abstraction.
#
# Liang Wang @ Computer Lab, Cambridge University
# 2015.06.15
class Cache():
def __init__(self, quota):
self._quota = quota
pass
def add(self):
pass
def remove(self):
pass
def is_full(self):
b = False
return b
pass
| mit | Python |
ab21fb971378b9c8f587a27c7ba556b3b8cf5d6f | Bump Version | messente/verigator-python | messente/verigator/__init__.py | messente/verigator/__init__.py | __version__ = "1.0.2"
| __version__ = "1.0.1"
| apache-2.0 | Python |
6591ecb735c8f52c4e9aee24310ad89b0d4030b6 | Print arguments in the debug pipeline | merutak/python-social-auth,rsteca/python-social-auth,VishvajitP/python-social-auth,VishvajitP/python-social-auth,mrwags/python-social-auth,DhiaEddineSaidi/python-social-auth,mark-adams/python-social-auth,webjunkie/python-social-auth,jeyraof/python-social-auth,robbiet480/python-social-auth,merutak/python-social-auth,cmichal/python-social-auth,tkajtoch/python-social-auth,lneoe/python-social-auth,msampathkumar/python-social-auth,DhiaEddineSaidi/python-social-auth,Andygmb/python-social-auth,lneoe/python-social-auth,mathspace/python-social-auth,msampathkumar/python-social-auth,ByteInternet/python-social-auth,lawrence34/python-social-auth,rsteca/python-social-auth,fearlessspider/python-social-auth,joelstanner/python-social-auth,iruga090/python-social-auth,chandolia/python-social-auth,cjltsod/python-social-auth,mchdks/python-social-auth,contracode/python-social-auth,merutak/python-social-auth,python-social-auth/social-docs,rsteca/python-social-auth,michael-borisov/python-social-auth,noodle-learns-programming/python-social-auth,nirmalvp/python-social-auth,jeyraof/python-social-auth,mchdks/python-social-auth,jameslittle/python-social-auth,drxos/python-social-auth,clef/python-social-auth,S01780/python-social-auth,nirmalvp/python-social-auth,python-social-auth/social-app-django,falcon1kr/python-social-auth,michael-borisov/python-social-auth,cmichal/python-social-auth,ononeor12/python-social-auth,jneves/python-social-auth,lamby/python-social-auth,ByteInternet/python-social-auth,joelstanner/python-social-auth,ononeor12/python-social-auth,alrusdi/python-social-auth,chandolia/python-social-auth,ariestiyansyah/python-social-auth,python-social-auth/social-app-django,firstjob/python-social-auth,firstjob/python-social-auth,drxos/python-social-auth,ByteInternet/python-social-auth,degs098/python-social-auth,rsalmaso/python-social-auth,mrwags/python-social-auth,SeanHayes/python-social-auth,robbiet480/pytho
n-social-auth,mchdks/python-social-auth,msampathkumar/python-social-auth,webjunkie/python-social-auth,iruga090/python-social-auth,nirmalvp/python-social-auth,python-social-auth/social-storage-sqlalchemy,joelstanner/python-social-auth,yprez/python-social-auth,bjorand/python-social-auth,python-social-auth/social-app-cherrypy,henocdz/python-social-auth,JJediny/python-social-auth,muhammad-ammar/python-social-auth,contracode/python-social-auth,cjltsod/python-social-auth,alrusdi/python-social-auth,mathspace/python-social-auth,degs098/python-social-auth,JerzySpendel/python-social-auth,yprez/python-social-auth,san-mate/python-social-auth,S01780/python-social-auth,daniula/python-social-auth,daniula/python-social-auth,daniula/python-social-auth,wildtetris/python-social-auth,firstjob/python-social-auth,garrett-schlesinger/python-social-auth,mathspace/python-social-auth,ariestiyansyah/python-social-auth,S01780/python-social-auth,lamby/python-social-auth,MSOpenTech/python-social-auth,muhammad-ammar/python-social-auth,python-social-auth/social-core,frankier/python-social-auth,clef/python-social-auth,michael-borisov/python-social-auth,VishvajitP/python-social-auth,noodle-learns-programming/python-social-auth,SeanHayes/python-social-auth,falcon1kr/python-social-auth,JJediny/python-social-auth,jneves/python-social-auth,henocdz/python-social-auth,lamby/python-social-auth,barseghyanartur/python-social-auth,ariestiyansyah/python-social-auth,jeyraof/python-social-auth,hsr-ba-fs15-dat/python-social-auth,MSOpenTech/python-social-auth,san-mate/python-social-auth,drxos/python-social-auth,bjorand/python-social-auth,barseghyanartur/python-social-auth,jameslittle/python-social-auth,jneves/python-social-auth,robbiet480/python-social-auth,bjorand/python-social-auth,falcon1kr/python-social-auth,cmichal/python-social-auth,jameslittle/python-social-auth,yprez/python-social-auth,DhiaEddineSaidi/python-social-auth,fearlessspider/python-social-auth,wildtetris/python-social-auth,hsr-ba-fs15-dat/python-
social-auth,MSOpenTech/python-social-auth,python-social-auth/social-core,JJediny/python-social-auth,garrett-schlesinger/python-social-auth,lawrence34/python-social-auth,Andygmb/python-social-auth,tobias47n9e/social-core,san-mate/python-social-auth,chandolia/python-social-auth,muhammad-ammar/python-social-auth,clef/python-social-auth,noodle-learns-programming/python-social-auth,mark-adams/python-social-auth,webjunkie/python-social-auth,python-social-auth/social-app-django,iruga090/python-social-auth,degs098/python-social-auth,barseghyanartur/python-social-auth,alrusdi/python-social-auth,ononeor12/python-social-auth,frankier/python-social-auth,fearlessspider/python-social-auth,mrwags/python-social-auth,JerzySpendel/python-social-auth,mark-adams/python-social-auth,lneoe/python-social-auth,rsalmaso/python-social-auth,tkajtoch/python-social-auth,henocdz/python-social-auth,hsr-ba-fs15-dat/python-social-auth,JerzySpendel/python-social-auth,Andygmb/python-social-auth,tkajtoch/python-social-auth,wildtetris/python-social-auth,lawrence34/python-social-auth,contracode/python-social-auth | social/pipeline/debug.py | social/pipeline/debug.py | from pprint import pprint
def debug(response, details, *args, **kwargs):
    """Pipeline step that pretty-prints its inputs to stdout, each section
    framed by a line of '=' characters, for interactive debugging."""
    separator = '=' * 80
    print(separator)
    for value in (response, details, args, kwargs):
        pprint(value)
        print(separator)
| from pprint import pprint
def debug(response, details, *args, **kwargs):
print('=' * 80)
pprint(response)
print('=' * 80)
pprint(details)
print('=' * 80)
| bsd-3-clause | Python |
761e47b6c240251fde15991abcd37846a702692e | Update test_graphviz.py | sdpython/mlstatpy | _unittests/ut_graph/test_graphviz.py | _unittests/ut_graph/test_graphviz.py | """
@brief test log(time=2s)
"""
import os
import unittest
from pyquickhelper.pycode import get_temp_folder, skipif_travis, skipif_appveyor
from mlstatpy.graph.graphviz_helper import draw_graph_graphviz
class TestGraphviz(unittest.TestCase):
    """Tests for mlstatpy.graph.graphviz_helper.draw_graph_graphviz."""
    @skipif_appveyor("no graphviz")
    @skipif_travis("no graphviz")
    def test_draw_graph_graphviz(self):
        # Rendering to a file needs the graphviz binaries, hence the skips.
        temp = get_temp_folder(__file__, "temp_graphviz")
        fout = os.path.join(temp, "image.png")
        # Nodes are (id, label, color) tuples; edges (src, dst[, color]).
        draw_graph_graphviz([(1, "eee", "red")],
                            [(1, 2, "blue"), (3, 4), (1, 3)], fout)
        self.assertTrue(os.path.exists(fout))
        # The helper also leaves the intermediate .gv source next to the image.
        self.assertTrue(os.path.exists(fout + ".gv"))
    def test_draw_graph_graphviz_no_image(self):
        # With image=None the DOT source is returned instead of rendered,
        # so no temp folder or graphviz binaries are needed.
        res = draw_graph_graphviz([(1, "eee", "red")],
                                  [(1, 2, "blue"), (3, 4), (1, 3)],
                                  image=None)
        self.assertIn('[label="eee"', res)
unittest.main()
| """
@brief test log(time=2s)
"""
import os
import unittest
from pyquickhelper.pycode import get_temp_folder, skipif_travis, skipif_appveyor
from mlstatpy.graph.graphviz_helper import draw_graph_graphviz
class TestGraphviz(unittest.TestCase):
@skipif_appveyor("no graphviz")
@skipif_travis("no graphviz")
def test_draw_graph_graphviz(self):
temp = get_temp_folder(__file__, "temp_graphviz")
fout = os.path.join(temp, "image.png")
draw_graph_graphviz([(1, "eee", "red")],
[(1, 2, "blue"), (3, 4), (1, 3)], fout)
self.assertTrue(os.path.exists(fout))
self.assertTrue(os.path.exists(fout + ".gv"))
def test_draw_graph_graphviz_no_image(self):
temp = get_temp_folder(__file__, "temp_graphviz")
fout = os.path.join(temp, "image.png")
res = draw_graph_graphviz([(1, "eee", "red")],
[(1, 2, "blue"), (3, 4), (1, 3)],
image=None)
self.assertIn('[label="eee"', res)
if __name__ == "__main__":
unittest.main()
| mit | Python |
bbeb9b780908cf1322722669f1c68259345fe261 | Add documentation to the root view of BrowsableAPI | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | readthedocs/v3/routers.py | readthedocs/v3/routers.py | from rest_framework.routers import DefaultRouter, APIRootView
from rest_framework_extensions.routers import NestedRouterMixin
class DocsAPIRootView(APIRootView):
    # Overridden only to add documentation for BrowsableAPIRenderer.
    # NOTE: DRF renders this docstring as the endpoint description in the
    # browsable API, so its text is user-facing -- edit with care.
    """
    Read the Docs APIv3 root endpoint.
    Full documentation at [https://docs.readthedocs.io/en/latest/api/v3.html](https://docs.readthedocs.io/en/latest/api/v3.html).
    """
    def get_view_name(self):
        # Title shown in the browsable API instead of the class name.
        return 'Read the Docs APIv3'
class DefaultRouterWithNesting(NestedRouterMixin, DefaultRouter):
    # Router supporting nested routes; serves the documented root view
    # under the 'api-v3-root' URL name.
    APIRootView = DocsAPIRootView
    root_view_name = 'api-v3-root'
| from rest_framework.routers import DefaultRouter
from rest_framework_extensions.routers import NestedRouterMixin
class DefaultRouterWithNesting(NestedRouterMixin, DefaultRouter):
pass
| mit | Python |
32d9d1c78a3ef6a62eadfa3930ad921c583d6d26 | Update setup.py | SYNHAK/spiff,SYNHAK/spiff,SYNHAK/spiff | client/python/setup.py | client/python/setup.py | #!/usr/bin/env python
from distutils.core import setup
# Package metadata for distutils.  `packages` ships the whole ``spiff``
# package (the previous release wrongly listed it under ``py_modules``).
setup(name='Spiff',
      version='0.1.4',
      description="API to Spaceman Spiff",
      author='Torrie Fischer',
      author_email='tdfischer@hackerbots.net',
      url='http://github.com/synhak/spiff',
      packages=['spiff'],
      requires=['requests'],
      )
| #!/usr/bin/env python
from distutils.core import setup
setup(name='Spiff',
version='0.1.4',
description="API to Spaceman Spiff",
author='Torrie Fischer',
author_email='tdfischer@hackerbots.net',
url='http://github.com/synhak/spiff',
py_modules=['spiff'],
requires=['requests'],
)
| agpl-3.0 | Python |
5f5e61d95182a0e31fc45cec48fcb6db9ced1f62 | fix "shadowing from outer scope variables" issue. | ImmobilienScout24/aws-monocyte,ImmobilienScout24/aws-monocyte | src/main/python/monocyte/__init__.py | src/main/python/monocyte/__init__.py | from __future__ import print_function
import sys
from monocyte.handler import aws_handler
REMOVE_WARNING = "WARNING: region '%s' not allowed!"
IGNORED_REGIONS = ["cn-north-1", "us-gov-west-1", "us-east-1", "us-west-2"]
ALLOWED_REGIONS_STARTS_WITH = "eu"
class Monocyte(object):
    """Scans AWS handlers for resources in disallowed regions and asks the
    handler to delete them (handlers are expected to honour dry_run)."""
    def is_region_allowed(self, region):
        # Allowed regions are exactly those whose name starts with "eu"
        # (case-insensitive), per ALLOWED_REGIONS_STARTS_WITH.
        return region.lower().startswith(ALLOWED_REGIONS_STARTS_WITH)
    def is_region_ignored(self, region):
        # Regions that must never be touched (GovCloud, China, two US ones).
        return region.lower() in IGNORED_REGIONS
    def is_region_handled(self, region):
        # NOTE(review): this is True for allowed regions OR for any region
        # simply absent from the ignore list, so unknown regions are
        # handled as well -- verify this is the intended semantics.
        return self.is_region_allowed(region) or not self.is_region_ignored(region)
    def search_and_destroy_unwanted_resources(self, dry_run=True):
        # Announce dry-run mode prominently before doing anything.
        if dry_run:
            print(" DRY RUN " * 8)
            print()
        # Instantiate every registered handler with our region predicate.
        specific_handlers = [handler_cls(self.is_region_handled, dry_run) for handler_cls in aws_handler.all]
        # Handlers without an explicit ordering attribute run last.
        for specific_handler in specific_handlers:
            if not hasattr(specific_handler, "order"):
                specific_handler.order = sys.maxsize
        specific_handlers = sorted(specific_handlers, key=lambda handler_item: handler_item.order)
        print(" order of aws handlers: {}".format(
            " -> ".join([specific_handler.name for specific_handler in specific_handlers])))
        print("allowed regions start with: {}".format(ALLOWED_REGIONS_STARTS_WITH))
        print(" ignored regions: {}".format(" ".join(IGNORED_REGIONS)))
        # Run every handler in order.
        for specific_handler in specific_handlers:
            print("\n---- checking %s resources" % specific_handler.name)
            self.handle_service(specific_handler)
    def handle_service(self, specific_handler):
        # Report and delete every unwanted resource outside an allowed
        # region; `delete` is presumably a no-op under dry_run -- confirm
        # against the handler implementations.
        for resource in specific_handler.fetch_unwanted_resources():
            if not self.is_region_allowed(resource.region):
                print("\n%s\n\t%s" % (
                    specific_handler.to_string(resource),
                    REMOVE_WARNING % resource.region))
                specific_handler.delete(resource)
| from __future__ import print_function
import sys
from monocyte.handler import aws_handler
REMOVE_WARNING = "WARNING: region '%s' not allowed!"
IGNORED_REGIONS = ["cn-north-1", "us-gov-west-1", "us-east-1", "us-west-2"]
ALLOWED_REGIONS_STARTS_WITH = "eu"
class Monocyte(object):
def is_region_allowed(self, region):
return region.lower().startswith(ALLOWED_REGIONS_STARTS_WITH)
def is_region_ignored(self, region):
return region.lower() in IGNORED_REGIONS
def is_region_handled(self, region):
return self.is_region_allowed(region) or not self.is_region_ignored(region)
def search_and_destroy_unwanted_resources(self, dry_run=True):
if dry_run:
print(" DRY RUN " * 8)
print()
specific_handlers = [handler_cls(self.is_region_handled, dry_run) for handler_cls in aws_handler.all]
for handler in specific_handlers:
if not hasattr(handler, "order"):
handler.order = sys.maxsize
specific_handlers = sorted(specific_handlers, key=lambda handler: handler.order)
print(" order of aws handlers: {}".format(
" -> ".join([handler.name for handler in specific_handlers])))
print("allowed regions start with: {}".format(ALLOWED_REGIONS_STARTS_WITH))
print(" ignored regions: {}".format(" ".join(IGNORED_REGIONS)))
for handler in specific_handlers:
print("\n---- checking %s resources" % handler.name)
self.handle_service(handler)
def handle_service(self, specific_handler):
for resource in specific_handler.fetch_unwanted_resources():
if not self.is_region_allowed(resource.region):
print("\n%s\n\t%s" % (
specific_handler.to_string(resource),
REMOVE_WARNING % resource.region))
specific_handler.delete(resource)
| apache-2.0 | Python |
119aef19df7cdaa191101e58403b04c8320c085e | bump version (#39) | logdna/python | _version.py | _version.py | __version__ = '1.5.1'
| __version__ = '1.5.0'
| mit | Python |
e0ef5541c792c91529b3957b09194a1948a3e1e0 | Update __init__.py | infin8/spitfire,infin8/spitfire,infin8/spitfire,infin8/spitfire | spitfire/__init__.py | spitfire/__init__.py | __author__ = 'Mike Solomon'
__author_email__ = '<mas63 @t cornell d0t edu>'
__version__ = '0.7.15'
__license__ = 'BSD License'
import imp
from compiler.util import Compiler, add_common_options
class Template(object):
    """Callable wrapper that lazily compiles Spitfire template source and
    renders it against a list of lookup namespaces.

    NOTE(review): this code uses the Python 2 exec statement
    (``exec src in ...``) and the deprecated ``imp`` module; it will not
    run under Python 3.
    """
    def __init__(self, txt):
        # Raw template source; compiled lazily on first __call__.
        self.txt = txt
        # Module object holding the generated template class once compiled.
        self.tmp = None
    def __call__(self, namespaces=[], *args, **kwargs):
        """Render the template; *namespaces* is the placeholder search list."""
        # NOTE(review): mutable default argument; harmless here because it
        # is only read, but a None sentinel would be safer.
        if self.tmp is None:
            self.compile()
        return self.tmp.template(search_list=namespaces).main()
    def compile(self, **kwargs):
        """Compile ``self.txt`` into a fresh module; returns the template class."""
        # Default compiler options mirroring the CLI defaults; individual
        # entries can be overridden via **kwargs.
        options = {'include_path': '.', 'verbose': False, 'optimizer_flags': [], 'extract_message_catalogue': False, 'message_catalogue_file': None, 'output_file': None, 'xspt_mode': False, 'output_directory': '', 'base_extends_package': None, 'locale': '', 'version': False, 'x_psyco': True, 'ignore_optional_whitespace': True, 'normalize_whitespace': False, 'tune_gc': None, 'function_registry_file': None, 'x_psyco_profile': None, 'enable_filters': True, 'optimizer_level': 0}
        options.update(kwargs)
        compiler_args = Compiler.args_from_optparse(options)
        compiler = Compiler(**compiler_args)
        self.tmp = imp.new_module('tmp')
        src = compiler.compile_template(self.txt, 'template')
        # Python 2 exec statement: run the generated source inside the new
        # module's namespace so it defines `template` there.
        exec src in self.tmp.__dict__
        return self.tmp.template
| __author__ = 'Mike Solomon'
__author_email__ = '<mas63 @t cornell d0t edu>'
__version__ = '0.7.15'
__license__ = 'BSD License'
| bsd-3-clause | Python |
cd8e592ccf963863507f4468cbafb0bfabf945bf | update bot command char | ArchStrike/archstrike-installer | asinstaller/irc.py | asinstaller/irc.py | import socket
import ssl
from config import *
logger = setup_logger(__name__)
class LogHandler(object):
    """Hands installer log links to the ArchStrike IRC bot.

    On construction it opens a verified TLS connection to the configured
    IRC server, registers *nick*, sends the *logs* links to the bot in a
    single PRIVMSG, and disconnects.
    """

    def __init__(self, nick, logs):
        # Connection parameters come from the config module.
        self.server = IRC_SERVER
        self.port = IRC_PORT
        self.bot_nick = IRC_BOT_NICK
        self.nick = nick
        self.connect()
        self.send_logs(logs)
        self.disconnect()

    def send(self, msg):
        # IRC protocol messages are CRLF-terminated.
        self.sock.sendall('{0}\r\n'.format(msg))

    def connect(self):
        """Open a certificate-verified TLS connection and register the nick."""
        # Create context for verifying host
        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
        context.verify_mode = ssl.CERT_REQUIRED
        context.check_hostname = True
        context.load_default_certs()
        # Create a socket
        base_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        base_sock.settimeout(10)
        # Setup SSL Socket
        self.sock = context.wrap_socket(base_sock,
                                        server_hostname=self.server)
        self.sock.connect((self.server, self.port))
        self.send('NICK {0}'.format(self.nick))
        self.send('USER {0} 8 * :ArchStrike Installer'.format(self.nick))
        # Drain the server's welcome banner.  Stop on a read timeout
        # (socket.timeout as well as ssl.SSLError) or when the peer closes
        # the connection -- recv() then returns an empty string, which the
        # original loop spun on forever.
        while True:
            try:
                data = self.sock.recv(1024)
            except (ssl.SSLError, socket.timeout):
                break
            if not data:
                break

    def send_logs(self, links):
        """Send the space-joined log *links* to the bot as one PRIVMSG."""
        logs = ' '.join(links)
        self.send('PRIVMSG {0} :.asinstaller {1}'.format(self.bot_nick, logs))

    def disconnect(self):
        """Politely quit and close the socket."""
        self.send('QUIT')
        self.sock.close()
| import socket
import ssl
from config import *
logger = setup_logger(__name__)
class LogHandler(object):
def __init__(self, nick, logs):
self.server = IRC_SERVER
self.port = IRC_PORT
self.bot_nick = IRC_BOT_NICK
self.nick = nick
self.connect()
self.send_logs(logs)
self.disconnect()
def send(self, msg):
self.sock.sendall('{0}\r\n'.format(msg))
def connect(self):
# Create context for verifying host
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
context.verify_mode = ssl.CERT_REQUIRED
context.check_hostname = True
context.load_default_certs()
# Create a socket
base_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
base_sock.settimeout(10)
# Setup SSL Socket
self.sock = context.wrap_socket(base_sock,
server_hostname=self.server)
self.sock.connect((self.server, self.port))
self.send('NICK {0}'.format(self.nick))
self.send('USER {0} 8 * :ArchStrike Installer'.format(self.nick))
# RECEIVE IRC Server Info
while True:
try:
data = self.sock.recv(1024)
except ssl.SSLError:
break
def send_logs(self, links):
logs = ' '.join(links)
self.send('PRIVMSG {0} :>asinstaller {1}'.format(self.bot_nick, logs))
def disconnect(self):
self.send('QUIT')
self.sock.close()
| mit | Python |
a900305ef043ade2f3e77bb0e6164cec307e1b38 | simplify ansys test | nschloe/meshio | test/test_ansys.py | test/test_ansys.py | # -*- coding: utf-8 -*-
#
import meshio
import pytest
import helpers
@pytest.mark.parametrize('mesh', [
    helpers.tri_mesh,
    helpers.quad_mesh,
    helpers.tri_quad_mesh,
    helpers.tet_mesh,
    helpers.hex_mesh,
])
@pytest.mark.parametrize('write_binary', [False, True])
def test(mesh, write_binary):
    """Round-trip each sample mesh through the ANSYS writer and reader, in
    both ASCII and binary mode, comparing with tolerance 1.0e-15."""
    def writer(*args, **kwargs):
        # Bind the parametrized ASCII/binary flag into the writer signature
        # expected by helpers.write_read.
        return meshio.ansys_io.write(
            *args, write_binary=write_binary, **kwargs
        )
    helpers.write_read(writer, meshio.ansys_io.read, mesh, 1.0e-15)
    return
| # -*- coding: utf-8 -*-
#
import meshio
import pytest
import helpers
@pytest.mark.parametrize('mesh', [
helpers.tri_mesh,
helpers.quad_mesh,
helpers.tri_quad_mesh,
helpers.tet_mesh,
helpers.hex_mesh,
])
def test_ascii(mesh):
def writer(*args, **kwargs):
return meshio.ansys_io.write(*args, write_binary=False, **kwargs)
helpers.write_read(writer, meshio.ansys_io.read, mesh, 1.0e-15)
return
@pytest.mark.parametrize('mesh', [
helpers.tri_mesh,
helpers.quad_mesh,
helpers.tri_quad_mesh,
helpers.tet_mesh,
helpers.hex_mesh,
])
def test_binary(mesh):
def writer(*args, **kwargs):
return meshio.ansys_io.write(*args, write_binary=True, **kwargs)
helpers.write_read(writer, meshio.ansys_io.read, mesh, 1.0e-15)
return
| mit | Python |
833c3f4003cd015d57b154d941a07c29645432c6 | clean up reinforce_grid | openego/dingo,openego/dingo | dingo/flexopt/reinforce_grid.py | dingo/flexopt/reinforce_grid.py | from .check_tech_constraints import check_load, check_voltage
from .reinforce_measures import reinforce_branches_current, reinforce_branches_voltage, extend_substation, new_substation
def reinforce_grid(grid, mode):
""" Evaluates grid reinforcement needs and performs measures
Args:
grid: GridDingo object
mode: kind of grid ('MV' or 'LV')
Returns:
Notes:
References:
.. [1] dena VNS
.. [2] Ackermann et al. (RP VNS)
"""
# kind of grid to be evaluated (MV or LV)
if mode == 'MV':
crit_branches, crit_stations = check_load(grid, mode)
# do reinforcement
reinforce_branches_current(grid, crit_branches)
# if branches or stations have been reinforced: run PF again to check for voltage issues
if crit_branches or crit_stations:
grid.network.run_powerflow(conn=None, method='onthefly')
crit_nodes = check_voltage(grid, mode)
crit_nodes_count_prev_step = len(crit_nodes)
# as long as there are voltage issues, do reinforcement
while crit_nodes:
# determine all branches on the way from HV-MV substation to crit. nodes
crit_branches_v = grid.find_and_union_paths(grid.station(), crit_nodes)
# do reinforcement
reinforce_branches_voltage(grid, crit_branches_v)
# run PF
grid.network.run_powerflow(conn=None, method='onthefly')
crit_nodes = check_voltage(grid, mode)
# if there are critical nodes left but no larger cable available, stop reinforcement
if len(crit_nodes) == crit_nodes_count_prev_step:
print('==> There are', len(grid.find_and_union_paths(grid.station(), crit_nodes)),
'branches that cannot be reinforced (no appropriate cable available).')
break
crit_nodes_count_prev_step = len(crit_nodes)
if not crit_nodes:
print('==> All voltage issues could be solved using reinforcement.')
elif mode == 'LV':
raise NotImplementedError
| from .check_tech_constraints import check_load, check_voltage
from .reinforce_measures import reinforce_branches_current, reinforce_branches_voltage, extend_substation, new_substation
def reinforce_grid(grid, mode):
""" Evaluates grid reinforcement needs and performs measures
Args:
grid: GridDingo object
mode: kind of grid ('MV' or 'LV')
Returns:
Notes:
References:
.. [1] dena VNS
.. [2] Ackermann et al. (RP VNS)
"""
# kind of grid to be evaluated (MV or LV)
if mode == 'MV':
crit_branches, crit_stations = check_load(grid, mode)
#print(crit_branches)
# do reinforcement
reinforce_branches_current(grid, crit_branches)
# if branches or stations have been reinforced: run PF again to check for voltage issues
if crit_branches or crit_stations:
grid.network.run_powerflow(conn=None, method='onthefly')
crit_nodes = check_voltage(grid, mode)
crit_nodes_count_prev_step = len(crit_nodes)
#print(crit_nodes)
# as long as there are voltage issues, do reinforcement
while crit_nodes:
# determine all branches on the way from HV-MV substation to crit. nodes
crit_branches_v = grid.find_and_union_paths(grid.station(), crit_nodes)
# do reinforcement
reinforce_branches_voltage(grid, crit_branches_v)
# grid.graph_draw()
grid.network.run_powerflow(conn=None, method='onthefly')
crit_nodes = check_voltage(grid, mode)
# if there are critical nodes left but no larger cable available, stop reinforcement
if len(crit_nodes) == crit_nodes_count_prev_step:
print('==> There are', len(grid.find_and_union_paths(grid.station(), crit_nodes)),
'branches that cannot be reinforced (no appropriate cable available).')
break
crit_nodes_count_prev_step = len(crit_nodes)
if not crit_nodes:
print('==> All voltage issues could be solved using reinforcement.')
elif mode == 'LV':
raise NotImplementedError
#check_load(grid, mode)
#nodes = grid.
#check_voltage(grid, mode, nodes)
| agpl-3.0 | Python |
6ceeb6e522228a1b1059a2919d26955af7576318 | Fix unit tests | DanNixon/Sakuya,DanNixon/Sakuya | pc_client/test/test_notification_centre.py | pc_client/test/test_notification_centre.py | import os
import unittest
from sakuyaclient.jenkins import JenkinsClient
from sakuyaclient.trac import TracClient
from sakuyaclient.notification_centre import NotificationCentre
class NotificationCentreTest(unittest.TestCase):
def setUp(self):
self._trac_cache_file = 'ticket_cache.txt'
self._builds_cache_file = 'builds_cache.txt'
self._jenkins = JenkinsClient('http://builds.mantidproject.org', self._builds_cache_file, ['develop_clean'])
self._trac = TracClient('http://trac.mantidproject.org/mantid', self._trac_cache_file)
self._trac.set_subscriptions(['Dan Nixon'])
self._notifications = NotificationCentre(300)
self._notifications.add_notification_source('tickets', self._trac)
self._notifications.add_notification_source('builds', self._jenkins)
def tearDown(self):
if os.path.exists(self._trac_cache_file):
os.remove(self._trac_cache_file)
if os.path.exists(self._builds_cache_file):
os.remove(self._builds_cache_file)
def test_poll(self):
results = self._notifications.poll()
self.assertIsNotNone(results)
self.assertTrue('tickets' in results)
self.assertTrue('builds' in results)
print results
def test_selective_poll(self):
results = self._notifications.poll(['builds'])
self.assertIsNotNone(results)
self.assertTrue('tickets' not in results)
self.assertTrue('builds' in results)
print results
def test_multi_poll(self):
# First poll should return diffs
results = self._notifications.poll()
for key in results.keys():
self.assertNotEqual(len(results[key]), 0)
# Second poll should return no diffs
results = self._notifications.poll()
for key in results.keys():
self.assertEqual(len(results[key]), 0)
if __name__ == '__main__':
unittest.main()
| import os
import unittest
from sakuyaclient.jenkins import JenkinsClient
from sakuyaclient.trac import TracClient
from sakuyaclient.notification_centre import NotificationCentre
class NotificationCentreTest(unittest.TestCase):
def setUp(self):
self._trac_cache_file = 'ticket_cache.txt'
self._builds_cache_file = 'builds_cache.txt'
self._jenkins = JenkinsClient('http://builds.mantidproject.org', self._builds_cache_file, ['develop_clean'])
self._trac = TracClient('http://trac.mantidproject.org/mantid', self._trac_cache_file)
self._trac.set_subscriptions(['Dan Nixon'])
self._notifications = NotificationCentre()
self._notifications.add_notification_source('tickets', self._trac)
self._notifications.add_notification_source('builds', self._jenkins)
def tearDown(self):
if os.path.exists(self._trac_cache_file):
os.remove(self._trac_cache_file)
if os.path.exists(self._builds_cache_file):
os.remove(self._builds_cache_file)
def test_poll(self):
results = self._notifications.poll()
self.assertIsNotNone(results)
self.assertTrue('tickets' in results)
self.assertTrue('builds' in results)
print results
def test_selective_poll(self):
results = self._notifications.poll(['builds'])
self.assertIsNotNone(results)
self.assertTrue('tickets' not in results)
self.assertTrue('builds' in results)
print results
def test_multi_poll(self):
# First poll should return diffs
results = self._notifications.poll()
for key in results.keys():
self.assertNotEqual(len(results[key]), 0)
# Second poll should return no diffs
results = self._notifications.poll()
for key in results.keys():
self.assertEqual(len(results[key]), 0)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
a3364a991b3f3966e7ea0d31cea4e594dff0befc | Bump version to 1.3.2. | 1313e/CMasher | cmasher/__version__.py | cmasher/__version__.py | # -*- coding: utf-8 -*-
"""
CMasher Version
===============
Stores the different versions of the *CMasher* package.
"""
# %% VERSIONS
# Default/Latest/Current version
__version__ = '1.3.2'
| # -*- coding: utf-8 -*-
"""
CMasher Version
===============
Stores the different versions of the *CMasher* package.
"""
# %% VERSIONS
# Default/Latest/Current version
__version__ = '1.3.1'
| bsd-3-clause | Python |
83f63f17c9ce707749609aa466cd4d7152e5f5ed | clean up variable/class names | nkoep/pymanopt,nkoep/pymanopt,pymanopt/pymanopt,pymanopt/pymanopt,nkoep/pymanopt | pymanopt/autodiff/backends/_callable.py | pymanopt/autodiff/backends/_callable.py | from ._backend import Backend, assert_backend_available
from .. import make_tracing_backend_decorator
class _CallableBackend(Backend):
def __str__(self):
return "callable"
@staticmethod
def is_available():
return True
@assert_backend_available
def is_compatible(self, objective, argument):
return callable(objective)
@assert_backend_available
def compile_function(self, objective, argument):
return objective
def _raise_not_implemented_error(self, objective, argument):
raise NotImplementedError("No autodiff support available for the "
"canonical 'Callable' backend")
compute_gradient = compute_hessian = _raise_not_implemented_error
Callable = make_tracing_backend_decorator(_CallableBackend)
| from ._backend import Backend, assert_backend_available
from .. import make_tracing_backend_decorator
class CallableBackend(Backend):
def __str__(self):
return "callable"
@staticmethod
def is_available():
return True
@assert_backend_available
def is_compatible(self, objective, argument):
return callable(objective)
@assert_backend_available
def compile_function(self, objective, argument):
return objective
@assert_backend_available
def __not_implemented(self, objective, argument):
raise NotImplementedError("No autodiff support available for the "
"canonical callable backend")
compute_gradient = compute_hessian = __not_implemented
Callable = make_tracing_backend_decorator(CallableBackend)
| bsd-3-clause | Python |
0cae1ae6ca372df40864feb2c45763bc2640890b | Disable migrations | djangocon/2017.djangocon.eu,djangocon/2017.djangocon.eu,djangocon/2017.djangocon.eu,djangocon/2017.djangocon.eu,djangocon/2017.djangocon.eu | djangocon_europe/settings_tests.py | djangocon_europe/settings_tests.py | # -*- coding: utf-8 -*-
from .settings import * # NOQA
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
'ADMINUSER': 'admin',
},
}
RAVEN_CONFIG = {}
class DisableMigrations(object):
def __contains__(self, item):
return True
def __getitem__(self, item):
return 'notmigrations'
MIGRATION_MODULES = DisableMigrations()
| # -*- coding: utf-8 -*-
from .settings import * # NOQA
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
'ADMINUSER': 'admin',
},
}
RAVEN_CONFIG = {}
| bsd-3-clause | Python |
5aad76a5005ab094f762d909549db3bb643b6a95 | Update is_transaction_effect_satisfied test | hackaugusto/raiden,hackaugusto/raiden | raiden/tests/unit/transfer/test_node.py | raiden/tests/unit/transfer/test_node.py | from raiden.constants import EMPTY_MERKLE_ROOT
from raiden.tests.utils.factories import HOP1, HOP2, UNIT_SECRETHASH, make_block_hash
from raiden.transfer.events import ContractSendChannelBatchUnlock
from raiden.transfer.node import is_transaction_effect_satisfied, state_transition
from raiden.transfer.state_change import (
ContractReceiveChannelBatchUnlock,
ContractReceiveChannelSettled,
)
def test_is_transaction_effect_satisfied(
chain_state,
token_network_state,
token_network_id,
netting_channel_state,
):
canonical_identifier = netting_channel_state.canonical_identifier
assert token_network_id == canonical_identifier.token_network_address
transaction = ContractSendChannelBatchUnlock(
token_address=token_network_state.token_address,
canonical_identifier=canonical_identifier,
participant=netting_channel_state.partner_state.address,
triggered_by_block_hash=make_block_hash(),
)
state_change = ContractReceiveChannelBatchUnlock(
transaction_hash=UNIT_SECRETHASH,
canonical_identifier=canonical_identifier,
participant=HOP1,
partner=HOP2,
locksroot=EMPTY_MERKLE_ROOT,
unlocked_amount=0,
returned_tokens=0,
block_number=1,
block_hash=make_block_hash(),
)
# unlock for a channel in which this node is not a participant must return False
assert not is_transaction_effect_satisfied(chain_state, transaction, state_change)
# now call normally with us being the partner and not the participant
state_change.partner = netting_channel_state.partner_state.address
state_change.participant = netting_channel_state.our_state.address
assert not is_transaction_effect_satisfied(chain_state, transaction, state_change)
# finally call with us being the participant and not the partner which should check out
state_change.participant = netting_channel_state.partner_state.address
state_change.partner = netting_channel_state.our_state.address
# ContractSendChannelBatchUnlock would only be satisfied if both sides are unlocked
# and if the channel was cleared
channel_settled = ContractReceiveChannelSettled(
transaction_hash=bytes(32),
canonical_identifier=canonical_identifier,
our_onchain_locksroot=EMPTY_MERKLE_ROOT,
partner_onchain_locksroot=EMPTY_MERKLE_ROOT,
block_number=1,
block_hash=make_block_hash(),
)
iteration = state_transition(
chain_state=chain_state,
state_change=channel_settled,
)
assert is_transaction_effect_satisfied(iteration.new_state, transaction, state_change)
| from raiden.constants import EMPTY_MERKLE_ROOT
from raiden.tests.utils.factories import HOP1, HOP2, UNIT_SECRETHASH, make_block_hash
from raiden.transfer.events import ContractSendChannelBatchUnlock
from raiden.transfer.node import is_transaction_effect_satisfied
from raiden.transfer.state_change import ContractReceiveChannelBatchUnlock
def test_is_transaction_effect_satisfied(
chain_state,
token_network_state,
token_network_id,
netting_channel_state,
):
canonical_identifier = netting_channel_state.canonical_identifier
assert token_network_id == canonical_identifier.token_network_address
transaction = ContractSendChannelBatchUnlock(
token_address=token_network_state.token_address,
canonical_identifier=canonical_identifier,
participant=netting_channel_state.partner_state.address,
triggered_by_block_hash=make_block_hash(),
)
state_change = ContractReceiveChannelBatchUnlock(
transaction_hash=UNIT_SECRETHASH,
canonical_identifier=canonical_identifier,
participant=HOP1,
partner=HOP2,
locksroot=EMPTY_MERKLE_ROOT,
unlocked_amount=0,
returned_tokens=0,
block_number=1,
block_hash=make_block_hash(),
)
# unlock for a channel in which this node is not a participant must return False
assert not is_transaction_effect_satisfied(chain_state, transaction, state_change)
# now call normally with us being the partner and not the participant
state_change.partner = netting_channel_state.partner_state.address
state_change.participant = netting_channel_state.our_state.address
assert not is_transaction_effect_satisfied(chain_state, transaction, state_change)
# finally call with us being the participant and not the partner which should check out
state_change.participant = netting_channel_state.partner_state.address
state_change.partner = netting_channel_state.our_state.address
assert is_transaction_effect_satisfied(chain_state, transaction, state_change)
| mit | Python |
4573fe1bbf1ebcbcea2ed65f3545c3c9c46eb3e6 | comment edit | DarkEnergySurvey/ugali,kadrlica/ugali | ugali/utils/__init__.py | ugali/utils/__init__.py | """
This is the UGaLi utils sub-package.
Various helpful classes and functions live here.
"""
| """
This is the UGaLi utils sub-package.
Various helpful classes and functions live here.
Modules
plotting :
"""
| mit | Python |
6efa6fc6480e8436f1c58df46e84db9f9e2d3705 | remove unused client_d | garbas/mozilla-releng-services,djmitche/build-relengapi,lundjordan/services,lundjordan/services,djmitche/build-relengapi,srfraser/services,garbas/mozilla-releng-services,La0/mozilla-relengapi,lundjordan/services,djmitche/build-relengapi,mozilla/build-relengapi,mozilla-releng/services,srfraser/services,garbas/mozilla-releng-services,lundjordan/services,lundjordan/build-relengapi,andrei987/services,mozilla/build-relengapi,La0/mozilla-relengapi,mozilla/build-relengapi,lundjordan/build-relengapi,garbas/mozilla-releng-services,andrei987/services,mozilla-releng/services,mozilla/build-relengapi,mozilla-releng/services,lundjordan/build-relengapi,hwine/build-relengapi,andrei987/services,La0/mozilla-relengapi,hwine/build-relengapi,hwine/build-relengapi,srfraser/services,Callek/build-relengapi,hwine/build-relengapi,Callek/build-relengapi,lundjordan/build-relengapi,andrei987/services,djmitche/build-relengapi,Callek/build-relengapi,srfraser/services,Callek/build-relengapi,mozilla-releng/services,La0/mozilla-relengapi | relengapi/blueprints/tokenauth/types.py | relengapi/blueprints/tokenauth/types.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import wsme.types
from datetime import datetime
from relengapi.lib.api import jsonObject
class JsonToken(wsme.types.Base):
"""A token granting the bearer a limited set of permissions.
In all cases except creating a new token, the ``token`` attribute is empty.
There is no way to recover a lost token string except for revoking and
re-issuing the token.
"""
_name = 'Token'
#: token type (short string). This defaults to ``prm`` for backward
#: compatibility, but should always be specified.
typ = wsme.types.wsattr(
wsme.types.Enum(unicode, 'prm', 'tmp', 'usr'),
mandatory=False,
default='prm')
#: token ID for revokable tokens
id = wsme.types.wsattr(int, mandatory=False)
#: not-before time for limited-duration tokens (see
#: :ref:`Datetime-Format` for format information)
not_before = wsme.types.wsattr(datetime, mandatory=False)
#: expiration time for limited-duration tokens
expires = wsme.types.wsattr(datetime, mandatory=False)
#: metadata fro limited-duration tokens (arbitrary JSON object)
metadata = wsme.types.wsattr(jsonObject, mandatory=False)
#: if true, the token is disabled because the associated user's
#: permissions are no longer sufficient.
disabled = wsme.types.wsattr(bool, mandatory=False)
#: list of permissions this token grants
permissions = wsme.types.wsattr([unicode], mandatory=True)
#: the user-supplied token description for revokable tokens
description = wsme.types.wsattr(unicode, mandatory=False)
#: user email for user-associated tokens
user = wsme.types.wsattr(unicode, mandatory=False)
#: the opaque token string (only set on new tokens)
token = wsme.types.wsattr(unicode, mandatory=False)
| # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import wsme.types
from datetime import datetime
from relengapi.lib.api import jsonObject
class JsonToken(wsme.types.Base):
"""A token granting the bearer a limited set of permissions.
In all cases except creating a new token, the ``token`` attribute is empty.
There is no way to recover a lost token string except for revoking and
re-issuing the token.
"""
_name = 'Token'
#: token type (short string). This defaults to ``prm`` for backward
#: compatibility, but should always be specified.
typ = wsme.types.wsattr(
wsme.types.Enum(unicode, 'prm', 'tmp', 'usr'),
mandatory=False,
default='prm')
#: token ID for revokable tokens
id = wsme.types.wsattr(int, mandatory=False)
#: not-before time for limited-duration tokens (see
#: :ref:`Datetime-Format` for format information)
not_before = wsme.types.wsattr(datetime, mandatory=False)
#: expiration time for limited-duration tokens
expires = wsme.types.wsattr(datetime, mandatory=False)
#: metadata fro limited-duration tokens (arbitrary JSON object)
metadata = wsme.types.wsattr(jsonObject, mandatory=False)
#: if true, the token is disabled because the associated user's
#: permissions are no longer sufficient.
disabled = wsme.types.wsattr(bool, mandatory=False)
#: list of permissions this token grants
permissions = wsme.types.wsattr([unicode], mandatory=True)
#: the user-supplied token description for revokable tokens
description = wsme.types.wsattr(unicode, mandatory=False)
#: user email for user-associated tokens
user = wsme.types.wsattr(unicode, mandatory=False)
#: client id for client-associated tokens
client_id = wsme.types.wsattr(int, mandatory=False)
#: the opaque token string (only set on new tokens)
token = wsme.types.wsattr(unicode, mandatory=False)
| mpl-2.0 | Python |
20696d6f236afc1bc0e2b3db570363540e70ca84 | Use ip instead of localhost for travis | witchard/grole | test/test_serve.py | test/test_serve.py | import unittest
import asyncio
import io
import multiprocessing
import urllib.request
import time
import grole
def simple_server():
app = grole.Grole()
@app.route('/')
def hello(env, req):
return 'Hello, World!'
app.run(host='127.0.0.1')
class TestServe(unittest.TestCase):
def test_simple(self):
p = multiprocessing.Process(target=simple_server)
p.start()
time.sleep(0.1)
with urllib.request.urlopen('http://127.0.0.1:1234') as response:
html = response.read()
self.assertEqual(html, b'Hello, World!')
p.terminate()
def test_fileserver(self):
p = multiprocessing.Process(target=grole.main, args=[['-a', '127.0.0.1']])
p.start()
time.sleep(0.1)
with urllib.request.urlopen('http://127.0.0.1:1234/test/test.dat') as response:
html = response.read()
self.assertEqual(html, b'foo\n')
p.terminate()
| import unittest
import asyncio
import io
import multiprocessing
import urllib.request
import time
import grole
def simple_server():
app = grole.Grole()
@app.route('/')
def hello(env, req):
return 'Hello, World!'
app.run()
class TestServe(unittest.TestCase):
def test_simple(self):
p = multiprocessing.Process(target=simple_server)
p.start()
time.sleep(0.1)
with urllib.request.urlopen('http://localhost:1234') as response:
html = response.read()
self.assertEqual(html, b'Hello, World!')
p.terminate()
def test_fileserver(self):
p = multiprocessing.Process(target=grole.main, args=[[]])
p.start()
time.sleep(0.1)
with urllib.request.urlopen('http://localhost:1234/test/test.dat') as response:
html = response.read()
self.assertEqual(html, b'foo\n')
p.terminate()
| mit | Python |
67524ff595ff39611981c459330ec5b947a9e754 | fix missing import | HPI-SWA-Lab/RSqueak,HPI-SWA-Lab/RSqueak,HPI-SWA-Lab/RSqueak,HPI-SWA-Lab/RSqueak | spyvm/plugins/large_integer.py | spyvm/plugins/large_integer.py | from spyvm import model
from spyvm.primitives import prim_table, \
BIT_AND, BIT_OR, BIT_XOR, BIT_SHIFT, ADD, SUBTRACT, DIVIDE, MULTIPLY
from spyvm.error import PrimitiveFailedError
from spyvm.plugins.plugin import Plugin
LargeIntegerPlugin = Plugin()
ops = {
'primDigitBitAnd': BIT_AND,
'primDigitBitOr': BIT_OR,
'primDigitBitXor': BIT_XOR,
'primDigitBitShiftMagnitude': BIT_SHIFT,
'primDigitAdd': ADD,
'primDigitSubtract': SUBTRACT,
}
for (name, primitive) in ops.items():
def make_func(primitive):
primfunc = prim_table[primitive]
def func(interp, s_frame, argcount):
return primfunc(interp, s_frame, argcount)
func.func_name = name
LargeIntegerPlugin.expose_primitive(clean_stack=False, no_result=True)(func)
make_func(primitive)
negOps = {
'primDigitDivNegative': DIVIDE,
'primDigitMultiplyNegative': MULTIPLY
}
for (name, primitive) in arithOps.items():
def make_func(primitive):
primfunc = prim_table[primitive]
def func(interp, s_frame, argcount):
if argcount != 3:
raise PrimitiveFailedError
neg = space.unwrap_bool(s_frame.pop())
return primfunc(interp, s_frame, 2)
func.func_name = name
LargeIntegerPlugin.expose_primitive(clean_stack=False, no_result=True)(func)
make_func(primitive)
| from spyvm import model
from spyvm.primitives import prim_table, \
BIT_AND, BIT_OR, BIT_XOR, BIT_SHIFT, ADD, SUBTRACT
from spyvm.error import PrimitiveFailedError
from spyvm.plugins.plugin import Plugin
LargeIntegerPlugin = Plugin()
ops = {
'primDigitBitAnd': BIT_AND,
'primDigitBitOr': BIT_OR,
'primDigitBitXor': BIT_XOR,
'primDigitBitShiftMagnitude': BIT_SHIFT,
'primDigitAdd': ADD,
'primDigitSubtract': SUBTRACT,
}
for (name, primitive) in ops.items():
def make_func(primitive):
primfunc = prim_table[primitive]
def func(interp, s_frame, argcount):
return primfunc(interp, s_frame, argcount)
func.func_name = name
LargeIntegerPlugin.expose_primitive(clean_stack=False, no_result=True)(func)
make_func(primitive)
negOps = {
'primDigitDivNegative': DIVIDE,
'primDigitMultiplyNegative': MULTIPLY
}
for (name, primitive) in arithOps.items():
def make_func(primitive):
primfunc = prim_table[primitive]
def func(interp, s_frame, argcount):
if argcount != 3:
raise PrimitiveFailedError
neg = space.unwrap_bool(s_frame.pop())
return primfunc(interp, s_frame, 2)
func.func_name = name
LargeIntegerPlugin.expose_primitive(clean_stack=False, no_result=True)(func)
make_func(primitive)
| bsd-3-clause | Python |
b811f3fd8e114514ca0cac6b2e134ce8a66d4fdb | Update __init__.py | rouxcode/django-cms-plugins,rouxcode/django-cms-plugins,rouxcode/django-cms-plugins | cmsplugins/__init__.py | cmsplugins/__init__.py | __version__ = '0.4.0'
__author__ = 'alaric'
| __version__ = '0.3.18'
__author__ = 'alaric'
| mit | Python |
049df6bbc6805312df4bbf86fc40f1603547d334 | enable tracking STARTED for workers picking up job (baking) (#198) | Connexions/cnx-publishing,Connexions/cnx-publishing,Connexions/cnx-publishing | cnxpublishing/tasks.py | cnxpublishing/tasks.py | # -*- coding: utf-8 -*-
"""\
Implementation of the Celery framework within a Pyramid application.
Use the ``task`` decorator provided by this module where the celery
documentation says to use ``@app.task``. It is used to register a function as
a task without making the celery application a global object.
"""
from __future__ import absolute_import
import celery
import venusian
from kombu import Queue
from pyramid.scripting import prepare
class PyramidAwareTask(celery.Task):
"""A Pyramid aware version of ``celery.task.Task``.
This sets up the pyramid application within the thread, thus allowing
``pyramid.threadlocal`` functions to work as expected.
"""
def __call__(self, *args, **kwargs):
# Prepare the pyramid environment.
if 'pyramid_config' in self.app.conf:
pyramid_config = self.app.conf['pyramid_config']
env = prepare(registry=pyramid_config.registry) # noqa
# Now run the original...
return super(PyramidAwareTask, self).__call__(*args, **kwargs)
def task(**kwargs):
"""A function task decorator used in place of ``@celery_app.task``."""
def wrapper(wrapped):
def callback(scanner, name, obj):
celery_app = scanner.config.registry.celery_app
celery_app.task(**kwargs)(obj)
venusian.attach(wrapped, callback)
return wrapped
return wrapper
def _make_celery_app(config):
"""This exposes the celery app. The app is actually created as part
of the configuration. However, this does make the celery app functional
as a stand-alone celery application.
This puts the pyramid configuration object on the celery app to be
used for making the registry available to tasks running inside the
celery worker process pool. See ``CustomTask.__call__``.
"""
# Tack the pyramid config on the celery app for later use.
config.registry.celery_app.conf['pyramid_config'] = config
return config.registry.celery_app
def includeme(config):
settings = config.registry.settings
config.registry.celery_app = celery.Celery('tasks')
config.registry.celery_app.conf.update(
broker_url=settings['celery.broker'],
result_backend=settings['celery.backend'],
result_persistent=True,
task_track_started=True,
task_default_queue='default',
task_queues=(
Queue('default'),
Queue('deferred'),
),
)
# Override the existing Task class.
config.registry.celery_app.Task = PyramidAwareTask
# Set the default celery app so that the AsyncResult class is able
# to assume the celery backend.
config.registry.celery_app.set_default()
config.add_directive('make_celery_app', _make_celery_app)
| # -*- coding: utf-8 -*-
"""\
Implementation of the Celery framework within a Pyramid application.
Use the ``task`` decorator provided by this module where the celery
documentation says to use ``@app.task``. It is used to register a function as
a task without making the celery application a global object.
"""
from __future__ import absolute_import
import celery
import venusian
from kombu import Queue
from pyramid.scripting import prepare
class PyramidAwareTask(celery.Task):
"""A Pyramid aware version of ``celery.task.Task``.
This sets up the pyramid application within the thread, thus allowing
``pyramid.threadlocal`` functions to work as expected.
"""
def __call__(self, *args, **kwargs):
# Prepare the pyramid environment.
if 'pyramid_config' in self.app.conf:
pyramid_config = self.app.conf['pyramid_config']
env = prepare(registry=pyramid_config.registry)
# Now run the original...
return super(PyramidAwareTask, self).__call__(*args, **kwargs)
def task(**kwargs):
"""A function task decorator used in place of ``@celery_app.task``."""
def wrapper(wrapped):
def callback(scanner, name, obj):
celery_app = scanner.config.registry.celery_app
celery_app.task(**kwargs)(obj)
venusian.attach(wrapped, callback)
return wrapped
return wrapper
def _make_celery_app(config):
"""This exposes the celery app. The app is actually created as part
of the configuration. However, this does make the celery app functional
as a stand-alone celery application.
This puts the pyramid configuration object on the celery app to be
used for making the registry available to tasks running inside the
celery worker process pool. See ``CustomTask.__call__``.
"""
# Tack the pyramid config on the celery app for later use.
config.registry.celery_app.conf['pyramid_config'] = config
return config.registry.celery_app
def includeme(config):
settings = config.registry.settings
config.registry.celery_app = celery.Celery('tasks')
config.registry.celery_app.conf.update(
broker_url=settings['celery.broker'],
result_backend=settings['celery.backend'],
result_persistent=True,
task_default_queue='default',
task_queues=(
Queue('default'),
Queue('deferred'),
),
)
# Override the existing Task class.
config.registry.celery_app.Task = PyramidAwareTask
# Set the default celery app so that the AsyncResult class is able
# to assume the celery backend.
config.registry.celery_app.set_default()
config.add_directive('make_celery_app', _make_celery_app)
| agpl-3.0 | Python |
bacdf53702306172063971aca5f122380960db03 | Bump version number to 1.2f1 | jayfk/django-comments-xtd,jayfk/django-comments-xtd,agilosoftware/django-comments-xtd,danirus/django-comments-xtd,danirus/django-comments-xtd,agilosoftware/django-comments-xtd,danirus/django-comments-xtd,agilosoftware/django-comments-xtd,danirus/django-comments-xtd | django_comments_xtd/__init__.py | django_comments_xtd/__init__.py | from django_comments_xtd.models import XtdComment
from django_comments_xtd.forms import XtdCommentForm
def get_model():
return XtdComment
def get_form():
return XtdCommentForm
VERSION = (1, 2, 0, 'f', 1) # following PEP 386
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3] != 'f':
version = '%s%s%s' % (version, VERSION[3], VERSION[4])
return version
| from django_comments_xtd.models import XtdComment
from django_comments_xtd.forms import XtdCommentForm
def get_model():
return XtdComment
def get_form():
return XtdCommentForm
VERSION = (1, 2, 0, 'f', 0) # following PEP 386
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3] != 'f':
version = '%s%s%s' % (version, VERSION[3], VERSION[4])
return version
| bsd-2-clause | Python |
8a6fe23ac93ecc3f7d08d41857a4752c1396fdff | Bump version | stoneworksolutions/django-rq-dashboard,brutasse/django-rq-dashboard,coffenbacher/django-rq-dashboard,brutasse/django-rq-dashboard,stoneworksolutions/django-rq-dashboard,spapas/django-rq-dashboard,coffenbacher/django-rq-dashboard,spapas/django-rq-dashboard,brutasse/django-rq-dashboard,spapas/django-rq-dashboard,stoneworksolutions/django-rq-dashboard | django_rq_dashboard/__init__.py | django_rq_dashboard/__init__.py | VERSION = (0, 2, 1)
def get_version():
return ".".join(map(str, VERSION))
__version__ = get_version()
| VERSION = (0, 2)
def get_version():
return ".".join(map(str, VERSION))
__version__ = get_version()
| bsd-3-clause | Python |
296951f29e42b79b239f287beba5d525c523dd0d | use max_int instead of any_int in `random` | shellphish/driller | driller/simprocedures.py | driller/simprocedures.py | #!/usr/bin/env pypy
import simuvex
import itertools
rand_count = itertools.count()
class random(simuvex.SimProcedure):
#pylint:disable=arguments-differ
def run(self, buf, count, rnd_bytes):
# return code
r = self.state.se.ite_cases((
(self.state.cgc.addr_invalid(buf), self.state.cgc.EFAULT),
(self.state.se.And(rnd_bytes != 0, self.state.cgc.addr_invalid(rnd_bytes)), self.state.cgc.EFAULT),
), self.state.se.BVV(0, self.state.arch.bits))
if self.state.satisfiable(extra_constraints=[count!=0]):
self.state.memory.store(buf, self.state.BVV("A" * self.state.se.max_int(count)), size=count)
self.state.memory.store(rnd_bytes, count, endness='Iend_LE', condition=rnd_bytes != 0)
return r
cgc_simprocedures = [("random", random)]
| #!/usr/bin/env pypy
import simuvex
import itertools
rand_count = itertools.count()
class random(simuvex.SimProcedure):
#pylint:disable=arguments-differ
def run(self, buf, count, rnd_bytes):
# return code
r = self.state.se.ite_cases((
(self.state.cgc.addr_invalid(buf), self.state.cgc.EFAULT),
(self.state.se.And(rnd_bytes != 0, self.state.cgc.addr_invalid(rnd_bytes)), self.state.cgc.EFAULT),
), self.state.se.BVV(0, self.state.arch.bits))
if self.state.satisfiable(extra_constraints=[count!=0]):
self.state.memory.store(buf, self.state.BVV("A" * self.state.se.any_int(count)), size=count)
self.state.memory.store(rnd_bytes, count, endness='Iend_LE', condition=rnd_bytes != 0)
return r
cgc_simprocedures = [("random", random)]
| bsd-2-clause | Python |
27308921de842f388d49cde8ff8ad9b258059b47 | Make code block stripping regex make sense (#46) | executablebooks/mdformat | mdformat/_util.py | mdformat/_util.py | import re
from typing import Any, Iterable, Mapping
from markdown_it import MarkdownIt
import mdformat.plugins
def is_md_equal(
md1: str,
md2: str,
options: Mapping[str, Any],
*,
extensions: Iterable[str] = (),
codeformatters: Iterable[str] = (),
) -> bool:
"""Check if two Markdown produce the same HTML.
Renders HTML from both Markdown strings, strips whitespace and
checks equality. Note that this is not a perfect solution, as there
can be meaningful whitespace in HTML, e.g. in a <code> block.
"""
html_texts = {}
mdit = MarkdownIt()
mdit.options["mdformat"] = options
for extension in extensions:
mdformat.plugins.PARSER_EXTENSIONS[extension].update_mdit(mdit)
for key, text in [("md1", md1), ("md2", md2)]:
html = mdit.render(text)
for codeclass in codeformatters:
html = re.sub(f'<code class="language-{codeclass}">.*</code>', "", html)
html = re.sub(r"\s+", "", html)
html_texts[key] = html
return html_texts["md1"] == html_texts["md2"]
| import re
from typing import Any, Iterable, Mapping
from markdown_it import MarkdownIt
import mdformat.plugins
def is_md_equal(
md1: str,
md2: str,
options: Mapping[str, Any],
*,
extensions: Iterable[str] = (),
codeformatters: Iterable[str] = (),
) -> bool:
"""Check if two Markdown produce the same HTML.
Renders HTML from both Markdown strings, strips whitespace and
checks equality. Note that this is not a perfect solution, as there
can be meaningful whitespace in HTML, e.g. in a <code> block.
"""
html_texts = {}
mdit = MarkdownIt()
mdit.options["mdformat"] = options
for extension in extensions:
mdformat.plugins.PARSER_EXTENSIONS[extension].update_mdit(mdit)
for key, text in [("md1", md1), ("md2", md2)]:
html = mdit.render(text)
html = re.sub(r"\s+", "", html)
for codeclass in codeformatters:
html = re.sub(rf'<codeclass="language-{codeclass}">.*</pre>', "", html)
html_texts[key] = html
return html_texts["md1"] == html_texts["md2"]
| mit | Python |
314d7a293ac875fec5656ce997ff6b1808f64bf0 | Mark oq dbserver restart as broken | gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine | openquake/commands/dbserver.py | openquake/commands/dbserver.py | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2016-2019 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import signal
import getpass
from openquake.baselib import sap, config
from openquake.commonlib import logs
from openquake.server import dbserver as dbs
@sap.script
def dbserver(cmd, dbhostport=None,
dbpath=os.path.expanduser(config.dbserver.file),
loglevel='INFO', foreground=False):
"""
start/stop/restart the database server, or return its status
"""
if config.dbserver.multi_user and getpass.getuser() != 'openquake':
sys.exit('oq dbserver only works in single user mode')
status = dbs.get_status()
if cmd == 'status':
print('dbserver ' + status)
elif cmd == 'stop':
if status == 'running':
pid = logs.dbcmd('getpid')
os.kill(pid, signal.SIGINT) # this is trapped by the DbServer
else:
print('dbserver already stopped')
elif cmd == 'start':
if status == 'not-running':
dbs.run_server(dbpath, dbhostport, loglevel, foreground)
else:
print('dbserver already running')
elif cmd == 'restart':
print('please use oq dbserver start/stop')
# FIXME restart is currently broken
# if status == 'running':
# pid = logs.dbcmd('getpid')
# os.kill(pid, signal.SIGINT)
# dbs.run_server(dbpath, dbhostport, loglevel, foreground)
dbserver.arg('cmd', 'dbserver command',
choices='start stop status restart'.split())
dbserver.arg('dbhostport', 'dbhost:port')
dbserver.arg('dbpath', 'dbpath')
dbserver.opt('loglevel', 'DEBUG|INFO|WARN')
dbserver.flg('foreground', 'stay in foreground')
| # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2016-2019 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import signal
import getpass
from openquake.baselib import sap, config
from openquake.commonlib import logs
from openquake.server import dbserver as dbs
@sap.script
def dbserver(cmd, dbhostport=None,
dbpath=os.path.expanduser(config.dbserver.file),
loglevel='INFO', foreground=False):
"""
start/stop/restart the database server, or return its status
"""
if config.dbserver.multi_user and getpass.getuser() != 'openquake':
sys.exit('oq dbserver only works in single user mode')
status = dbs.get_status()
if cmd == 'status':
print('dbserver ' + status)
elif cmd == 'stop':
if status == 'running':
pid = logs.dbcmd('getpid')
os.kill(pid, signal.SIGINT) # this is trapped by the DbServer
else:
print('dbserver already stopped')
elif cmd == 'start':
if status == 'not-running':
dbs.run_server(dbpath, dbhostport, loglevel, foreground)
else:
print('dbserver already running')
elif cmd == 'restart':
if status == 'running':
pid = logs.dbcmd('getpid')
os.kill(pid, signal.SIGINT)
dbs.run_server(dbpath, dbhostport, loglevel, foreground)
dbserver.arg('cmd', 'dbserver command',
choices='start stop status restart'.split())
dbserver.arg('dbhostport', 'dbhost:port')
dbserver.arg('dbpath', 'dbpath')
dbserver.opt('loglevel', 'DEBUG|INFO|WARN')
dbserver.flg('foreground', 'stay in foreground')
| agpl-3.0 | Python |
508f940755f3fdc2aca2ccd32fd4092dcf618423 | Update test_bugs.py | PyPSA/PyPSA | test/test_bugs.py | test/test_bugs.py | # -*- coding: utf-8 -*-
import numpy as np
import pypsa
def test_344():
"""
Overridden multi-links but empty n.links.
"""
override = pypsa.descriptors.Dict(
{k: v.copy() for k, v in pypsa.components.component_attrs.items()}
)
override["Link"].loc["bus2"] = [
"string",
np.nan,
np.nan,
"2nd bus",
"Input (optional)",
]
override["Link"].loc["efficiency2"] = [
"static or series",
"per unit",
1.0,
"2nd bus efficiency",
"Input (optional)",
]
override["Link"].loc["p2"] = ["series", "MW", 0.0, "2nd bus output", "Output"]
network = pypsa.Network(override_component_attrs=override)
network.add("Bus", "a")
network.add("Load", "a", bus="a", p_set=5)
network.add("Generator", "a", bus="a", p_nom=5)
network.lopf(pyomo=False)
def test_331():
n = pypsa.Network()
n.add("Bus", "bus")
n.add("Load", "load", bus="bus", p_set=10)
n.add("Generator", "generator1", bus="bus", p_nom=15, marginal_cost=10)
n.lopf(pyomo=False)
n.add("Generator", "generator2", bus="bus", p_nom=5, marginal_cost=5)
n.lopf(pyomo=False)
assert "generator2" in n.generators_t.p
def test_nomansland_bus(caplog):
n = pypsa.Network()
n.add("Bus", "bus")
n.add("Load", "load", bus="bus", p_set=10)
n.add("Generator", "generator1", bus="bus", p_nom=15, marginal_cost=10)
n.consistency_check()
assert "The following buses have no attached components" not in caplog.text, "warning should not trigger..."
n.add("Bus", "extrabus")
n.consistency_check()
assert "The following buses have no attached components" in caplog.text, "warning is not working..."
try:
n.lopf(pyomo=False)
except:
print("to be fixed - unconnected bus throws error in non-pyomo version.")
try:
n.lopf(pyomo=True)
except:
print("to be fixed - unconnected bus throws error in pyomo version.")
return True
| # -*- coding: utf-8 -*-
import numpy as np
import pypsa
def test_344():
"""
Overridden multi-links but empty n.links.
"""
override = pypsa.descriptors.Dict(
{k: v.copy() for k, v in pypsa.components.component_attrs.items()}
)
override["Link"].loc["bus2"] = [
"string",
np.nan,
np.nan,
"2nd bus",
"Input (optional)",
]
override["Link"].loc["efficiency2"] = [
"static or series",
"per unit",
1.0,
"2nd bus efficiency",
"Input (optional)",
]
override["Link"].loc["p2"] = ["series", "MW", 0.0, "2nd bus output", "Output"]
network = pypsa.Network(override_component_attrs=override)
network.add("Bus", "a")
network.add("Load", "a", bus="a", p_set=5)
network.add("Generator", "a", bus="a", p_nom=5)
network.lopf(pyomo=False)
def test_331():
n = pypsa.Network()
n.add("Bus", "bus")
n.add("Load", "load", bus="bus", p_set=10)
n.add("Generator", "generator1", bus="bus", p_nom=15, marginal_cost=10)
n.lopf(pyomo=False)
n.add("Generator", "generator2", bus="bus", p_nom=5, marginal_cost=5)
n.lopf(pyomo=False)
assert "generator2" in n.generators_t.p
def test_nomansland_bus(caplog):
n = pypsa.Network()
n.add("Bus", "bus")
n.add("Load", "load", bus="bus", p_set=10)
n.add("Generator", "generator1", bus="bus", p_nom=15, marginal_cost=10)
n.consistency_check()
assert "buses are not connected" not in caplog.text, "warning should not trigger..."
n.add("Bus", "extrabus")
n.consistency_check()
assert "buses are not connected" in caplog.text, "warning is not working..."
try:
n.lopf(pyomo=False)
except:
print("to be fixed - unconnected bus throws error in non-pyomo version.")
try:
n.lopf(pyomo=True)
except:
print("to be fixed - unconnected bus throws error in pyomo version.")
return True
| mit | Python |
a1bcadf74fcaaedaa0d21abafce8e3a11fb3191a | Modify test to pass. Fixes #51 test failure | gouthambs/Flask-Blogging,gouthambs/Flask-Blogging | test/test_core.py | test/test_core.py | try:
from builtins import str, range
except ImportError:
pass
from unittest import TestCase
from flask.ext.blogging import BloggingEngine, PostProcessor
from markdown.extensions.codehilite import CodeHiliteExtension
sample_markdown = """
##This is a test
:::python
print("Hello, World")
"""
expected_markup = u'<h2>This is a test</h2>\n' \
u'<div class="codehilite"><pre>' \
u'<span class="k">print</span>' \
u'<span class="p">(</span><span class="s">' \
u'"Hello, World"</span><span class="p">)' \
u'</span>\n' \
u'</pre></div>'
class TestCore(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_custom_md_extension(self):
extn = CodeHiliteExtension({})
engine = BloggingEngine(extensions=[extn])
extns = engine.post_processor.all_extensions()
self.assertEqual(len(extns), 3)
self.assertTrue(isinstance(extns[-1], CodeHiliteExtension))
#post = {"text": sample_markdown}
#PostProcessor.render_text(post)
| try:
from builtins import str, range
except ImportError:
pass
from unittest import TestCase
from flask.ext.blogging import BloggingEngine, PostProcessor
from markdown.extensions.codehilite import CodeHiliteExtension
sample_markdown = """
##This is a test
:::python
print("Hello, World")
"""
expected_markup = u'<h2>This is a test</h2>\n' \
u'<div class="codehilite"><pre>' \
u'<span class="k">print</span>' \
u'<span class="p">(</span><span class="s">' \
u'"Hello, World"</span><span class="p">)' \
u'</span>\n' \
u'</pre></div>'
class TestCore(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_custom_md_extension(self):
extn = CodeHiliteExtension({})
engine = BloggingEngine(extensions=[extn])
extns = engine.post_processor.all_extensions()
assert len(extns) == 3
post = {"text": sample_markdown}
PostProcessor.render_text(post)
self.assertEqual(post["rendered_text"], expected_markup)
| mit | Python |
5324e8159e0a1114b597cf06e1eab6237dae752b | Fix problem with inline flowables near end of line | brechtm/rinohtype,brechtm/rinohtype,beni55/rinohtype,brechtm/rinohtype,beni55/rinohtype | rinoh/inline.py | rinoh/inline.py | # This file is part of RinohType, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
from .element import DocumentElement
from .flowable import Flowable
from .layout import VirtualContainer
__all__ = ['InlineFlowableException', 'InlineFlowable']
class InlineFlowableException(Exception):
pass
class InlineFlowable(Flowable):
def font(self, document):
raise InlineFlowableException
def y_offset(self, document):
return 0
def spans(self, document):
yield self
def split(self, container):
yield self
def flow_inline(self, container, last_descender, state=None):
virtual_container = VirtualContainer(container)
width, _ = self.flow(virtual_container, last_descender, state=state)
return InlineFlowableSpan(width, virtual_container)
class InlineFlowableSpan(DocumentElement):
number_of_spaces = 0
ends_with_space = False
def __init__(self, width, virtual_container):
super().__init__()
self.width = width
self.virtual_container = virtual_container
def font(self, document):
raise InlineFlowableException
@property
def span(self):
return self
def height(self, document):
return self.virtual_container.cursor
def ascender(self, document):
return self.height(document)
def descender(self, document):
return 0
def line_gap(self, document):
return 0
def before_placing(self, container):
pass
# TODO: get_style and word_to_glyphs may need proper implementations
def get_style(self, attribute, document=None):
pass
def word_to_glyphs(self, word):
return word
| # This file is part of RinohType, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
from .element import DocumentElement
from .flowable import Flowable
from .layout import VirtualContainer
__all__ = ['InlineFlowableException', 'InlineFlowable']
class InlineFlowableException(Exception):
pass
class InlineFlowable(Flowable):
def font(self, document):
raise InlineFlowableException
def y_offset(self, document):
return 0
def spans(self, document):
yield self
def split(self, container):
yield self
def flow_inline(self, container, last_descender, state=None):
virtual_container = VirtualContainer(container)
width, _ = self.flow(virtual_container, last_descender, state=state)
return InlineFlowableSpan(width, virtual_container)
class InlineFlowableSpan(DocumentElement):
number_of_spaces = 0
ends_with_space = False
def __init__(self, width, virtual_container):
super().__init__()
self.width = width
self.virtual_container = virtual_container
def font(self, document):
raise InlineFlowableException
@property
def span(self):
return self
def height(self, document):
return self.virtual_container.cursor
def ascender(self, document):
return self.height(document)
def descender(self, document):
return 0
def line_gap(self, document):
return 0
def before_placing(self, container):
pass | agpl-3.0 | Python |
ce5cd35c62a94b7b35d0de6c351d0ad05e1a9f11 | fix #12 | r2600r/py-junos-netconify,r2600r/py-junos-netconify | lib/netconify/tty_telnet.py | lib/netconify/tty_telnet.py | from time import sleep
import telnetlib
from .tty import Terminal
##### -------------------------------------------------------------------------
##### Terminal connection over TELNET CONSOLE
##### -------------------------------------------------------------------------
class Telnet(Terminal):
RETRY_OPEN = 3 # number of attempts to open TTY
RETRY_BACKOFF = 2 # seconds to wait between retries
def __init__(self, host, port, **kvargs):
"""
:host:
The hostname or ip-addr of the ternminal server
:port:
The TCP port that maps to the TTY device on the
console server
:kvargs['timeout']:
this is the tty read polling timeout.
generally you should not have to tweak this.
"""
# initialize the underlying TTY device
self._tn = telnetlib.Telnet()
self.host = host
self.port = port
self.timeout = kvargs.get('timeout', self.TIMEOUT)
self._tty_name = "{}:{}".format(host,port)
Terminal.__init__(self, **kvargs)
### -------------------------------------------------------------------------
### I/O open close called from Terminal class
### -------------------------------------------------------------------------
def _tty_open(self):
retry = self.RETRY_OPEN
while retry > 0:
try:
self._tn.open(self.host,self.port,self.timeout)
break
except:
retry -= 1
# print "TTY busy, checking back in {} ...".format(self.RETRY_BACKOFF)
sleep(self.RETRY_BACKOFF)
else:
raise RuntimeError("open_fail: port not ready")
self.write('\n')
def _tty_close(self):
self._tn.close()
### -------------------------------------------------------------------------
### I/O read and write called from Terminal class
### -------------------------------------------------------------------------
def write(self, content):
""" write content + <ENTER> """
self._tn.write(content+'\n')
def rawwrite(self,content):
""" write content as-is """
self._tn.write(content)
def read(self):
""" read a single line """
return self._tn.read_until('\n', self.EXPECT_TIMEOUT)
def read_prompt(self):
got = self._tn.expect(Terminal._RE_PAT, self.EXPECT_TIMEOUT)
sre = got[1]
if 'in use' in got[2]:
raise RuntimeError("open_fail: port already in use")
# (buffer, RE group)
return (None,None) if not got[1] else (got[2], got[1].lastgroup)
| import telnetlib
from .tty import Terminal
##### -------------------------------------------------------------------------
##### Terminal connection over TELNET CONSOLE
##### -------------------------------------------------------------------------
class Telnet(Terminal):
def __init__(self, host, port, **kvargs):
"""
:host:
The hostname or ip-addr of the ternminal server
:port:
The TCP port that maps to the TTY device on the
console server
:kvargs['timeout']:
this is the tty read polling timeout.
generally you should not have to tweak this.
"""
# initialize the underlying TTY device
self._tn = telnetlib.Telnet()
self.host = host
self.port = port
self.timeout = kvargs.get('timeout', self.TIMEOUT)
self._tty_name = "{}:{}".format(host,port)
Terminal.__init__(self, **kvargs)
### -------------------------------------------------------------------------
### I/O open close called from Terminal class
### -------------------------------------------------------------------------
def _tty_open(self):
try:
self._tn.open(self.host,self.port,self.timeout)
except:
raise RuntimeError("open_fail: port not ready")
self.write('\n')
def _tty_close(self):
self._tn.close()
### -------------------------------------------------------------------------
### I/O read and write called from Terminal class
### -------------------------------------------------------------------------
def write(self, content):
""" write content + <ENTER> """
self._tn.write(content+'\n')
def rawwrite(self,content):
""" write content as-is """
self._tn.write(content)
def read(self):
""" read a single line """
return self._tn.read_until('\n', self.EXPECT_TIMEOUT)
def read_prompt(self):
got = self._tn.expect(Terminal._RE_PAT, self.EXPECT_TIMEOUT)
sre = got[1]
if 'in use' in got[2]:
raise RuntimeError("open_fail: port already in use")
# (buffer, RE group)
return (None,None) if not got[1] else (got[2], got[1].lastgroup)
| apache-2.0 | Python |
838175b849dd4eeb7ece537c4b286de88ff78987 | use new colorbar method | matplotlib/basemap,matplotlib/basemap,guziy/basemap,guziy/basemap | doc/users/figures/plotprecip.py | doc/users/figures/plotprecip.py | from mpl_toolkits.basemap import Basemap, cm
# requires netcdf4-python (netcdf4-python.googlecode.com)
from netCDF4 import Dataset as NetCDFFile
import numpy as np
import matplotlib.pyplot as plt
import copy
from matplotlib import rcParams
# plot rainfall from NWS using special precipitation
# colormap used by the NWS, and included in basemap.
nc = NetCDFFile('../../../examples/nws_precip_conus_20061222.nc')
# data from http://water.weather.gov/precip/
prcpvar = nc.variables['amountofprecip']
data = 0.01*prcpvar[:]
latcorners = nc.variables['lat'][:]
loncorners = -nc.variables['lon'][:]
lon_0 = -nc.variables['true_lon'].getValue()
lat_0 = nc.variables['true_lat'].getValue()
# create figure and axes instances
fig = plt.figure(figsize=(8,8))
ax = fig.add_axes([0.1,0.1,0.8,0.8])
# create polar stereographic Basemap instance.
m = Basemap(projection='stere',lon_0=lon_0,lat_0=90.,lat_ts=lat_0,\
llcrnrlat=latcorners[0],urcrnrlat=latcorners[2],\
llcrnrlon=loncorners[0],urcrnrlon=loncorners[2],\
rsphere=6371200.,resolution='l',area_thresh=10000)
# draw coastlines, state and country boundaries, edge of map.
m.drawcoastlines()
m.drawstates()
m.drawcountries()
# draw parallels.
parallels = np.arange(0.,90,10.)
m.drawparallels(parallels,labels=[1,0,0,0],fontsize=10)
# draw meridians
meridians = np.arange(180.,360.,10.)
m.drawmeridians(meridians,labels=[0,0,0,1],fontsize=10)
ny = data.shape[0]; nx = data.shape[1]
lons, lats = m.makegrid(nx, ny) # get lat/lons of ny by nx evenly space grid.
x, y = m(lons, lats) # compute map proj coordinates.
# draw filled contours.
clevs = [0,1,2.5,5,7.5,10,15,20,30,40,50,70,100,150,200,250,300,400,500,600,750]
cs = m.contourf(x,y,data,clevs,cmap=cm.s3pcpn)
# add colorbar.
cbar = m.colorbar(cs,location='bottom',pad=0.25)
cbar.set_label('mm')
# add title
plt.title(prcpvar.long_name+' for period ending '+prcpvar.dateofdata)
plt.savefig('plotprecip.png')
plt.show()
| from mpl_toolkits.basemap import Basemap, cm
# requires netcdf4-python (netcdf4-python.googlecode.com)
from netCDF4 import Dataset as NetCDFFile
import numpy as np
import matplotlib.pyplot as plt
import copy
from matplotlib import rcParams
# plot rainfall from NWS using special precipitation
# colormap used by the NWS, and included in basemap.
nc = NetCDFFile('../../../examples/nws_precip_conus_20061222.nc')
# data from http://water.weather.gov/precip/
prcpvar = nc.variables['amountofprecip']
data = 0.01*prcpvar[:]
latcorners = nc.variables['lat'][:]
loncorners = -nc.variables['lon'][:]
lon_0 = -nc.variables['true_lon'].getValue()
lat_0 = nc.variables['true_lat'].getValue()
# create figure and axes instances
fig = plt.figure(figsize=(8,8))
ax = fig.add_axes([0.05,0.05,0.9,0.9])
# create polar stereographic Basemap instance.
m = Basemap(projection='stere',lon_0=lon_0,lat_0=90.,lat_ts=lat_0,\
llcrnrlat=latcorners[0],urcrnrlat=latcorners[2],\
llcrnrlon=loncorners[0],urcrnrlon=loncorners[2],\
rsphere=6371200.,resolution='l',area_thresh=10000)
# draw coastlines, state and country boundaries, edge of map.
m.drawcoastlines()
m.drawstates()
m.drawcountries()
# draw parallels.
parallels = np.arange(0.,90,10.)
m.drawparallels(parallels,labels=[1,0,0,0],fontsize=10)
# draw meridians
meridians = np.arange(180.,360.,10.)
m.drawmeridians(meridians,labels=[0,0,0,1],fontsize=10)
ny = data.shape[0]; nx = data.shape[1]
lons, lats = m.makegrid(nx, ny) # get lat/lons of ny by nx evenly space grid.
x, y = m(lons, lats) # compute map proj coordinates.
# draw filled contours.
clevs = [0,1,2.5,5,7.5,10,15,20,30,40,50,70,100,150,200,250,300,400,500,600,750]
cs = m.contourf(x,y,data,clevs,cmap=cm.s3pcpn)
cbar = plt.colorbar(orientation='horizontal',shrink=0.75)
cbar.set_label('mm')
# plot title
plt.title(prcpvar.long_name+' for period ending '+prcpvar.dateofdata)
plt.savefig('plotprecip.png')
| mit | Python |
c760a0c8b06367d5e2ca954cb94d47efc2fcbe61 | Update 06_Potentiometer_Controlled_Servo.py | userdw/RaspberryPi_3_Starter_Kit | 06_Potentiometer_Controlled_Servo/06_Potentiometer_Controlled_Servo.py | 06_Potentiometer_Controlled_Servo/06_Potentiometer_Controlled_Servo.py | import MCP3202, wiringpi, os
from time import sleep
wiringpi.wiringPiSetup() # Must be called before using I/O function
wiringpi.softPwmCreate(1, 0, 100)
def translate(value,leftMin,leftMax,rightMin,rightMax):
# Figure out how 'wide' each range is
leftSpan = leftMax - leftMin
rightSpan = rightMax - rightMin
# Convert the left range into a 0-1 range (float)
valueScaled = float(value - leftMin) / float(leftSpan)
# Convert the 0-1 range into a value in the right range.
return rightMin + (valueScaled * rightSpan)
try:
while 1: # endless loop
os.system("clear")
value1 = MCP3202.readADC(0)
map = translate(value1, 0, 4095, 0, 100)
position = translate (map, 100, 0, 0, 180)
print("Servo Position")
print("Curent Position : ", int(position), "degree")
print("")
print("Press CTRL+C to exit")
wiringpi.softPwmWrite(1, int(map))
sleep(0.001)
except KeyboardInterrupt:
wiringpi.softPwmWrite(1, 0)
print "exit"
| import MCP3202, wiringpi,os # import library WiringPi2-Python
from time import sleep
wiringpi.wiringPiSetup() # Must be called before using IO function
wiringpi.softPwmCreate(1,0,100)
def translate(value,leftMin,leftMax,rightMin,rightMax):
# Figure out how 'wide' each range is
leftSpan = leftMax - leftMin
rightSpan = rightMax - rightMin
# Convert the left range into a 0-1 range (float)
valueScaled = float(value - leftMin) / float(leftSpan)
# Convert the 0-1 range into a value in the right range.
return rightMin + (valueScaled * rightSpan)
try:
while 1: # endless loop
os.system('clear')
value1 = MCP3202.readADC(0) # range data 0 - vref (volt)
map = translate(value1,0,4095,0,100)
position = translate (map,100,0,0,180)
print "Servo Position"
print "Curent Position : ",int(position),"degree"
print ""
print "Press CTRL+C to exit"
wiringpi.softPwmWrite(1,int(map)) # PWM pulse on pin 9
sleep(0.001)
except KeyboardInterrupt:
wiringpi.softPwmWrite(1,0) # PWM pulse on pin 9
print "exit"
| mit | Python |
428db361f4deaeb6b5d17a5e4eda85efecdeb77d | mark for release | PaesslerAG/django-performance-testing | django_performance_testing/__init__.py | django_performance_testing/__init__.py | __version__ = '0.7.0'
default_app_config = \
'django_performance_testing.apps.DjangoPerformanceTestingAppConfig'
| __version__ = '0.7.0-dev'
default_app_config = \
'django_performance_testing.apps.DjangoPerformanceTestingAppConfig'
| bsd-3-clause | Python |
1df3dd466c312118cbce1ab952a8ec1baed344ae | Remove info | RDCEP/atlas-viewer,RDCEP/atlas-viewer,RDCEP/atlas-viewer | atlas/constants.py | atlas/constants.py | import os
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
NC_FILE = os.path.join(
BASE_DIR, 'data', 'netcdf', 'full_global',
'papsim_wfdei.cru_hist_default_firr_aet_whe_annual_1979_2012.nc4')
MONGO = dict(user='username',
password='password',
domain='domain.tld',
database='database',
local=False,
port=27017,)
SCENARIOS = [
(0, 'default', 'Default', ),
(1, 'fullharm', 'Full harm', ),
]
IRRIGATION = [
(0, 'firr', 'Full', ),
(1, 'noirr', 'Rainfed', ),
(1, 'sum', 'Sum', ),
]
MODELS = [
(0, 'papsim', 'pAPSIM', ),
(1, 'pdssat', 'pDSSAT', ),
]
DATASETS = [
(0, 'wfdei.cru', 'WFDEI.CRU', 1979, 2012),
(1, 'agmerra', 'AgMERRA', 1980, 2010),
(2, 'hadgem', 'HADGEM', 1950, 2099),
]
CROPS = [
(0, 'mai', 'Maize', ),
(1, 'mil', 'Millet', ),
(2, 'ric', 'Rice', ),
(3, 'sor', 'Sorghum', ),
(4, 'soy', 'Soybean', ),
(5, 'whe', 'Wheat', ),
]
VARIABLES = [
(0, 'aet', 'aet', ),
(1, 'anth-day', 'anth-day', ),
(2, 'gsprcp', 'gsprcp', ),
(3, 'initr', 'initr', ),
(4, 'leach', 'leach', ),
(5, 'maty-day', 'maty-day', ),
(6, 'pirrww', 'pirrww', ),
(7, 'plant-day', 'plant-day', ),
(8, 'sco2', 'sco2', ),
(9, 'sn2o', 'sn2o', ),
(10, 'sumt', 'sumt', ),
(11, 'yield', 'yield', ),
]
MODEL_OPTS = {
'papsim': {
'variables': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
'crops': [0, 3, 4, 5],
},
} | import os
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
NC_FILE = os.path.join(
BASE_DIR, 'data', 'netcdf', 'full_global',
'papsim_wfdei.cru_hist_default_firr_aet_whe_annual_1979_2012.nc4')
MONGO = dict(user='username',
password='password',
domain='domain.tld',
database='database',
local=False,
SCENARIOS = [
(0, 'default', 'Default', ),
(1, 'fullharm', 'Full harm', ),
]
IRRIGATION = [
(0, 'firr', 'Full', ),
(1, 'noirr', 'Rainfed', ),
(1, 'sum', 'Sum', ),
]
MODELS = [
(0, 'papsim', 'pAPSIM', ),
(1, 'pdssat', 'pDSSAT', ),
]
DATASETS = [
(0, 'wfdei.cru', 'WFDEI.CRU', 1979, 2012),
(1, 'agmerra', 'AgMERRA', 1980, 2010),
(2, 'hadgem', 'HADGEM', 1950, 2099),
]
CROPS = [
(0, 'mai', 'Maize', ),
(1, 'mil', 'Millet', ),
(2, 'ric', 'Rice', ),
(3, 'sor', 'Sorghum', ),
(4, 'soy', 'Soybean', ),
(5, 'whe', 'Wheat', ),
]
VARIABLES = [
(0, 'aet', 'aet', ),
(1, 'anth-day', 'anth-day', ),
(2, 'gsprcp', 'gsprcp', ),
(3, 'initr', 'initr', ),
(4, 'leach', 'leach', ),
(5, 'maty-day', 'maty-day', ),
(6, 'pirrww', 'pirrww', ),
(7, 'plant-day', 'plant-day', ),
(8, 'sco2', 'sco2', ),
(9, 'sn2o', 'sn2o', ),
(10, 'sumt', 'sumt', ),
(11, 'yield', 'yield', ),
]
MODEL_OPTS = {
'papsim': {
'variables': [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
'crops': [0, 3, 4, 5],
},
} | apache-2.0 | Python |
571b83b5d0389c205bd9a7a3978fb04bc146c192 | replace action hash list() by a set(). | maximumG/exscript,knipknap/exscript,knipknap/exscript,maximumG/exscript | src/Exscript/Task.py | src/Exscript/Task.py | # Copyright (C) 2007-2011 Samuel Abels.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
Represents a batch of enqueued actions.
"""
from Exscript.util.event import Event
class Task(object):
"""
Represents a batch of running actions.
"""
def __init__(self, queue):
self.done_event = Event()
self.queue = queue
self.action_hashes = set()
self.completed = 0
def _on_action_done(self, action):
self.completed += 1
if self.is_completed():
self.done_event()
def is_completed(self):
"""
Returns True if all actions in the task are completed, returns
False otherwise.
@rtype: bool
@return: Whether the task is completed.
"""
return self.completed == len(self.action_hashes)
def wait(self):
"""
Waits until all actions in the task have completed.
Does not use any polling.
"""
for thehash in self.action_hashes:
self.queue.wait_for(thehash)
def add_action(self, action):
"""
Adds a new action to the task.
@type action: Action
@param action: The action to be added.
"""
self.action_hashes.add(action.__hash__())
action.aborted_event.listen(self._on_action_done)
action.succeeded_event.listen(self._on_action_done)
| # Copyright (C) 2007-2011 Samuel Abels.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
Represents a batch of enqueued actions.
"""
from Exscript.util.event import Event
class Task(object):
"""
Represents a batch of running actions.
"""
def __init__(self, queue):
self.done_event = Event()
self.queue = queue
self.action_hash_list = []
self.completed = 0
def _on_action_done(self, action):
self.completed += 1
if self.is_completed():
self.done_event()
def is_completed(self):
"""
Returns True if all actions in the task are completed, returns
False otherwise.
@rtype: bool
@return: Whether the task is completed.
"""
return self.completed == len(self.action_hash_list)
def wait(self):
"""
Waits until all actions in the task have completed.
Does not use any polling.
"""
for thehash in self.action_hash_list:
self.queue.wait_for(thehash)
def add_action(self, action):
"""
Adds a new action to the task.
@type action: Action
@param action: The action to be added.
"""
self.action_hash_list.append(action.__hash__())
action.aborted_event.listen(self._on_action_done)
action.succeeded_event.listen(self._on_action_done)
| mit | Python |
97d2711e9b3aa1d2a8610cbb622f7b451ba660fa | Add a way to login | hreeder/WHAuth,hreeder/WHAuth,hreeder/WHAuth | auth/core/views.py | auth/core/views.py | from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, login_user
from auth import db
from auth.utils import send_email
from auth.core import core
from auth.core.forms import LoginForm, RegistrationForm
from auth.core.models.user import User
@core.route("/")
@login_required
def home():
return render_template("core_home.html")
@core.route("/login", methods=["GET", "POST"])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(
username=form.username.data,
active=True
).first()
if not user:
flash("User account not found!", "danger")
return redirect(url_for("core.login"))
if user.validate_password(form.password.data):
login_user(user)
return redirect(request.args.get("next") or url_for("core.home"))
else:
flash("Your password was incorrect!", "danger")
return render_template("core_login.html", form=form)
@core.route("/register", methods=["GET", "POST"])
def register():
form = RegistrationForm()
if form.validate_on_submit():
# Create user model
new_user = User(
username=form.username.data,
email=form.email.data,
password=User.generate_password_hash(form.password.data)
)
# Set activation key
new_user.generate_activation_key()
print(new_user.activation_key)
print(url_for('core.validate_registration', username=new_user.username, key=new_user.activation_key))
# Save user
db.session.add(new_user)
db.session.commit()
# Send the new user their activation code
# send_email()
return redirect(url_for('core.post_register'))
return render_template("core_register.html", form=form)
@core.route("/register/validating")
def post_register():
return render_template("core_post_register.html")
@core.route("/register/validate/<username>/<key>")
def validate_registration(username, key):
user = User.query.filter_by(username=username, activation_key=key, active=False).first_or_404()
user.activate()
db.session.add(user)
db.session.commit()
return redirect(url_for('core.login')) | from flask import render_template, redirect, url_for
from flask.ext.login import login_required
from auth import db
from auth.utils import send_email
from auth.core import core
from auth.core.forms import LoginForm, RegistrationForm
from auth.core.models.user import User
@core.route("/")
@login_required
def home():
return render_template("core_home.html")
@core.route("/login", methods=["GET", "POST"])
def login():
form = LoginForm()
return render_template("core_login.html", form=form)
@core.route("/register", methods=["GET", "POST"])
def register():
form = RegistrationForm()
if form.validate_on_submit():
# Create user model
new_user = User(
username=form.username.data,
email=form.email.data,
password=User.generate_password_hash(form.password.data)
)
# Set activation key
new_user.generate_activation_key()
print(new_user.activation_key)
print(url_for('core.validate_registration', username=new_user.username, key=new_user.activation_key))
# Save user
db.session.add(new_user)
db.session.commit()
# Send the new user their activation code
# send_email()
return redirect(url_for('core.post_register'))
return render_template("core_register.html", form=form)
@core.route("/register/validating")
def post_register():
return render_template("core_post_register.html")
@core.route("/register/validate/<username>/<key>")
def validate_registration(username, key):
user = User.query.filter_by(username=username, activation_key=key, active=False).first_or_404()
user.activate()
db.session.add(user)
db.session.commit()
return redirect(url_for('core.login')) | mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.